summaryrefslogtreecommitdiff
path: root/chromium/media
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2018-08-28 15:28:34 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2018-08-28 13:54:51 +0000
commit2a19c63448c84c1805fb1a585c3651318bb86ca7 (patch)
treeeb17888e8531aa6ee5e85721bd553b832a7e5156 /chromium/media
parentb014812705fc80bff0a5c120dfcef88f349816dc (diff)
downloadqtwebengine-chromium-2a19c63448c84c1805fb1a585c3651318bb86ca7.tar.gz
BASELINE: Update Chromium to 69.0.3497.70
Change-Id: I2b7b56e4e7a8b26656930def0d4575dc32b900a0 Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/media')
-rw-r--r--chromium/media/DEPS6
-rw-r--r--chromium/media/README.md7
-rw-r--r--chromium/media/audio/BUILD.gn12
-rw-r--r--chromium/media/audio/audio_device_thread.cc17
-rw-r--r--chromium/media/audio/audio_device_thread.h11
-rw-r--r--chromium/media/audio/audio_input_controller.h7
-rw-r--r--chromium/media/audio/audio_input_controller_unittest.cc21
-rw-r--r--chromium/media/audio/audio_input_device.cc58
-rw-r--r--chromium/media/audio/audio_input_device.h24
-rw-r--r--chromium/media/audio/audio_input_device_unittest.cc43
-rw-r--r--chromium/media/audio/audio_input_ipc.h9
-rw-r--r--chromium/media/audio/audio_manager_base.cc5
-rw-r--r--chromium/media/audio/audio_output_device.cc169
-rw-r--r--chromium/media/audio/audio_output_device.h37
-rw-r--r--chromium/media/audio/audio_output_device_thread_callback.cc139
-rw-r--r--chromium/media/audio/audio_output_device_thread_callback.h74
-rw-r--r--chromium/media/audio/audio_output_device_unittest.cc77
-rw-r--r--chromium/media/audio/audio_output_ipc.h9
-rw-r--r--chromium/media/audio/audio_output_resampler.cc3
-rw-r--r--chromium/media/audio/audio_output_resampler.h2
-rw-r--r--chromium/media/audio/audio_output_unittest.cc2
-rw-r--r--chromium/media/audio/audio_thread_impl.cc6
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.cc36
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.h5
-rw-r--r--chromium/media/audio/cras/cras_input.cc7
-rw-r--r--chromium/media/audio/cras/cras_input.h3
-rw-r--r--chromium/media/audio/fuchsia/audio_manager_fuchsia.cc81
-rw-r--r--chromium/media/audio/fuchsia/audio_manager_fuchsia.h8
-rw-r--r--chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc222
-rw-r--r--chromium/media/audio/fuchsia/audio_output_stream_fuchsia.h44
-rw-r--r--chromium/media/audio/mac/audio_manager_mac.cc6
-rw-r--r--chromium/media/audio/mac/core_audio_util_mac.cc55
-rw-r--r--chromium/media/audio/mac/core_audio_util_mac.h10
-rw-r--r--chromium/media/audio/mac/coreaudio_dispatch_override.cc12
-rw-r--r--chromium/media/audio/pulse/audio_manager_pulse.cc92
-rw-r--r--chromium/media/audio/pulse/audio_manager_pulse.h15
-rw-r--r--chromium/media/audio/pulse/pulse.sigs7
-rw-r--r--chromium/media/audio/pulse/pulse_input.cc4
-rw-r--r--chromium/media/audio/pulse/pulse_util.cc113
-rw-r--r--chromium/media/audio/pulse/pulse_util.h12
-rw-r--r--chromium/media/audio/test_audio_input_controller_factory.cc79
-rw-r--r--chromium/media/audio/test_audio_input_controller_factory.h141
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.cc42
-rw-r--r--chromium/media/audio/win/core_audio_util_win.cc54
-rw-r--r--chromium/media/audio/win/core_audio_util_win.h5
-rw-r--r--chromium/media/audio/win/core_audio_util_win_unittest.cc10
-rw-r--r--chromium/media/base/BUILD.gn17
-rw-r--r--chromium/media/base/OWNERS3
-rw-r--r--chromium/media/base/android/media_codec_bridge_impl_unittest.cc14
-rw-r--r--chromium/media/base/android/media_codec_loop.cc9
-rw-r--r--chromium/media/base/android/media_codec_loop.h16
-rw-r--r--chromium/media/base/android/media_codec_loop_unittest.cc32
-rw-r--r--chromium/media/base/android/media_drm_bridge.cc6
-rw-r--r--chromium/media/base/android/media_drm_storage_bridge.cc12
-rw-r--r--chromium/media/base/android/media_player_bridge_unittest.cc1
-rw-r--r--chromium/media/base/android/media_player_manager.h3
-rw-r--r--chromium/media/base/android_overlay_config.h10
-rw-r--r--chromium/media/base/audio_bus.cc12
-rw-r--r--chromium/media/base/audio_bus.h3
-rw-r--r--chromium/media/base/audio_decoder.cc4
-rw-r--r--chromium/media/base/audio_decoder.h6
-rw-r--r--chromium/media/base/audio_parameters.h1
-rw-r--r--chromium/media/base/audio_point.cc40
-rw-r--r--chromium/media/base/audio_point.h7
-rw-r--r--chromium/media/base/audio_point_unittest.cc18
-rw-r--r--chromium/media/base/bind_to_current_loop.h38
-rw-r--r--chromium/media/base/cdm_config.h4
-rw-r--r--chromium/media/base/cdm_key_information.cc14
-rw-r--r--chromium/media/base/cdm_key_information.h6
-rw-r--r--chromium/media/base/content_decryption_module.h8
-rw-r--r--chromium/media/base/decoder_buffer.cc2
-rw-r--r--chromium/media/base/decrypt_config.cc6
-rw-r--r--chromium/media/base/decrypt_config.h9
-rw-r--r--chromium/media/base/demuxer.h11
-rw-r--r--chromium/media/base/eme_constants.h13
-rw-r--r--chromium/media/base/encryption_pattern.cc8
-rw-r--r--chromium/media/base/encryption_pattern.h11
-rw-r--r--chromium/media/base/encryption_scheme.cc16
-rw-r--r--chromium/media/base/ipc/media_param_traits_macros.h9
-rw-r--r--chromium/media/base/key_system_properties.cc7
-rw-r--r--chromium/media/base/key_system_properties.h12
-rw-r--r--chromium/media/base/key_systems.cc99
-rw-r--r--chromium/media/base/key_systems.h11
-rw-r--r--chromium/media/base/key_systems_unittest.cc311
-rw-r--r--chromium/media/base/media_controller.h44
-rw-r--r--chromium/media/base/media_log.cc17
-rw-r--r--chromium/media/base/media_log.h12
-rw-r--r--chromium/media/base/media_log_unittest.cc79
-rw-r--r--chromium/media/base/media_status.cc25
-rw-r--r--chromium/media/base/media_status.h62
-rw-r--r--chromium/media/base/media_status_observer.h25
-rw-r--r--chromium/media/base/media_switches.cc75
-rw-r--r--chromium/media/base/media_switches.h12
-rw-r--r--chromium/media/base/media_url_demuxer.cc3
-rw-r--r--chromium/media/base/media_url_demuxer.h3
-rw-r--r--chromium/media/base/media_url_demuxer_unittest.cc3
-rw-r--r--chromium/media/base/mime_util_internal.cc23
-rw-r--r--chromium/media/base/mock_demuxer_host.h3
-rw-r--r--chromium/media/base/mock_filters.h6
-rw-r--r--chromium/media/base/overlay_info.cc7
-rw-r--r--chromium/media/base/overlay_info.h6
-rw-r--r--chromium/media/base/pipeline_impl.cc114
-rw-r--r--chromium/media/base/pipeline_impl.h2
-rw-r--r--chromium/media/base/pipeline_impl_unittest.cc88
-rw-r--r--chromium/media/base/pipeline_status.h2
-rw-r--r--chromium/media/base/scopedfd_helper.cc30
-rw-r--r--chromium/media/base/scopedfd_helper.h28
-rw-r--r--chromium/media/base/stream_parser.cc1
-rw-r--r--chromium/media/base/stream_parser.h25
-rw-r--r--chromium/media/base/stream_parser_buffer.cc2
-rw-r--r--chromium/media/base/stream_parser_buffer.h20
-rw-r--r--chromium/media/base/subsample_entry.h1
-rw-r--r--chromium/media/base/surface_manager.h45
-rw-r--r--chromium/media/base/test_helpers.cc2
-rw-r--r--chromium/media/base/unaligned_shared_memory.cc74
-rw-r--r--chromium/media/base/unaligned_shared_memory.h57
-rw-r--r--chromium/media/base/unaligned_shared_memory_unittest.cc88
-rw-r--r--chromium/media/base/video_bitrate_allocation.cc26
-rw-r--r--chromium/media/base/video_bitrate_allocation.h9
-rw-r--r--chromium/media/base/video_bitrate_allocation_unittest.cc13
-rw-r--r--chromium/media/base/video_codecs.cc223
-rw-r--r--chromium/media/base/video_codecs.h18
-rw-r--r--chromium/media/base/video_codecs_unittest.cc282
-rw-r--r--chromium/media/base/video_decoder.cc4
-rw-r--r--chromium/media/base/video_decoder.h9
-rw-r--r--chromium/media/base/video_decoder_config.cc4
-rw-r--r--chromium/media/base/video_frame.cc194
-rw-r--r--chromium/media/base/video_frame.h104
-rw-r--r--chromium/media/base/video_frame_layout.cc57
-rw-r--r--chromium/media/base/video_frame_layout.h88
-rw-r--r--chromium/media/base/video_frame_layout_unittest.cc113
-rw-r--r--chromium/media/base/video_frame_metadata.h2
-rw-r--r--chromium/media/base/video_frame_unittest.cc104
-rw-r--r--chromium/media/blink/BUILD.gn9
-rw-r--r--chromium/media/blink/DEPS4
-rw-r--r--chromium/media/blink/key_system_config_selector.cc101
-rw-r--r--chromium/media/blink/key_system_config_selector.h21
-rw-r--r--chromium/media/blink/key_system_config_selector_unittest.cc1126
-rw-r--r--chromium/media/blink/run_all_unittests.cc9
-rw-r--r--chromium/media/blink/video_frame_compositor.cc50
-rw-r--r--chromium/media/blink/video_frame_compositor.h27
-rw-r--r--chromium/media/blink/video_frame_compositor_unittest.cc36
-rw-r--r--chromium/media/blink/watch_time_component.cc133
-rw-r--r--chromium/media/blink/watch_time_component.h135
-rw-r--r--chromium/media/blink/watch_time_component_unittest.cc300
-rw-r--r--chromium/media/blink/watch_time_reporter.cc600
-rw-r--r--chromium/media/blink/watch_time_reporter.h99
-rw-r--r--chromium/media/blink/watch_time_reporter_unittest.cc180
-rw-r--r--chromium/media/blink/webcontentdecryptionmodulesession_impl.cc17
-rw-r--r--chromium/media/blink/webmediacapabilitiesclient_impl.cc25
-rw-r--r--chromium/media/blink/webmediacapabilitiesclient_impl.h3
-rw-r--r--chromium/media/blink/webmediacapabilitiesclient_impl_unittest.cc91
-rw-r--r--chromium/media/blink/webmediaplayer_cast_android.cc7
-rw-r--r--chromium/media/blink/webmediaplayer_delegate.h6
-rw-r--r--chromium/media/blink/webmediaplayer_impl.cc231
-rw-r--r--chromium/media/blink/webmediaplayer_impl.h44
-rw-r--r--chromium/media/blink/webmediaplayer_impl_unittest.cc179
-rw-r--r--chromium/media/blink/webmediaplayer_params.cc9
-rw-r--r--chromium/media/blink/webmediaplayer_params.h30
-rw-r--r--chromium/media/blink/webmediasource_impl.cc4
-rw-r--r--chromium/media/blink/webmediasource_impl.h2
-rw-r--r--chromium/media/blink/websourcebuffer_impl.cc17
-rw-r--r--chromium/media/blink/websourcebuffer_impl.h5
-rw-r--r--chromium/media/capabilities/BUILD.gn5
-rw-r--r--chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc211
-rw-r--r--chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h126
-rw-r--r--chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc391
-rw-r--r--chromium/media/capabilities/video_decode_stats_db.cc64
-rw-r--r--chromium/media/capabilities/video_decode_stats_db.h33
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl.cc59
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl.h7
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_provider.cc11
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_provider.h36
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_unittest.cc12
-rw-r--r--chromium/media/capture/BUILD.gn110
-rw-r--r--chromium/media/capture/DEPS2
-rw-r--r--chromium/media/capture/content/android/BUILD.gn4
-rw-r--r--chromium/media/capture/content/android/screen_capture_machine_android.cc38
-rw-r--r--chromium/media/capture/content/android/screen_capture_machine_android.h24
-rw-r--r--chromium/media/capture/content/android/thread_safe_capture_oracle.cc (renamed from chromium/media/capture/content/thread_safe_capture_oracle.cc)17
-rw-r--r--chromium/media/capture/content/android/thread_safe_capture_oracle.h (renamed from chromium/media/capture/content/thread_safe_capture_oracle.h)30
-rw-r--r--chromium/media/capture/content/screen_capture_device_core.cc172
-rw-r--r--chromium/media/capture/content/screen_capture_device_core.h122
-rw-r--r--chromium/media/capture/mojom/video_capture_types.mojom27
-rw-r--r--chromium/media/capture/mojom/video_capture_types.typemap1
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.cc87
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.h28
-rw-r--r--chromium/media/capture/run_all_unittests.cc12
-rw-r--r--chromium/media/capture/video/android/video_capture_device_factory_android.cc10
-rw-r--r--chromium/media/capture/video/chromeos/DEPS1
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc11
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc2
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc73
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.h7
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc2
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc66
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h8
-rw-r--r--chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc4
-rw-r--r--chromium/media/capture/video/chromeos/mojo/camera_metadata_tags.mojom64
-rw-r--r--chromium/media/capture/video/chromeos/pixel_format_utils.cc2
-rw-r--r--chromium/media/capture/video/chromeos/public/BUILD.gn14
-rw-r--r--chromium/media/capture/video/chromeos/public/cros_features.cc19
-rw-r--r--chromium/media/capture/video/chromeos/public/cros_features.h16
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.cc31
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc63
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h17
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.cc91
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.h22
-rw-r--r--chromium/media/capture/video/linux/camera_config_chromeos.h10
-rw-r--r--chromium/media/capture/video/linux/fake_v4l2_impl.cc198
-rw-r--r--chromium/media/capture/video/linux/fake_v4l2_impl.h62
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate.cc447
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate.h37
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc3
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_device.h44
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_device_impl.cc48
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_device_impl.h39
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_chromeos.cc29
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_chromeos.h26
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.cc341
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.h38
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc107
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_linux.cc5
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_linux.h4
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac.mm10
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc4
-rw-r--r--chromium/media/capture/video/shared_memory_buffer_tracker.cc4
-rw-r--r--chromium/media/capture/video/shared_memory_buffer_tracker.h1
-rw-r--r--chromium/media/capture/video/shared_memory_handle_provider.cc23
-rw-r--r--chromium/media/capture/video/shared_memory_handle_provider.h10
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool.h4
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.cc28
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.h2
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker.h1
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc42
-rw-r--r--chromium/media/capture/video/video_capture_device_client.h19
-rw-r--r--chromium/media/capture/video/video_capture_device_client_unittest.cc4
-rw-r--r--chromium/media/capture/video/video_capture_device_descriptor.cc10
-rw-r--r--chromium/media/capture/video/video_capture_device_descriptor.h1
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.cc49
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.h12
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc38
-rw-r--r--chromium/media/capture/video/video_capture_jpeg_decoder.h4
-rw-r--r--chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc260
-rw-r--r--chromium/media/capture/video/video_capture_jpeg_decoder_impl.h119
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc25
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc30
-rw-r--r--chromium/media/capture/video/win/video_capture_device_win.cc8
-rw-r--r--chromium/media/capture/video_capture_types.cc3
-rw-r--r--chromium/media/capture/video_capture_types.h8
-rw-r--r--chromium/media/cast/BUILD.gn6
-rw-r--r--chromium/media/cast/logging/encoding_event_subscriber.cc96
-rw-r--r--chromium/media/cast/logging/encoding_event_subscriber.h29
-rw-r--r--chromium/media/cast/logging/encoding_event_subscriber_unittest.cc139
-rw-r--r--chromium/media/cast/logging/serialize_deserialize_test.cc10
-rw-r--r--chromium/media/cast/sender/external_video_encoder.cc29
-rw-r--r--chromium/media/cast/sender/frame_sender.cc42
-rw-r--r--chromium/media/cast/sender/frame_sender.h13
-rw-r--r--chromium/media/cast/sender/h264_vt_encoder_unittest.cc5
-rw-r--r--chromium/media/cast/sender/video_encoder_unittest.cc27
-rw-r--r--chromium/media/cdm/BUILD.gn6
-rw-r--r--chromium/media/cdm/aes_decryptor.cc6
-rw-r--r--chromium/media/cdm/aes_decryptor.h2
-rw-r--r--chromium/media/cdm/aes_decryptor_unittest.cc30
-rw-r--r--chromium/media/cdm/api/content_decryption_module.h2
-rw-r--r--chromium/media/cdm/api/content_decryption_module_proxy.h2
-rw-r--r--chromium/media/cdm/cdm_adapter.cc18
-rw-r--r--chromium/media/cdm/cdm_adapter_unittest.cc4
-rw-r--r--chromium/media/cdm/cdm_proxy.h8
-rw-r--r--chromium/media/cdm/cenc_decryptor_unittest.cc30
-rw-r--r--chromium/media/cdm/json_web_key.cc28
-rw-r--r--chromium/media/cdm/json_web_key_unittest.cc42
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn5
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_handler.cc (renamed from chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.cc)51
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_handler.h (renamed from chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.h)30
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc80
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h10
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc54
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h2
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc7
-rw-r--r--chromium/media/device_monitors/device_monitor_mac.mm14
-rw-r--r--chromium/media/ffmpeg/ffmpeg_common.cc2
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.cc2
-rw-r--r--chromium/media/filters/aom_video_decoder.cc61
-rw-r--r--chromium/media/filters/aom_video_decoder_unittest.cc4
-rw-r--r--chromium/media/filters/audio_file_reader.cc23
-rw-r--r--chromium/media/filters/audio_file_reader_unittest.cc81
-rw-r--r--chromium/media/filters/chunk_demuxer.cc132
-rw-r--r--chromium/media/filters/chunk_demuxer.h56
-rw-r--r--chromium/media/filters/chunk_demuxer_unittest.cc501
-rw-r--r--chromium/media/filters/decoder_stream.cc56
-rw-r--r--chromium/media/filters/decoder_stream.h19
-rw-r--r--chromium/media/filters/decrypting_audio_decoder.cc50
-rw-r--r--chromium/media/filters/decrypting_audio_decoder_unittest.cc35
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.cc19
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream_unittest.cc14
-rw-r--r--chromium/media/filters/decrypting_video_decoder.cc61
-rw-r--r--chromium/media/filters/decrypting_video_decoder_unittest.cc33
-rw-r--r--chromium/media/filters/demuxer_perftest.cc10
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.cc54
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.h6
-rw-r--r--chromium/media/filters/ffmpeg_demuxer_unittest.cc179
-rw-r--r--chromium/media/filters/ffmpeg_glue_unittest.cc2
-rw-r--r--chromium/media/filters/frame_buffer_pool.cc34
-rw-r--r--chromium/media/filters/frame_processor.cc11
-rw-r--r--chromium/media/filters/frame_processor_unittest.cc7
-rw-r--r--chromium/media/filters/gpu_video_decoder.cc5
-rw-r--r--chromium/media/filters/gpu_video_decoder.h2
-rw-r--r--chromium/media/filters/pipeline_controller.cc8
-rw-r--r--chromium/media/filters/pipeline_controller.h7
-rw-r--r--chromium/media/filters/source_buffer_state.cc166
-rw-r--r--chromium/media/filters/source_buffer_state.h42
-rw-r--r--chromium/media/filters/source_buffer_state_unittest.cc17
-rw-r--r--chromium/media/filters/source_buffer_stream.cc50
-rw-r--r--chromium/media/filters/source_buffer_stream.h10
-rw-r--r--chromium/media/filters/source_buffer_stream_unittest.cc31
-rw-r--r--chromium/media/filters/stream_parser_factory.cc6
-rw-r--r--chromium/media/filters/video_frame_stream_unittest.cc16
-rw-r--r--chromium/media/filters/video_renderer_algorithm.cc10
-rw-r--r--chromium/media/filters/video_renderer_algorithm_unittest.cc15
-rw-r--r--chromium/media/filters/vpx_video_decoder_unittest.cc33
-rw-r--r--chromium/media/formats/common/stream_parser_test_base.cc21
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser.cc23
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser.h3
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc32
-rw-r--r--chromium/media/formats/mp4/avc.cc7
-rw-r--r--chromium/media/formats/mp4/avc.h4
-rw-r--r--chromium/media/formats/mp4/avc_unittest.cc14
-rw-r--r--chromium/media/formats/mp4/box_definitions.cc88
-rw-r--r--chromium/media/formats/mp4/box_definitions.h8
-rw-r--r--chromium/media/formats/mp4/fourccs.h1
-rw-r--r--chromium/media/formats/mp4/hevc.cc13
-rw-r--r--chromium/media/formats/mp4/hevc.h4
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser.cc10
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser.h3
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser_unittest.cc30
-rw-r--r--chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc11
-rw-r--r--chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h3
-rw-r--r--chromium/media/formats/webm/webm_cluster_parser.cc51
-rw-r--r--chromium/media/formats/webm/webm_cluster_parser.h17
-rw-r--r--chromium/media/formats/webm/webm_cluster_parser_unittest.cc12
-rw-r--r--chromium/media/formats/webm/webm_stream_parser.cc10
-rw-r--r--chromium/media/formats/webm/webm_stream_parser.h3
-rw-r--r--chromium/media/formats/webm/webm_stream_parser_unittest.cc25
-rw-r--r--chromium/media/formats/webm/webm_video_client.cc6
-rw-r--r--chromium/media/gpu/BUILD.gn31
-rw-r--r--chromium/media/gpu/DEPS1
-rw-r--r--chromium/media/gpu/OWNERS3
-rw-r--r--chromium/media/gpu/accelerated_video_decoder.h14
-rw-r--r--chromium/media/gpu/android/android_image_reader_abi.h95
-rw-r--r--chromium/media/gpu/android/android_image_reader_compat.cc138
-rw-r--r--chromium/media/gpu/android/android_image_reader_compat.h75
-rw-r--r--chromium/media/gpu/android/android_image_reader_compat_unittest.cc47
-rw-r--r--chromium/media/gpu/android/android_video_decode_accelerator.cc38
-rw-r--r--chromium/media/gpu/android/android_video_decode_accelerator.h12
-rw-r--r--chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc19
-rw-r--r--chromium/media/gpu/android/android_video_encode_accelerator.cc22
-rw-r--r--chromium/media/gpu/android/avda_codec_allocator.h1
-rw-r--r--chromium/media/gpu/android/avda_codec_image.cc15
-rw-r--r--chromium/media/gpu/android/avda_codec_image.h2
-rw-r--r--chromium/media/gpu/android/avda_picture_buffer_manager.cc2
-rw-r--r--chromium/media/gpu/android/avda_surface_bundle.h1
-rw-r--r--chromium/media/gpu/android/codec_image.cc22
-rw-r--r--chromium/media/gpu/android/codec_image.h7
-rw-r--r--chromium/media/gpu/android/codec_image_group.cc6
-rw-r--r--chromium/media/gpu/android/codec_image_group_unittest.cc14
-rw-r--r--chromium/media/gpu/android/codec_wrapper.cc25
-rw-r--r--chromium/media/gpu/android/codec_wrapper_unittest.cc22
-rw-r--r--chromium/media/gpu/android/content_video_view_overlay.cc79
-rw-r--r--chromium/media/gpu/android/content_video_view_overlay.h51
-rw-r--r--chromium/media/gpu/android/content_video_view_overlay_allocator.cc152
-rw-r--r--chromium/media/gpu/android/content_video_view_overlay_allocator.h86
-rw-r--r--chromium/media/gpu/android/content_video_view_overlay_allocator_unittest.cc155
-rw-r--r--chromium/media/gpu/android/image_reader_gl_owner.cc347
-rw-r--r--chromium/media/gpu/android/image_reader_gl_owner.h74
-rw-r--r--chromium/media/gpu/android/image_reader_gl_owner_unittest.cc106
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.cc42
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.h8
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder_unittest.cc2
-rw-r--r--chromium/media/gpu/android/surface_texture_gl_owner.cc23
-rw-r--r--chromium/media/gpu/android/surface_texture_gl_owner.h15
-rw-r--r--chromium/media/gpu/android/texture_owner.cc31
-rw-r--r--chromium/media/gpu/android/texture_owner.h5
-rw-r--r--chromium/media/gpu/android/texture_pool.cc72
-rw-r--r--chromium/media/gpu/android/texture_pool.h28
-rw-r--r--chromium/media/gpu/android/texture_pool_unittest.cc81
-rw-r--r--chromium/media/gpu/android/texture_wrapper.cc22
-rw-r--r--chromium/media/gpu/android/texture_wrapper.h43
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.cc84
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.h6
-rw-r--r--chromium/media/gpu/codec_picture.h6
-rw-r--r--chromium/media/gpu/command_buffer_helper.cc64
-rw-r--r--chromium/media/gpu/command_buffer_helper.h10
-rw-r--r--chromium/media/gpu/fake_command_buffer_helper.cc2
-rw-r--r--chromium/media/gpu/fake_command_buffer_helper.h2
-rw-r--r--chromium/media/gpu/fake_jpeg_decode_accelerator.cc15
-rw-r--r--chromium/media/gpu/fake_jpeg_decode_accelerator.h4
-rw-r--r--chromium/media/gpu/gles2_decoder_helper.cc93
-rw-r--r--chromium/media/gpu/gles2_decoder_helper.h20
-rw-r--r--chromium/media/gpu/gpu_video_accelerator_util.cc4
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_factory.cc2
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_factory.h2
-rw-r--r--chromium/media/gpu/gpu_video_encode_accelerator_factory.h2
-rw-r--r--chromium/media/gpu/h264_decoder.cc163
-rw-r--r--chromium/media/gpu/h264_decoder.h102
-rw-r--r--chromium/media/gpu/h264_decoder_unittest.cc238
-rw-r--r--chromium/media/gpu/image_processor.h74
-rw-r--r--chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc2
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.cc2
-rw-r--r--chromium/media/gpu/jpeg_decode_accelerator_unittest.cc411
-rw-r--r--chromium/media/gpu/jpeg_encode_accelerator_unittest.cc4
-rw-r--r--chromium/media/gpu/shared_memory_region.cc30
-rw-r--r--chromium/media/gpu/shared_memory_region.h60
-rw-r--r--chromium/media/gpu/v4l2/v4l2_image_processor.cc176
-rw-r--r--chromium/media/gpu/v4l2/v4l2_image_processor.h57
-rw-r--r--chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.cc35
-rw-r--r--chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.h6
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc87
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc76
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h13
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc97
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h16
-rw-r--r--chromium/media/gpu/vaapi/accelerated_video_encoder.cc7
-rw-r--r--chromium/media/gpu/vaapi/accelerated_video_encoder.h9
-rw-r--r--chromium/media/gpu/vaapi/h264_encoder.cc19
-rw-r--r--chromium/media/gpu/vaapi/h264_encoder.h3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc44
-rw-r--r--chromium/media/gpu/vaapi/vaapi_h264_accelerator.h31
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.cc66
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.h35
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc20
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc20
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h10
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture.h5
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_factory.cc38
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_factory.h12
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.cc89
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.cc58
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_tfp.cc32
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc72
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h1
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc13
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc64
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h9
-rw-r--r--chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_vp9_accelerator.cc3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.cc20
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.h4
-rw-r--r--chromium/media/gpu/vaapi/vp8_encoder.cc17
-rw-r--r--chromium/media/gpu/vaapi/vp8_encoder.h8
-rw-r--r--chromium/media/gpu/video_decode_accelerator_unittest.cc106
-rw-r--r--chromium/media/gpu/video_encode_accelerator_unittest.cc126
-rw-r--r--chromium/media/gpu/vp8_decoder.cc39
-rw-r--r--chromium/media/gpu/vp8_decoder.h5
-rw-r--r--chromium/media/gpu/vp8_decoder_unittest.cc248
-rw-r--r--chromium/media/gpu/vp9_decoder.cc13
-rw-r--r--chromium/media/gpu/vp9_decoder.h2
-rw-r--r--chromium/media/gpu/vt_video_decode_accelerator_mac.cc1
-rw-r--r--chromium/media/gpu/vt_video_encode_accelerator_mac.cc8
-rw-r--r--chromium/media/gpu/windows/OWNERS2
-rw-r--r--chromium/media/gpu/windows/d3d11_cdm_proxy.cc242
-rw-r--r--chromium/media/gpu/windows/d3d11_cdm_proxy.h29
-rw-r--r--chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc368
-rw-r--r--chromium/media/gpu/windows/d3d11_create_device_cb.h33
-rw-r--r--chromium/media/gpu/windows/d3d11_decryptor.cc280
-rw-r--r--chromium/media/gpu/windows/d3d11_decryptor.h58
-rw-r--r--chromium/media/gpu/windows/d3d11_decryptor_unittest.cc523
-rw-r--r--chromium/media/gpu/windows/d3d11_h264_accelerator.cc199
-rw-r--r--chromium/media/gpu/windows/d3d11_h264_accelerator.h50
-rw-r--r--chromium/media/gpu/windows/d3d11_mocks.cc12
-rw-r--r--chromium/media/gpu/windows/d3d11_mocks.h110
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer.cc14
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer.h2
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.cc76
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.h31
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_impl.cc44
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_impl.h8
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc57
-rw-r--r--chromium/media/gpu/windows/dxva_picture_buffer_win.cc4
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc15
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h2
-rw-r--r--chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc10
-rw-r--r--chromium/media/media_options.gni81
-rw-r--r--chromium/media/midi/midi_manager_alsa.cc10
-rw-r--r--chromium/media/mojo/BUILD.gn2
-rw-r--r--chromium/media/mojo/DEPS2
-rw-r--r--chromium/media/mojo/clients/BUILD.gn5
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder.cc4
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder.h1
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.cc29
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.h7
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_factory.cc5
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_unittest.cc7
-rw-r--r--chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc14
-rw-r--r--chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.h9
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.cc17
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.h1
-rw-r--r--chromium/media/mojo/clients/mojo_video_encode_accelerator.cc31
-rw-r--r--chromium/media/mojo/clients/mojo_video_encode_accelerator.h2
-rw-r--r--chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc57
-rw-r--r--chromium/media/mojo/common/mojo_shared_buffer_video_frame.cc36
-rw-r--r--chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc50
-rw-r--r--chromium/media/mojo/interfaces/BUILD.gn4
-rw-r--r--chromium/media/mojo/interfaces/application_session_id_manager.mojom13
-rw-r--r--chromium/media/mojo/interfaces/audio_data_pipe.mojom20
-rw-r--r--chromium/media/mojo/interfaces/audio_logging.mojom18
-rw-r--r--chromium/media/mojo/interfaces/audio_output_stream.mojom2
-rw-r--r--chromium/media/mojo/interfaces/key_system_support.mojom39
-rw-r--r--chromium/media/mojo/interfaces/media_drm_storage.mojom16
-rw-r--r--chromium/media/mojo/interfaces/video_decoder_config_struct_traits.cc2
-rw-r--r--chromium/media/mojo/interfaces/video_encode_accelerator.mojom27
-rw-r--r--chromium/media/mojo/interfaces/video_encode_accelerator.typemap9
-rw-r--r--chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.cc69
-rw-r--r--chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.h52
-rw-r--r--chromium/media/mojo/interfaces/watch_time_recorder.mojom37
-rw-r--r--chromium/media/mojo/services/BUILD.gn2
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.h2
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.cc3
-rw-r--r--chromium/media/mojo/services/media_manifest.json3
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.cc20
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.h7
-rw-r--r--chromium/media/mojo/services/media_metrics_provider_unittest.cc3
-rw-r--r--chromium/media/mojo/services/media_service_factory.h2
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream.cc6
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream.h2
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc11
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream.cc7
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream.h2
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc12
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc24
-rw-r--r--chromium/media/mojo/services/mojo_cdm_allocator.cc11
-rw-r--r--chromium/media/mojo/services/mojo_cdm_proxy.cc5
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service_context.cc8
-rw-r--r--chromium/media/mojo/services/mojo_media_client.cc1
-rw-r--r--chromium/media/mojo/services/mojo_media_client.h1
-rw-r--r--chromium/media/mojo/services/mojo_media_log.cc32
-rw-r--r--chromium/media/mojo/services/mojo_media_log.h18
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.cc11
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_service.cc20
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_service.h12
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc52
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.cc1
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.h1
-rw-r--r--chromium/media/mojo/services/video_decode_perf_history.cc33
-rw-r--r--chromium/media/mojo/services/video_decode_perf_history.h33
-rw-r--r--chromium/media/mojo/services/video_decode_perf_history_unittest.cc136
-rw-r--r--chromium/media/mojo/services/video_decode_stats_recorder.cc17
-rw-r--r--chromium/media/mojo/services/video_decode_stats_recorder.h7
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.cc320
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.h35
-rw-r--r--chromium/media/mojo/services/watch_time_recorder_unittest.cc510
-rw-r--r--chromium/media/remoting/proto_enum_utils.cc20
-rw-r--r--chromium/media/remoting/renderer_controller_unittest.cc5
-rw-r--r--chromium/media/remoting/rpc.proto10
-rw-r--r--chromium/media/renderers/BUILD.gn8
-rw-r--r--chromium/media/renderers/DEPS14
-rw-r--r--chromium/media/renderers/audio_renderer_impl.cc25
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.cc1
-rw-r--r--chromium/media/renderers/video_overlay_factory.cc1
-rw-r--r--chromium/media/renderers/video_renderer_impl.cc22
-rw-r--r--chromium/media/renderers/video_resource_updater.cc1154
-rw-r--r--chromium/media/renderers/video_resource_updater.h223
-rw-r--r--chromium/media/renderers/video_resource_updater_unittest.cc751
-rw-r--r--chromium/media/test/BUILD.gn8
-rw-r--r--chromium/media/video/fake_video_encode_accelerator.cc12
-rw-r--r--chromium/media/video/fake_video_encode_accelerator.h7
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool.cc12
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc2
-rw-r--r--chromium/media/video/video_decode_accelerator.h1
-rw-r--r--chromium/media/video/video_encode_accelerator.cc22
-rw-r--r--chromium/media/video/video_encode_accelerator.h48
571 files changed, 19070 insertions, 8501 deletions
diff --git a/chromium/media/DEPS b/chromium/media/DEPS
index cae84d3e449..6214a75c31c 100644
--- a/chromium/media/DEPS
+++ b/chromium/media/DEPS
@@ -8,6 +8,7 @@ include_rules = [
"+gpu",
"+jni",
"+mojo/public/cpp/bindings/callback_helpers.h",
+ "+mojo/public/cpp/system/platform_handle.h",
"+services/ui/public/cpp/gpu/context_provider_command_buffer.h",
"+skia/ext",
"+third_party/ffmpeg",
@@ -29,10 +30,5 @@ include_rules = [
specific_include_rules = {
"audio_manager_unittest.cc": [
"+chromeos/dbus"
- ],
- # TODO(https://crbug.com/844508): Remove this dependency once the
- # AudioOutputDevice shared memory refactor is done.
- "audio_output_device_unittest.cc": [
- "+mojo/public/cpp/system/platform_handle.h"
]
}
diff --git a/chromium/media/README.md b/chromium/media/README.md
index 71ad42ffb92..01b21adc2e9 100644
--- a/chromium/media/README.md
+++ b/chromium/media/README.md
@@ -180,4 +180,9 @@ recommendations:
MediaLog will send logs to `about://media-internals`, which is easily accessible
by developers (including web developes), testers and even users to get detailed
information about a playback instance. For guidance on how to use MediaLog, see
-`media/base/media_log.h`. \ No newline at end of file
+`media/base/media_log.h`.
+
+MediaLog messages should be concise and free of implementation details. Error
+messages should provide clues as to how to fix them, usually by precisely
+describing the circumstances that led to the error. Use properties, rather
+than messages, to record metadata and state changes.
diff --git a/chromium/media/audio/BUILD.gn b/chromium/media/audio/BUILD.gn
index 8754161b0c9..585ad87c6cb 100644
--- a/chromium/media/audio/BUILD.gn
+++ b/chromium/media/audio/BUILD.gn
@@ -107,6 +107,8 @@ source_set("audio") {
"audio_output_delegate.h",
"audio_output_device.cc",
"audio_output_device.h",
+ "audio_output_device_thread_callback.cc",
+ "audio_output_device_thread_callback.h",
"audio_output_dispatcher.cc",
"audio_output_dispatcher.h",
"audio_output_dispatcher_impl.cc",
@@ -315,7 +317,7 @@ source_set("audio") {
"fuchsia/audio_output_stream_fuchsia.cc",
"fuchsia/audio_output_stream_fuchsia.h",
]
- libs += [ "media_client" ]
+ deps += [ "//third_party/fuchsia-sdk:media" ]
}
configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
@@ -353,8 +355,6 @@ static_library("test_support") {
"mock_audio_manager.h",
"mock_audio_source_callback.cc",
"mock_audio_source_callback.h",
- "test_audio_input_controller_factory.cc",
- "test_audio_input_controller_factory.h",
"test_audio_thread.cc",
"test_audio_thread.h",
]
@@ -406,12 +406,6 @@ source_set("unit_tests") {
"//base",
"//base/test:test_support",
"//media:test_support",
-
- # TODO(https://crbug.com/844508): Mojo is used in the
- # audio_output_device_unittest.cc for conversion between shared memory
- # types. Remove this dependency once the AudioOutputDevice shared memory
- # refactor is done.
- "//mojo/public/cpp/system:system",
"//testing/gmock",
"//testing/gtest",
"//url",
diff --git a/chromium/media/audio/audio_device_thread.cc b/chromium/media/audio/audio_device_thread.cc
index 719ebeae502..beffbcc113d 100644
--- a/chromium/media/audio/audio_device_thread.cc
+++ b/chromium/media/audio/audio_device_thread.cc
@@ -14,19 +14,13 @@ namespace media {
// AudioDeviceThread::Callback implementation
AudioDeviceThread::Callback::Callback(const AudioParameters& audio_parameters,
- base::SharedMemoryHandle memory,
- bool read_only_memory,
uint32_t segment_length,
uint32_t total_segments)
: audio_parameters_(audio_parameters),
memory_length_(
base::CheckMul(segment_length, total_segments).ValueOrDie()),
total_segments_(total_segments),
- segment_length_(segment_length),
- // CHECK that the shared memory is large enough. The memory allocated
- // must be at least as large as expected.
- shared_memory_((CHECK(memory_length_ <= memory.GetSize()), memory),
- read_only_memory) {
+ segment_length_(segment_length) {
CHECK_GT(total_segments_, 0u);
thread_checker_.DetachFromThread();
}
@@ -39,19 +33,18 @@ void AudioDeviceThread::Callback::InitializeOnAudioThread() {
// another thread before we get here.
DCHECK(thread_checker_.CalledOnValidThread())
<< "Thread checker was attached on the wrong thread";
- DCHECK(!shared_memory_.memory());
MapSharedMemory();
- CHECK(shared_memory_.memory());
}
// AudioDeviceThread implementation
AudioDeviceThread::AudioDeviceThread(Callback* callback,
base::SyncSocket::Handle socket,
- const char* thread_name)
+ const char* thread_name,
+ base::ThreadPriority thread_priority)
: callback_(callback), thread_name_(thread_name), socket_(socket) {
- CHECK(base::PlatformThread::CreateWithPriority(
- 0, this, &thread_handle_, base::ThreadPriority::REALTIME_AUDIO));
+ CHECK(base::PlatformThread::CreateWithPriority(0, this, &thread_handle_,
+ thread_priority));
DCHECK(!thread_handle_.is_null());
}
diff --git a/chromium/media/audio/audio_device_thread.h b/chromium/media/audio/audio_device_thread.h
index f5aa297601a..6aecb90ecf3 100644
--- a/chromium/media/audio/audio_device_thread.h
+++ b/chromium/media/audio/audio_device_thread.h
@@ -30,16 +30,14 @@ class MEDIA_EXPORT AudioDeviceThread : public base::PlatformThread::Delegate {
class Callback {
public:
Callback(const AudioParameters& audio_parameters,
- base::SharedMemoryHandle memory,
- bool read_only_memory,
uint32_t segment_length,
uint32_t total_segments);
// One time initialization for the callback object on the audio thread.
void InitializeOnAudioThread();
- // Derived implementations must call shared_memory_.Map appropriately
- // before Process can be called.
+ // Derived implementations must map shared memory appropriately before
+ // Process can be called.
virtual void MapSharedMemory() = 0;
// Called whenever we receive notifications about pending input data.
@@ -57,8 +55,6 @@ class MEDIA_EXPORT AudioDeviceThread : public base::PlatformThread::Delegate {
const uint32_t total_segments_;
const uint32_t segment_length_;
- base::SharedMemory shared_memory_;
-
// Detached in constructor and attached in InitializeOnAudioThread() which
// is called on the audio device thread. Sub-classes can then use it for
// various thread checking purposes.
@@ -71,7 +67,8 @@ class MEDIA_EXPORT AudioDeviceThread : public base::PlatformThread::Delegate {
// Creates and automatically starts the audio thread.
AudioDeviceThread(Callback* callback,
base::SyncSocket::Handle socket,
- const char* thread_name);
+ const char* thread_name,
+ base::ThreadPriority thread_priority);
// This tells the audio thread to stop and clean up the data; this is a
// synchronous process and the thread will stop before the method returns.
diff --git a/chromium/media/audio/audio_input_controller.h b/chromium/media/audio/audio_input_controller.h
index 9705fccd4ac..fffdce3a312 100644
--- a/chromium/media/audio/audio_input_controller.h
+++ b/chromium/media/audio/audio_input_controller.h
@@ -18,6 +18,9 @@
#include "media/base/audio_parameters.h"
#include "media/base/media_export.h"
+// Deprecated! https://crbug.com/854612. You may be looking for
+// services/audio/public/cpp/device_factory.h.
+
// An AudioInputController controls an AudioInputStream and records data
// from this input stream. The two main methods are Record() and Close() and
// they are both executed on the audio thread which is injected by the two
@@ -83,7 +86,9 @@ class AudioBus;
class UserInputMonitor;
-class MEDIA_EXPORT AudioInputController
+// Deprecated! https://crbug.com/854612. You may be looking for
+// services/audio/public/cpp/device_factory.h.
+class MEDIA_EXPORT AudioInputController final
: public base::RefCountedThreadSafe<AudioInputController> {
public:
// Error codes to make native logging more clear. These error codes are added
diff --git a/chromium/media/audio/audio_input_controller_unittest.cc b/chromium/media/audio/audio_input_controller_unittest.cc
index 3bc3203441d..cec4dda00d9 100644
--- a/chromium/media/audio/audio_input_controller_unittest.cc
+++ b/chromium/media/audio/audio_input_controller_unittest.cc
@@ -38,13 +38,10 @@ const double kMaxVolume = 1.0;
constexpr base::TimeDelta kOnMuteWaitTimeout =
base::TimeDelta::FromMilliseconds(1500);
-// Posts base::RunLoop::QuitCurrentWhenIdleClosureDeprecated() on specified
-// message loop after a certain number of calls given by |limit|.
-ACTION_P3(CheckCountAndPostQuitTask, count, limit, loop_or_proxy) {
- if (++*count >= limit) {
- loop_or_proxy->PostTask(
- FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
- }
+// Runs |quit_closure| after the |count| of calls reaches |limit|.
+ACTION_P3(CheckCountAndPostQuitTask, count, limit, quit_closure) {
+ if (++*count >= limit)
+ quit_closure.Run();
}
void RunLoopWithTimeout(base::RunLoop* run_loop, base::TimeDelta timeout) {
@@ -141,8 +138,9 @@ class AudioInputControllerTest : public testing::TestWithParam<bool> {
return;
}
- controller_->Close(base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
- base::RunLoop().Run();
+ base::RunLoop run_loop;
+ controller_->Close(run_loop.QuitClosure());
+ run_loop.Run();
}
base::MessageLoop message_loop_;
@@ -182,15 +180,16 @@ TEST_P(AudioInputControllerTest, CreateRecordAndClose) {
ASSERT_TRUE(controller_.get());
// Write() should be called ten times.
+ base::RunLoop run_loop;
EXPECT_CALL(sync_writer_, Write(NotNull(), _, _, _))
.Times(AtLeast(10))
.WillRepeatedly(
- CheckCountAndPostQuitTask(&count, 10, message_loop_.task_runner()));
+ CheckCountAndPostQuitTask(&count, 10, run_loop.QuitClosure()));
EXPECT_CALL(user_input_monitor_, EnableKeyPressMonitoring());
controller_->Record();
// Record and wait until ten Write() callbacks are received.
- base::RunLoop().Run();
+ run_loop.Run();
EXPECT_CALL(user_input_monitor_, DisableKeyPressMonitoring());
EXPECT_CALL(sync_writer_, Close());
diff --git a/chromium/media/audio/audio_input_device.cc b/chromium/media/audio/audio_input_device.cc
index facb465549b..e4a96cafb93 100644
--- a/chromium/media/audio/audio_input_device.cc
+++ b/chromium/media/audio/audio_input_device.cc
@@ -54,7 +54,7 @@ class AudioInputDevice::AudioThreadCallback
: public AudioDeviceThread::Callback {
public:
AudioThreadCallback(const AudioParameters& audio_parameters,
- base::SharedMemoryHandle memory,
+ base::ReadOnlySharedMemoryRegion shared_memory_region,
uint32_t total_segments,
CaptureCallback* capture_callback,
base::RepeatingClosure got_data_callback);
@@ -66,6 +66,8 @@ class AudioInputDevice::AudioThreadCallback
void Process(uint32_t pending_data) override;
private:
+ base::ReadOnlySharedMemoryRegion shared_memory_region_;
+ base::ReadOnlySharedMemoryMapping shared_memory_mapping_;
const base::TimeTicks start_time_;
bool no_callbacks_received_;
size_t current_segment_id_;
@@ -84,8 +86,10 @@ class AudioInputDevice::AudioThreadCallback
DISALLOW_COPY_AND_ASSIGN(AudioThreadCallback);
};
-AudioInputDevice::AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc)
- : callback_(nullptr),
+AudioInputDevice::AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc,
+ base::ThreadPriority thread_priority)
+ : thread_priority_(thread_priority),
+ callback_(nullptr),
ipc_(std::move(ipc)),
state_(IDLE),
agc_is_enabled_(false) {
@@ -129,8 +133,9 @@ void AudioInputDevice::Stop() {
"Media.Audio.Capture.DetectedMissingCallbacks",
alive_checker_ ? alive_checker_->DetectedDead() : false);
- UMA_HISTOGRAM_BOOLEAN("Media.Audio.Capture.StreamCallbackError",
- had_callback_error_);
+ UMA_HISTOGRAM_ENUMERATION("Media.Audio.Capture.StreamCallbackError2",
+ had_error_);
+ had_error_ = kNoError;
// Close the stream, if we haven't already.
if (state_ >= CREATING_STREAM) {
@@ -142,7 +147,7 @@ void AudioInputDevice::Stop() {
// We can run into an issue where Stop is called right after
// OnStreamCreated is called in cases where Start/Stop are called before we
// get the OnStreamCreated callback. To handle that corner case, we call
- // audio_tread.reset(). In most cases, the thread will already be stopped.
+ // audio_thread_.reset(). In most cases, the thread will already be stopped.
//
// |alive_checker_| must outlive |audio_callback_|.
base::ScopedAllowBlocking allow_blocking;
@@ -190,18 +195,19 @@ void AudioInputDevice::SetOutputDeviceForAec(
ipc_->SetOutputDeviceForAec(output_device_id);
}
-void AudioInputDevice::OnStreamCreated(base::SharedMemoryHandle handle,
- base::SyncSocket::Handle socket_handle,
- bool initially_muted) {
+void AudioInputDevice::OnStreamCreated(
+ base::ReadOnlySharedMemoryRegion shared_memory_region,
+ base::SyncSocket::Handle socket_handle,
+ bool initially_muted) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT0("audio", "AudioInputDevice::OnStreamCreated");
- DCHECK(base::SharedMemory::IsHandleValid(handle));
+ DCHECK(shared_memory_region.IsValid());
#if defined(OS_WIN)
DCHECK(socket_handle);
#else
DCHECK_GE(socket_handle, 0);
#endif
- DCHECK_GT(handle.GetSize(), 0u);
+ DCHECK_GT(shared_memory_region.GetSize(), 0u);
if (state_ != CREATING_STREAM)
return;
@@ -239,11 +245,13 @@ void AudioInputDevice::OnStreamCreated(base::SharedMemoryHandle handle,
// Unretained is safe since |alive_checker_| outlives |audio_callback_|.
audio_callback_ = std::make_unique<AudioInputDevice::AudioThreadCallback>(
- audio_parameters_, handle, kRequestedSharedMemoryCount, callback_,
+ audio_parameters_, std::move(shared_memory_region),
+ kRequestedSharedMemoryCount, callback_,
base::BindRepeating(&AliveChecker::NotifyAlive,
base::Unretained(alive_checker_.get())));
- audio_thread_ = std::make_unique<AudioDeviceThread>(
- audio_callback_.get(), socket_handle, "AudioInputDevice");
+ audio_thread_ =
+ std::make_unique<AudioDeviceThread>(audio_callback_.get(), socket_handle,
+ "AudioInputDevice", thread_priority_);
state_ = RECORDING;
ipc_->RecordStream();
@@ -260,7 +268,6 @@ void AudioInputDevice::OnError() {
if (state_ < CREATING_STREAM)
return;
- had_callback_error_ = true;
if (state_ == CREATING_STREAM) {
// At this point, we haven't attempted to start the audio thread.
@@ -269,6 +276,7 @@ void AudioInputDevice::OnError() {
// We must report the error to the |callback_| so that a potential
// audio source object will enter the correct state (e.g. 'ended' for
// a local audio source).
+ had_error_ = kErrorDuringCreation;
callback_->OnCaptureError(
"Maximum allowed input device limit reached or OS failure.");
} else {
@@ -278,6 +286,7 @@ void AudioInputDevice::OnError() {
// TODO(tommi): Add an explicit contract for clearing the callback
// object. Possibly require calling Initialize again or provide
// a callback object via Start() and clear it in Stop().
+ had_error_ = kErrorDuringCapture;
if (audio_thread_)
callback_->OnCaptureError("IPC delegate state error.");
}
@@ -318,16 +327,15 @@ void AudioInputDevice::DetectedDeadInputStream() {
// AudioInputDevice::AudioThreadCallback
AudioInputDevice::AudioThreadCallback::AudioThreadCallback(
const AudioParameters& audio_parameters,
- base::SharedMemoryHandle memory,
+ base::ReadOnlySharedMemoryRegion shared_memory_region,
uint32_t total_segments,
CaptureCallback* capture_callback,
base::RepeatingClosure got_data_callback_)
: AudioDeviceThread::Callback(
audio_parameters,
- memory,
- /*read only*/ true,
ComputeAudioInputBufferSize(audio_parameters, 1u),
total_segments),
+ shared_memory_region_(std::move(shared_memory_region)),
start_time_(base::TimeTicks::Now()),
no_callbacks_received_(true),
current_segment_id_(0u),
@@ -336,7 +344,11 @@ AudioInputDevice::AudioThreadCallback::AudioThreadCallback(
got_data_callback_interval_in_frames_(kGotDataCallbackIntervalSeconds *
audio_parameters.sample_rate()),
frames_since_last_got_data_callback_(0),
- got_data_callback_(std::move(got_data_callback_)) {}
+ got_data_callback_(std::move(got_data_callback_)) {
+ // CHECK that the shared memory is large enough. The memory allocated must
+ // be at least as large as expected.
+ CHECK_LE(memory_length_, shared_memory_region_.GetSize());
+}
AudioInputDevice::AudioThreadCallback::~AudioThreadCallback() {
UMA_HISTOGRAM_LONG_TIMES("Media.Audio.Capture.InputStreamDuration",
@@ -344,10 +356,11 @@ AudioInputDevice::AudioThreadCallback::~AudioThreadCallback() {
}
void AudioInputDevice::AudioThreadCallback::MapSharedMemory() {
- shared_memory_.Map(memory_length_);
+ shared_memory_mapping_ = shared_memory_region_.MapAt(0, memory_length_);
// Create vector of audio buses by wrapping existing blocks of memory.
- const uint8_t* ptr = static_cast<const uint8_t*>(shared_memory_.memory());
+ const uint8_t* ptr =
+ static_cast<const uint8_t*>(shared_memory_mapping_.memory());
for (uint32_t i = 0; i < total_segments_; ++i) {
const media::AudioInputBuffer* buffer =
reinterpret_cast<const media::AudioInputBuffer*>(ptr);
@@ -375,7 +388,8 @@ void AudioInputDevice::AudioThreadCallback::Process(uint32_t pending_data) {
// The shared memory represents parameters, size of the data buffer and the
// actual data buffer containing audio data. Map the memory into this
// structure and parse out parameters and the data area.
- const uint8_t* ptr = static_cast<const uint8_t*>(shared_memory_.memory());
+ const uint8_t* ptr =
+ static_cast<const uint8_t*>(shared_memory_mapping_.memory());
ptr += current_segment_id_ * segment_length_;
const AudioInputBuffer* buffer =
reinterpret_cast<const AudioInputBuffer*>(ptr);
diff --git a/chromium/media/audio/audio_input_device.h b/chromium/media/audio/audio_input_device.h
index bc6bfb43022..17351241557 100644
--- a/chromium/media/audio/audio_input_device.h
+++ b/chromium/media/audio/audio_input_device.h
@@ -50,9 +50,10 @@
#include "base/compiler_specific.h"
#include "base/macros.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/read_only_shared_memory_region.h"
#include "base/optional.h"
#include "base/sequence_checker.h"
+#include "base/threading/platform_thread.h"
#include "base/time/time.h"
#include "media/audio/alive_checker.h"
#include "media/audio/audio_device_thread.h"
@@ -67,7 +68,8 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
public AudioInputIPCDelegate {
public:
// NOTE: Clients must call Initialize() before using.
- AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc);
+ AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc,
+ base::ThreadPriority thread_priority);
// AudioCapturerSource implementation.
void Initialize(const AudioParameters& params,
@@ -92,10 +94,20 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
RECORDING, // Receiving audio data.
};
+ // This enum is used for UMA, so the only allowed operation on this definition
+ // is to add new states to the bottom, update kMaxValue, and update the
+ // histogram "Media.Audio.Capture.StreamCallbackError2".
+ enum Error {
+ kNoError = 0,
+ kErrorDuringCreation = 1,
+ kErrorDuringCapture = 2,
+ kMaxValue = kErrorDuringCapture
+ };
+
~AudioInputDevice() override;
// AudioInputIPCDelegate implementation.
- void OnStreamCreated(base::SharedMemoryHandle handle,
+ void OnStreamCreated(base::ReadOnlySharedMemoryRegion shared_memory_region,
base::SyncSocket::Handle socket_handle,
bool initially_muted) override;
void OnError() override;
@@ -108,6 +120,8 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
AudioParameters audio_parameters_;
+ const base::ThreadPriority thread_priority_;
+
CaptureCallback* callback_;
// A pointer to the IPC layer that takes care of sending requests over to
@@ -117,8 +131,8 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
// Current state. See comments for State enum above.
State state_;
- // For UMA stats.
- bool had_callback_error_ = false;
+ // For UMA stats. May only be accessed on the IO thread.
+ Error had_error_ = kNoError;
// Stores the Automatic Gain Control state. Default is false.
bool agc_is_enabled_;
diff --git a/chromium/media/audio/audio_input_device_unittest.cc b/chromium/media/audio/audio_input_device_unittest.cc
index ad5c30e412a..e1f7a552267 100644
--- a/chromium/media/audio/audio_input_device_unittest.cc
+++ b/chromium/media/audio/audio_input_device_unittest.cc
@@ -16,11 +16,11 @@
#include "testing/gtest/include/gtest/gtest.h"
using base::CancelableSyncSocket;
-using base::SharedMemory;
using base::SyncSocket;
using testing::_;
using testing::DoAll;
using testing::Invoke;
+using testing::InvokeWithoutArgs;
namespace media {
@@ -66,8 +66,8 @@ class MockCaptureCallback : public AudioCapturerSource::CaptureCallback {
TEST(AudioInputDeviceTest, Noop) {
base::MessageLoopForIO io_loop;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
- scoped_refptr<AudioInputDevice> device(
- new AudioInputDevice(base::WrapUnique(input_ipc)));
+ scoped_refptr<AudioInputDevice> device(new AudioInputDevice(
+ base::WrapUnique(input_ipc), base::ThreadPriority::REALTIME_AUDIO));
}
ACTION_P(ReportStateChange, device) {
@@ -81,8 +81,8 @@ TEST(AudioInputDeviceTest, FailToCreateStream) {
MockCaptureCallback callback;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
- scoped_refptr<AudioInputDevice> device(
- new AudioInputDevice(base::WrapUnique(input_ipc)));
+ scoped_refptr<AudioInputDevice> device(new AudioInputDevice(
+ base::WrapUnique(input_ipc), base::ThreadPriority::REALTIME_AUDIO));
device->Initialize(params, &callback);
EXPECT_CALL(*input_ipc, CreateStream(_, _, _, _))
.WillOnce(ReportStateChange(device.get()));
@@ -91,51 +91,50 @@ TEST(AudioInputDeviceTest, FailToCreateStream) {
device->Stop();
}
-ACTION_P3(ReportOnStreamCreated, device, handle, socket) {
- static_cast<AudioInputIPCDelegate*>(device)->OnStreamCreated(handle, socket,
- false);
-}
-
TEST(AudioInputDeviceTest, CreateStream) {
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
CHANNEL_LAYOUT_STEREO, 48000, 480);
- SharedMemory shared_memory;
+ base::MappedReadOnlyRegion shared_memory;
CancelableSyncSocket browser_socket;
CancelableSyncSocket renderer_socket;
const uint32_t memory_size =
media::ComputeAudioInputBufferSize(params, kMemorySegmentCount);
- ASSERT_TRUE(shared_memory.CreateAndMapAnonymous(memory_size));
- memset(shared_memory.memory(), 0xff, memory_size);
+ shared_memory = base::ReadOnlySharedMemoryRegion::Create(memory_size);
+ ASSERT_TRUE(shared_memory.IsValid());
+ memset(shared_memory.mapping.memory(), 0xff, memory_size);
ASSERT_TRUE(
CancelableSyncSocket::CreatePair(&browser_socket, &renderer_socket));
SyncSocket::TransitDescriptor audio_device_socket_descriptor;
ASSERT_TRUE(renderer_socket.PrepareTransitDescriptor(
base::GetCurrentProcessHandle(), &audio_device_socket_descriptor));
- base::SharedMemoryHandle duplicated_memory_handle =
- shared_memory.handle().Duplicate();
- ASSERT_TRUE(duplicated_memory_handle.IsValid());
+ base::ReadOnlySharedMemoryRegion duplicated_shared_memory_region =
+ shared_memory.region.Duplicate();
+ ASSERT_TRUE(duplicated_shared_memory_region.IsValid());
base::test::ScopedTaskEnvironment ste;
MockCaptureCallback callback;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
- scoped_refptr<AudioInputDevice> device(
- new AudioInputDevice(base::WrapUnique(input_ipc)));
+ scoped_refptr<AudioInputDevice> device(new AudioInputDevice(
+ base::WrapUnique(input_ipc), base::ThreadPriority::REALTIME_AUDIO));
device->Initialize(params, &callback);
EXPECT_CALL(*input_ipc, CreateStream(_, _, _, _))
- .WillOnce(ReportOnStreamCreated(
- device.get(), duplicated_memory_handle,
- SyncSocket::UnwrapHandle(audio_device_socket_descriptor)));
+ .WillOnce(InvokeWithoutArgs([&]() {
+ static_cast<AudioInputIPCDelegate*>(device.get())
+ ->OnStreamCreated(
+ std::move(duplicated_shared_memory_region),
+ SyncSocket::UnwrapHandle(audio_device_socket_descriptor),
+ false);
+ }));
EXPECT_CALL(*input_ipc, RecordStream());
EXPECT_CALL(callback, OnCaptureStarted());
device->Start();
EXPECT_CALL(*input_ipc, CloseStream());
device->Stop();
- duplicated_memory_handle.Close();
}
} // namespace media.
diff --git a/chromium/media/audio/audio_input_ipc.h b/chromium/media/audio/audio_input_ipc.h
index 231b7e08845..66b98953845 100644
--- a/chromium/media/audio/audio_input_ipc.h
+++ b/chromium/media/audio/audio_input_ipc.h
@@ -7,7 +7,7 @@
#include <stdint.h>
-#include "base/memory/shared_memory.h"
+#include "base/memory/read_only_shared_memory_region.h"
#include "base/sync_socket.h"
#include "media/base/audio_parameters.h"
#include "media/base/media_export.h"
@@ -22,9 +22,10 @@ class MEDIA_EXPORT AudioInputIPCDelegate {
// Called when an AudioInputController has been created.
// See media/mojo/interfaces/audio_data_pipe.mojom for documentation of
// |handle| and |socket_handle|.
- virtual void OnStreamCreated(base::SharedMemoryHandle handle,
- base::SyncSocket::Handle socket_handle,
- bool initially_muted) = 0;
+ virtual void OnStreamCreated(
+ base::ReadOnlySharedMemoryRegion shared_memory_region,
+ base::SyncSocket::Handle socket_handle,
+ bool initially_muted) = 0;
// Called when state of an audio stream has changed.
virtual void OnError() = 0;
diff --git a/chromium/media/audio/audio_manager_base.cc b/chromium/media/audio/audio_manager_base.cc
index f4e67c240ea..1affb149dd4 100644
--- a/chromium/media/audio/audio_manager_base.cc
+++ b/chromium/media/audio/audio_manager_base.cc
@@ -348,6 +348,11 @@ AudioOutputStream* AudioManagerBase::MakeAudioOutputStreamProxy(
// Ensure we only pass on valid output parameters.
if (output_params.IsValid()) {
+ if (params.effects() & AudioParameters::MULTIZONE) {
+ // Never turn off the multizone effect even if it is not preferred.
+ output_params.set_effects(output_params.effects() |
+ AudioParameters::MULTIZONE);
+ }
if (params.effects() != output_params.effects()) {
// Turn off effects that weren't requested.
output_params.set_effects(params.effects() & output_params.effects());
diff --git a/chromium/media/audio/audio_output_device.cc b/chromium/media/audio/audio_output_device.cc
index 575a3455ce1..09c7063dcdd 100644
--- a/chromium/media/audio/audio_output_device.cc
+++ b/chromium/media/audio/audio_output_device.cc
@@ -14,53 +14,18 @@
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
#include "base/single_thread_task_runner.h"
+#include "base/threading/platform_thread.h"
#include "base/threading/thread_restrictions.h"
#include "base/timer/timer.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/audio_output_controller.h"
+#include "media/audio/audio_output_device_thread_callback.h"
#include "media/base/limits.h"
namespace media {
-// Takes care of invoking the render callback on the audio thread.
-// An instance of this class is created for each capture stream in
-// OnStreamCreated().
-class AudioOutputDevice::AudioThreadCallback
- : public AudioDeviceThread::Callback {
- public:
- AudioThreadCallback(const AudioParameters& audio_parameters,
- base::SharedMemoryHandle memory,
- AudioRendererSink::RenderCallback* render_callback);
- ~AudioThreadCallback() override;
-
- void MapSharedMemory() override;
-
- // Called whenever we receive notifications about pending data.
- void Process(uint32_t control_signal) override;
-
- // Returns whether the current thread is the audio device thread or not.
- // Will always return true if DCHECKs are not enabled.
- bool CurrentThreadIsAudioDeviceThread();
-
- // Sets |first_play_start_time_| to the current time unless it's already set,
- // in which case it's a no-op. The first call to this method MUST have
- // completed by the time we recieve our first Process() callback to avoid
- // data races.
- void InitializePlayStartTime();
-
- private:
- const base::TimeTicks start_time_;
- // If set, this is used to record the startup duration UMA stat.
- base::Optional<base::TimeTicks> first_play_start_time_;
- AudioRendererSink::RenderCallback* render_callback_;
- std::unique_ptr<AudioBus> output_bus_;
- uint64_t callback_num_;
-
- DISALLOW_COPY_AND_ASSIGN(AudioThreadCallback);
-};
-
AudioOutputDevice::AudioOutputDevice(
std::unique_ptr<AudioOutputIPC> ipc,
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner,
@@ -199,7 +164,7 @@ void AudioOutputDevice::RequestDeviceAuthorizationOnIOThread() {
FROM_HERE, auth_timeout_,
base::BindRepeating(&AudioOutputDevice::OnDeviceAuthorized, this,
OUTPUT_DEVICE_STATUS_ERROR_TIMED_OUT,
- media::AudioParameters(), std::string()));
+ AudioParameters(), std::string()));
}
}
@@ -250,6 +215,10 @@ void AudioOutputDevice::ShutDownOnIOThread() {
// Destoy the timer on the thread it's used on.
auth_timeout_action_.reset();
+ UMA_HISTOGRAM_ENUMERATION("Media.Audio.Render.StreamCallbackError2",
+ had_error_);
+ had_error_ = kNoError;
+
// We can run into an issue where ShutDownOnIOThread is called right after
// OnStreamCreated is called in cases where Start/Stop are called before we
// get the OnStreamCreated callback. To handle that corner case, we call
@@ -263,9 +232,6 @@ void AudioOutputDevice::ShutDownOnIOThread() {
audio_thread_.reset();
audio_callback_.reset();
stopping_hack_ = false;
-
- UMA_HISTOGRAM_BOOLEAN("Media.Audio.Render.StreamCallbackError",
- had_callback_error_);
}
void AudioOutputDevice::SetVolumeOnIOThread(double volume) {
@@ -283,7 +249,6 @@ void AudioOutputDevice::OnError() {
if (state_ == IDLE)
return;
- had_callback_error_ = true;
// Don't dereference the callback object if the audio thread
// is stopped or stopping. That could mean that the callback
// object has been deleted.
@@ -295,7 +260,7 @@ void AudioOutputDevice::OnError() {
void AudioOutputDevice::OnDeviceAuthorized(
OutputDeviceStatus device_status,
- const media::AudioParameters& output_params,
+ const AudioParameters& output_params,
const std::string& matched_device_id) {
DCHECK(io_task_runner_->BelongsToCurrentThread());
@@ -357,19 +322,20 @@ void AudioOutputDevice::OnDeviceAuthorized(
}
}
-void AudioOutputDevice::OnStreamCreated(base::SharedMemoryHandle handle,
- base::SyncSocket::Handle socket_handle,
- bool playing_automatically) {
+void AudioOutputDevice::OnStreamCreated(
+ base::UnsafeSharedMemoryRegion shared_memory_region,
+ base::SyncSocket::Handle socket_handle,
+ bool playing_automatically) {
TRACE_EVENT0("audio", "AudioOutputDevice::OnStreamCreated")
DCHECK(io_task_runner_->BelongsToCurrentThread());
- DCHECK(base::SharedMemory::IsHandleValid(handle));
+ DCHECK(shared_memory_region.IsValid());
#if defined(OS_WIN)
DCHECK(socket_handle);
#else
DCHECK_GE(socket_handle, 0);
#endif
- DCHECK_GT(handle.GetSize(), 0u);
+ DCHECK_GT(shared_memory_region.GetSize(), 0u);
if (state_ != STREAM_CREATION_REQUESTED)
return;
@@ -394,12 +360,14 @@ void AudioOutputDevice::OnStreamCreated(base::SharedMemoryHandle handle,
DCHECK(!audio_thread_);
DCHECK(!audio_callback_);
- audio_callback_.reset(new AudioOutputDevice::AudioThreadCallback(
- audio_parameters_, handle, callback_));
+ audio_callback_.reset(new AudioOutputDeviceThreadCallback(
+ audio_parameters_, std::move(shared_memory_region), callback_,
+ std::make_unique<AudioOutputDeviceThreadCallback::Metrics>()));
if (playing_automatically)
audio_callback_->InitializePlayStartTime();
audio_thread_.reset(new AudioDeviceThread(
- audio_callback_.get(), socket_handle, "AudioOutputDevice"));
+ audio_callback_.get(), socket_handle, "AudioOutputDevice",
+ base::ThreadPriority::REALTIME_AUDIO));
}
}
@@ -421,101 +389,14 @@ void AudioOutputDevice::NotifyRenderCallbackOfError() {
base::AutoLock auto_lock(audio_thread_lock_);
// Avoid signaling error if Initialize() hasn't been called yet, or if
// Stop() has already been called.
- if (callback_ && !stopping_hack_)
+ if (callback_ && !stopping_hack_) {
+ // Update |had_error_| for UMA stats.
+ if (audio_callback_)
+ had_error_ = kErrorDuringRendering;
+ else
+ had_error_ = kErrorDuringCreation;
callback_->OnRenderError();
-}
-
-// AudioOutputDevice::AudioThreadCallback
-
-AudioOutputDevice::AudioThreadCallback::AudioThreadCallback(
- const AudioParameters& audio_parameters,
- base::SharedMemoryHandle memory,
- AudioRendererSink::RenderCallback* render_callback)
- : AudioDeviceThread::Callback(
- audio_parameters,
- memory,
- /*read only*/ false,
- ComputeAudioOutputBufferSize(audio_parameters),
- /*segment count*/ 1),
- start_time_(base::TimeTicks::Now()),
- first_play_start_time_(base::nullopt),
- render_callback_(render_callback),
- callback_num_(0) {}
-
-AudioOutputDevice::AudioThreadCallback::~AudioThreadCallback() {
- UMA_HISTOGRAM_LONG_TIMES("Media.Audio.Render.OutputStreamDuration",
- base::TimeTicks::Now() - start_time_);
-}
-
-void AudioOutputDevice::AudioThreadCallback::MapSharedMemory() {
- CHECK_EQ(total_segments_, 1u);
- CHECK(shared_memory_.Map(memory_length_));
-
- AudioOutputBuffer* buffer =
- reinterpret_cast<AudioOutputBuffer*>(shared_memory_.memory());
- output_bus_ = AudioBus::WrapMemory(audio_parameters_, buffer->audio);
- output_bus_->set_is_bitstream_format(audio_parameters_.IsBitstreamFormat());
-}
-
-// Called whenever we receive notifications about pending data.
-void AudioOutputDevice::AudioThreadCallback::Process(uint32_t control_signal) {
- callback_num_++;
-
- // Read and reset the number of frames skipped.
- AudioOutputBuffer* buffer =
- reinterpret_cast<AudioOutputBuffer*>(shared_memory_.memory());
- uint32_t frames_skipped = buffer->params.frames_skipped;
- buffer->params.frames_skipped = 0;
-
- base::TimeDelta delay =
- base::TimeDelta::FromMicroseconds(buffer->params.delay_us);
-
- base::TimeTicks delay_timestamp =
- base::TimeTicks() +
- base::TimeDelta::FromMicroseconds(buffer->params.delay_timestamp_us);
-
- TRACE_EVENT_BEGIN2("audio", "AudioOutputDevice::FireRenderCallback",
- "callback_num", callback_num_, "frames skipped",
- frames_skipped);
- DVLOG(4) << __func__ << " delay:" << delay << " delay_timestamp:" << delay
- << " frames_skipped:" << frames_skipped;
-
- // When playback starts, we get an immediate callback to Process to make sure
- // that we have some data, we'll get another one after the device is awake and
- // ingesting data, which is what we want to track with this trace.
- if (callback_num_ == 2) {
- if (first_play_start_time_) {
- UMA_HISTOGRAM_TIMES("Media.Audio.Render.OutputDeviceStartTime",
- base::TimeTicks::Now() - *first_play_start_time_);
- }
- TRACE_EVENT_ASYNC_END0("audio", "StartingPlayback", this);
- }
-
- // Update the audio-delay measurement, inform about the number of skipped
- // frames, and ask client to render audio. Since |output_bus_| is wrapping
- // the shared memory the Render() call is writing directly into the shared
- // memory.
- render_callback_->Render(delay, delay_timestamp, frames_skipped,
- output_bus_.get());
-
- if (audio_parameters_.IsBitstreamFormat()) {
- buffer->params.bitstream_data_size = output_bus_->GetBitstreamDataSize();
- buffer->params.bitstream_frames = output_bus_->GetBitstreamFrames();
}
- TRACE_EVENT_END2("audio", "AudioOutputDevice::FireRenderCallback",
- "timestamp (ms)",
- (delay_timestamp - base::TimeTicks()).InMillisecondsF(),
- "delay (ms)", delay.InMillisecondsF());
-}
-
-bool AudioOutputDevice::AudioThreadCallback::
- CurrentThreadIsAudioDeviceThread() {
- return thread_checker_.CalledOnValidThread();
-}
-
-void AudioOutputDevice::AudioThreadCallback::InitializePlayStartTime() {
- if (!first_play_start_time_.has_value())
- first_play_start_time_ = base::TimeTicks::Now();
}
} // namespace media
diff --git a/chromium/media/audio/audio_output_device.h b/chromium/media/audio/audio_output_device.h
index 43f95ae46f2..fd42339c084 100644
--- a/chromium/media/audio/audio_output_device.h
+++ b/chromium/media/audio/audio_output_device.h
@@ -17,7 +17,8 @@
// is done by using shared memory in combination with a sync socket pair
// to generate a low latency transport. The AudioOutputDevice user registers an
// AudioOutputDevice::RenderCallback at construction and will be polled by the
-// AudioOutputDevice for audio to be played out by the underlying audio layers.
+// AudioOutputController for audio to be played out by the underlying audio
+// layers.
//
// State sequences.
//
@@ -51,10 +52,10 @@
// The thread within which this class receives all the IPC messages and
// IPC communications can only happen in this thread.
// 4. Audio transport thread (See AudioDeviceThread).
-// Responsible for calling the AudioThreadCallback implementation that in
-// turn calls AudioRendererSink::RenderCallback which feeds audio samples to
-// the audio layer in the browser process using sync sockets and shared
-// memory.
+// Responsible for calling the AudioOutputDeviceThreadCallback
+// implementation that in turn calls AudioRendererSink::RenderCallback
+// which feeds audio samples to the audio layer in the browser process using
+// sync sockets and shared memory.
//
// Implementation notes:
// - The user must call Stop() before deleting the class instance.
@@ -67,7 +68,7 @@
#include "base/bind.h"
#include "base/macros.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/synchronization/waitable_event.h"
#include "base/time/time.h"
#include "media/audio/audio_device_thread.h"
@@ -83,6 +84,7 @@ class SingleThreadTaskRunner;
}
namespace media {
+class AudioOutputDeviceThreadCallback;
class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
public AudioOutputIPCDelegate {
@@ -114,9 +116,9 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
// AudioOutputIPCDelegate methods.
void OnError() override;
void OnDeviceAuthorized(OutputDeviceStatus device_status,
- const media::AudioParameters& output_params,
+ const AudioParameters& output_params,
const std::string& matched_device_id) override;
- void OnStreamCreated(base::SharedMemoryHandle handle,
+ void OnStreamCreated(base::UnsafeSharedMemoryRegion shared_memory_region,
base::SyncSocket::Handle socket_handle,
bool play_automatically) override;
void OnIPCClosed() override;
@@ -136,6 +138,16 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
// request. Can Play()/Pause()/Stop().
};
+ // This enum is used for UMA, so the only allowed operation on this definition
+ // is to add new states to the bottom, update kMaxValue, and update the
+ // histogram "Media.Audio.Render.StreamCallbackError2".
+ enum Error {
+ kNoError = 0,
+ kErrorDuringCreation = 1,
+ kErrorDuringRendering = 2,
+ kMaxValue = kErrorDuringRendering
+ };
+
// Methods called on IO thread ----------------------------------------------
// The following methods are tasks posted on the IO thread that need to
// be executed on that thread. They use AudioOutputIPC to send IPC messages
@@ -152,7 +164,7 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
// Process device authorization result on the IO thread.
void ProcessDeviceAuthorizationOnIOThread(
OutputDeviceStatus device_status,
- const media::AudioParameters& output_params,
+ const AudioParameters& output_params,
const std::string& matched_device_id,
bool timed_out);
@@ -173,7 +185,7 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
StartupState state_;
// For UMA stats. May only be accessed on the IO thread.
- bool had_callback_error_ = false;
+ Error had_error_ = kNoError;
// Last set volume.
double volume_ = 1.0;
@@ -189,13 +201,10 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
// received in OnDeviceAuthorized().
std::string matched_device_id_;
- // Our audio thread callback class. See source file for details.
- class AudioThreadCallback;
-
// In order to avoid a race between OnStreamCreated and Stop(), we use this
// guard to control stopping and starting the audio thread.
base::Lock audio_thread_lock_;
- std::unique_ptr<AudioOutputDevice::AudioThreadCallback> audio_callback_;
+ std::unique_ptr<AudioOutputDeviceThreadCallback> audio_callback_;
std::unique_ptr<AudioDeviceThread> audio_thread_;
// Temporary hack to ignore OnStreamCreated() due to the user calling Stop()
diff --git a/chromium/media/audio/audio_output_device_thread_callback.cc b/chromium/media/audio/audio_output_device_thread_callback.cc
new file mode 100644
index 00000000000..c19252afbb9
--- /dev/null
+++ b/chromium/media/audio/audio_output_device_thread_callback.cc
@@ -0,0 +1,139 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/audio/audio_output_device_thread_callback.h"
+
+#include <utility>
+
+#include "base/metrics/histogram_macros.h"
+#include "base/trace_event/trace_event.h"
+
+namespace media {
+
+AudioOutputDeviceThreadCallback::Metrics::Metrics()
+ : first_play_start_time_(base::nullopt) {}
+
+AudioOutputDeviceThreadCallback::Metrics::~Metrics() = default;
+
+void AudioOutputDeviceThreadCallback::Metrics::OnCreated() {
+ start_time_ = base::TimeTicks::Now();
+}
+
+void AudioOutputDeviceThreadCallback::Metrics::OnProcess() {
+ if (first_play_start_time_) {
+ UMA_HISTOGRAM_TIMES("Media.Audio.Render.OutputDeviceStartTime",
+ base::TimeTicks::Now() - *first_play_start_time_);
+ }
+}
+
+void AudioOutputDeviceThreadCallback::Metrics::OnInitializePlayStartTime() {
+ if (!first_play_start_time_.has_value())
+ first_play_start_time_ = base::TimeTicks::Now();
+}
+
+void AudioOutputDeviceThreadCallback::Metrics::OnDestroyed() {
+ DCHECK(!start_time_.is_null());
+ UMA_HISTOGRAM_LONG_TIMES("Media.Audio.Render.OutputStreamDuration",
+ base::TimeTicks::Now() - start_time_);
+}
+
+AudioOutputDeviceThreadCallback::AudioOutputDeviceThreadCallback(
+ const media::AudioParameters& audio_parameters,
+ base::UnsafeSharedMemoryRegion shared_memory_region,
+ media::AudioRendererSink::RenderCallback* render_callback,
+ std::unique_ptr<Metrics> metrics)
+ : media::AudioDeviceThread::Callback(
+ audio_parameters,
+ ComputeAudioOutputBufferSize(audio_parameters),
+ /*segment count*/ 1),
+ shared_memory_region_(std::move(shared_memory_region)),
+ render_callback_(render_callback),
+ callback_num_(0),
+ metrics_(std::move(metrics)) {
+ // CHECK that the shared memory is large enough. The memory allocated must be
+ // at least as large as expected.
+ CHECK(memory_length_ <= shared_memory_region_.GetSize());
+ if (metrics_)
+ metrics_->OnCreated();
+}
+
+AudioOutputDeviceThreadCallback::~AudioOutputDeviceThreadCallback() {
+ if (metrics_)
+ metrics_->OnDestroyed();
+}
+
+void AudioOutputDeviceThreadCallback::MapSharedMemory() {
+ CHECK_EQ(total_segments_, 1u);
+ shared_memory_mapping_ = shared_memory_region_.MapAt(0, memory_length_);
+ CHECK(shared_memory_mapping_.IsValid());
+
+ media::AudioOutputBuffer* buffer =
+ reinterpret_cast<media::AudioOutputBuffer*>(
+ shared_memory_mapping_.memory());
+ output_bus_ = media::AudioBus::WrapMemory(audio_parameters_, buffer->audio);
+ output_bus_->set_is_bitstream_format(audio_parameters_.IsBitstreamFormat());
+}
+
+// Called whenever we receive notifications about pending data.
+void AudioOutputDeviceThreadCallback::Process(uint32_t control_signal) {
+ callback_num_++;
+
+ // Read and reset the number of frames skipped.
+ media::AudioOutputBuffer* buffer =
+ reinterpret_cast<media::AudioOutputBuffer*>(
+ shared_memory_mapping_.memory());
+ uint32_t frames_skipped = buffer->params.frames_skipped;
+ buffer->params.frames_skipped = 0;
+
+ TRACE_EVENT_BEGIN2("audio", "AudioOutputDevice::FireRenderCallback",
+ "callback_num", callback_num_, "frames skipped",
+ frames_skipped);
+
+ base::TimeDelta delay =
+ base::TimeDelta::FromMicroseconds(buffer->params.delay_us);
+
+ base::TimeTicks delay_timestamp =
+ base::TimeTicks() +
+ base::TimeDelta::FromMicroseconds(buffer->params.delay_timestamp_us);
+
+ DVLOG(4) << __func__ << " delay:" << delay << " delay_timestamp:" << delay
+ << " frames_skipped:" << frames_skipped;
+
+ // When playback starts, we get an immediate callback to Process to make sure
+ // that we have some data, we'll get another one after the device is awake and
+ // ingesting data, which is what we want to track with this trace.
+ if (callback_num_ == 2) {
+ if (metrics_)
+ metrics_->OnProcess();
+ TRACE_EVENT_ASYNC_END0("audio", "StartingPlayback", this);
+ }
+
+ // Update the audio-delay measurement, inform about the number of skipped
+ // frames, and ask client to render audio. Since |output_bus_| is wrapping
+ // the shared memory the Render() call is writing directly into the shared
+ // memory.
+ render_callback_->Render(delay, delay_timestamp, frames_skipped,
+ output_bus_.get());
+
+ if (audio_parameters_.IsBitstreamFormat()) {
+ buffer->params.bitstream_data_size = output_bus_->GetBitstreamDataSize();
+ buffer->params.bitstream_frames = output_bus_->GetBitstreamFrames();
+ }
+
+ TRACE_EVENT_END2("audio", "AudioOutputDevice::FireRenderCallback",
+ "timestamp (ms)",
+ (delay_timestamp - base::TimeTicks()).InMillisecondsF(),
+ "delay (ms)", delay.InMillisecondsF());
+}
+
+bool AudioOutputDeviceThreadCallback::CurrentThreadIsAudioDeviceThread() {
+ return thread_checker_.CalledOnValidThread();
+}
+
+void AudioOutputDeviceThreadCallback::InitializePlayStartTime() {
+ if (metrics_)
+ metrics_->OnInitializePlayStartTime();
+}
+
+} // namespace media
diff --git a/chromium/media/audio/audio_output_device_thread_callback.h b/chromium/media/audio/audio_output_device_thread_callback.h
new file mode 100644
index 00000000000..2a529bf9082
--- /dev/null
+++ b/chromium/media/audio/audio_output_device_thread_callback.h
@@ -0,0 +1,74 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_AUDIO_AUDIO_OUTPUT_DEVICE_THREAD_CALLBACK_H_
+#define MEDIA_AUDIO_AUDIO_OUTPUT_DEVICE_THREAD_CALLBACK_H_
+
+#include <memory>
+
+#include "base/memory/unsafe_shared_memory_region.h"
+#include "base/optional.h"
+#include "media/audio/audio_device_thread.h"
+#include "media/base/audio_renderer_sink.h"
+
+namespace media {
+
+// Takes care of invoking the render callback on the audio thread.
+// An instance of this class is created for each capture stream on output device
+// stream created.
+class MEDIA_EXPORT AudioOutputDeviceThreadCallback
+ : public media::AudioDeviceThread::Callback {
+ public:
+ class Metrics {
+ public:
+ Metrics();
+ ~Metrics();
+
+ void OnCreated();
+ void OnProcess();
+ void OnInitializePlayStartTime();
+ void OnDestroyed();
+
+ private:
+ base::TimeTicks start_time_;
+ // If set, this is used to record the startup duration UMA stat.
+ base::Optional<base::TimeTicks> first_play_start_time_;
+ };
+
+ AudioOutputDeviceThreadCallback(
+ const media::AudioParameters& audio_parameters,
+ base::UnsafeSharedMemoryRegion shared_memory_region,
+ media::AudioRendererSink::RenderCallback* render_callback,
+ std::unique_ptr<Metrics> metrics = nullptr);
+ ~AudioOutputDeviceThreadCallback() override;
+
+ void MapSharedMemory() override;
+
+ // Called whenever we receive notifications about pending data.
+ void Process(uint32_t control_signal) override;
+
+ // Returns whether the current thread is the audio device thread or not.
+ // Will always return true if DCHECKs are not enabled.
+ bool CurrentThreadIsAudioDeviceThread();
+
+ // Sets |first_play_start_time_| to the current time unless it's already set,
+ // in which case it's a no-op. The first call to this method MUST have
+ // completed by the time we recieve our first Process() callback to avoid
+ // data races.
+ void InitializePlayStartTime();
+
+ private:
+ base::UnsafeSharedMemoryRegion shared_memory_region_;
+ base::WritableSharedMemoryMapping shared_memory_mapping_;
+ media::AudioRendererSink::RenderCallback* render_callback_;
+ std::unique_ptr<media::AudioBus> output_bus_;
+ uint64_t callback_num_;
+ std::unique_ptr<Metrics> metrics_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioOutputDeviceThreadCallback);
+};
+
+} // namespace media
+
+#endif // MEDIA_AUDIO_AUDIO_OUTPUT_DEVICE_THREAD_CALLBACK_H_
diff --git a/chromium/media/audio/audio_output_device_unittest.cc b/chromium/media/audio/audio_output_device_unittest.cc
index 1c8dc659447..957955072bb 100644
--- a/chromium/media/audio/audio_output_device_unittest.cc
+++ b/chromium/media/audio/audio_output_device_unittest.cc
@@ -13,7 +13,8 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/shared_memory_mapping.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/single_thread_task_runner.h"
#include "base/sync_socket.h"
#include "base/task_runner.h"
@@ -21,12 +22,12 @@
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/audio/audio_sync_reader.h"
-#include "mojo/public/cpp/system/platform_handle.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using base::CancelableSyncSocket;
-using base::SharedMemory;
+using base::UnsafeSharedMemoryRegion;
+using base::WritableSharedMemoryMapping;
using base::SyncSocket;
using testing::_;
using testing::DoAll;
@@ -85,25 +86,6 @@ class MockAudioOutputIPC : public AudioOutputIPC {
MOCK_METHOD1(SetVolume, void(double volume));
};
-// Converts a new-style shared memory region to a old-style shared memory
-// handle using a mojo::ScopedSharedBufferHandle that supports both types.
-// TODO(https://crbug.com/844508): get rid of this when AudioOutputDevice shared
-// memory refactor is done.
-base::SharedMemoryHandle ToSharedMemoryHandle(
- base::UnsafeSharedMemoryRegion region) {
- mojo::ScopedSharedBufferHandle buffer_handle =
- mojo::WrapUnsafeSharedMemoryRegion(std::move(region));
- base::SharedMemoryHandle memory_handle;
- mojo::UnwrappedSharedMemoryHandleProtection protection;
- size_t memory_length = 0;
- auto result = mojo::UnwrapSharedMemoryHandle(
- std::move(buffer_handle), &memory_handle, &memory_length, &protection);
- DCHECK_EQ(result, MOJO_RESULT_OK);
- DCHECK_EQ(protection,
- mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite);
- return memory_handle;
-}
-
} // namespace.
class AudioOutputDeviceTest : public testing::Test {
@@ -131,7 +113,8 @@ class AudioOutputDeviceTest : public testing::Test {
private:
int CalculateMemorySize();
- SharedMemory shared_memory_;
+ UnsafeSharedMemoryRegion shared_memory_region_;
+ WritableSharedMemoryMapping shared_memory_mapping_;
CancelableSyncSocket browser_socket_;
CancelableSyncSocket renderer_socket_;
@@ -207,8 +190,11 @@ void AudioOutputDeviceTest::CallOnStreamCreated() {
const uint32_t kMemorySize =
ComputeAudioOutputBufferSize(default_audio_parameters_);
- ASSERT_TRUE(shared_memory_.CreateAndMapAnonymous(kMemorySize));
- memset(shared_memory_.memory(), 0xff, kMemorySize);
+ shared_memory_region_ = base::UnsafeSharedMemoryRegion::Create(kMemorySize);
+ ASSERT_TRUE(shared_memory_region_.IsValid());
+ shared_memory_mapping_ = shared_memory_region_.Map();
+ ASSERT_TRUE(shared_memory_mapping_.IsValid());
+ memset(shared_memory_mapping_.memory(), 0xff, kMemorySize);
ASSERT_TRUE(CancelableSyncSocket::CreatePair(&browser_socket_,
&renderer_socket_));
@@ -219,14 +205,12 @@ void AudioOutputDeviceTest::CallOnStreamCreated() {
SyncSocket::TransitDescriptor audio_device_socket_descriptor;
ASSERT_TRUE(renderer_socket_.PrepareTransitDescriptor(
base::GetCurrentProcessHandle(), &audio_device_socket_descriptor));
- base::SharedMemoryHandle duplicated_memory_handle =
- shared_memory_.handle().Duplicate();
- ASSERT_TRUE(duplicated_memory_handle.IsValid());
+ base::UnsafeSharedMemoryRegion duplicated_memory_region =
+ shared_memory_region_.Duplicate();
+ ASSERT_TRUE(duplicated_memory_region.IsValid());
- // TODO(erikchen): This appears to leak the SharedMemoryHandle.
- // https://crbug.com/640840.
audio_device_->OnStreamCreated(
- duplicated_memory_handle,
+ std::move(duplicated_memory_region),
SyncSocket::UnwrapHandle(audio_device_socket_descriptor),
/*playing_automatically*/ false);
task_env_.FastForwardBy(base::TimeDelta());
@@ -369,12 +353,11 @@ struct TestEnvironment {
time_stamp = base::TimeTicks::Now();
#if defined(OS_FUCHSIA)
- // Raise the timeout limits to reduce bot flakiness.
- // Fuchsia's task scheduler suffers from bad jitter on systems running many
- // tests simultaneously on nested virtualized deployments (e.g. test bots),
- // leading some read operations to randomly timeout.
+ // TODO(https://crbug.com/838367): Fuchsia bots use nested virtualization,
+ // which can result in unusually long scheduling delays, so allow a longer
+ // timeout.
reader->set_max_wait_timeout_for_test(
- base::TimeDelta::FromMilliseconds(50));
+ base::TimeDelta::FromMilliseconds(250));
#endif
}
@@ -412,9 +395,9 @@ TEST_F(AudioOutputDeviceTest, VerifyDataFlow) {
Mock::VerifyAndClear(ipc);
audio_device->OnDeviceAuthorized(OUTPUT_DEVICE_STATUS_OK, params,
kDefaultDeviceId);
- audio_device->OnStreamCreated(
- ToSharedMemoryHandle(env.reader->TakeSharedMemoryRegion()),
- env.renderer_socket.Release(), /*playing_automatically*/ false);
+ audio_device->OnStreamCreated(env.reader->TakeSharedMemoryRegion(),
+ env.renderer_socket.Release(),
+ /*playing_automatically*/ false);
task_env_.RunUntilIdle();
// At this point, the callback thread should be running. Send some data over
@@ -473,9 +456,9 @@ TEST_F(AudioOutputDeviceTest, CreateNondefaultDevice) {
Mock::VerifyAndClear(ipc);
audio_device->OnDeviceAuthorized(OUTPUT_DEVICE_STATUS_OK, params,
kNonDefaultDeviceId);
- audio_device->OnStreamCreated(
- ToSharedMemoryHandle(env.reader->TakeSharedMemoryRegion()),
- env.renderer_socket.Release(), /*playing_automatically*/ false);
+ audio_device->OnStreamCreated(env.reader->TakeSharedMemoryRegion(),
+ env.renderer_socket.Release(),
+ /*playing_automatically*/ false);
audio_device->Stop();
EXPECT_CALL(*ipc, CloseStream());
@@ -509,9 +492,9 @@ TEST_F(AudioOutputDeviceTest, CreateBitStreamStream) {
Mock::VerifyAndClear(ipc);
audio_device->OnDeviceAuthorized(OUTPUT_DEVICE_STATUS_OK, params,
kNonDefaultDeviceId);
- audio_device->OnStreamCreated(
- ToSharedMemoryHandle(env.reader->TakeSharedMemoryRegion()),
- env.renderer_socket.Release(), /*playing_automatically*/ false);
+ audio_device->OnStreamCreated(env.reader->TakeSharedMemoryRegion(),
+ env.renderer_socket.Release(),
+ /*playing_automatically*/ false);
task_env_.RunUntilIdle();
// At this point, the callback thread should be running. Send some data over
@@ -539,7 +522,9 @@ TEST_F(AudioOutputDeviceTest, CreateBitStreamStream) {
EXPECT_EQ(kBitstreamFrames, test_bus->GetBitstreamFrames());
EXPECT_EQ(kBitstreamDataSize, test_bus->GetBitstreamDataSize());
for (size_t i = 0; i < kBitstreamDataSize / sizeof(float); ++i) {
- EXPECT_EQ(kAudioData, test_bus->channel(0)[i]);
+ // Note: if all of these fail, the bots will behave strangely due to the
+ // large amount of text output. Assert is used to avoid this.
+ ASSERT_EQ(kAudioData, test_bus->channel(0)[i]);
}
}
diff --git a/chromium/media/audio/audio_output_ipc.h b/chromium/media/audio/audio_output_ipc.h
index 897f7825ad6..d287ac0c7e5 100644
--- a/chromium/media/audio/audio_output_ipc.h
+++ b/chromium/media/audio/audio_output_ipc.h
@@ -7,7 +7,7 @@
#include <string>
-#include "base/memory/shared_memory.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/sync_socket.h"
#include "media/base/audio_parameters.h"
#include "media/base/media_export.h"
@@ -35,9 +35,10 @@ class MEDIA_EXPORT AudioOutputIPCDelegate {
// |handle| and |socket_handle|. |playing_automatically| indicates if the
// AudioOutputIPCDelegate is playing right away due to an earlier call to
// Play();
- virtual void OnStreamCreated(base::SharedMemoryHandle handle,
- base::SyncSocket::Handle socket_handle,
- bool playing_automatically) = 0;
+ virtual void OnStreamCreated(
+ base::UnsafeSharedMemoryRegion shared_memory_region,
+ base::SyncSocket::Handle socket_handle,
+ bool playing_automatically) = 0;
// Called when the AudioOutputIPC object is going away and/or when the IPC
// channel has been closed and no more ipc requests can be made.
diff --git a/chromium/media/audio/audio_output_resampler.cc b/chromium/media/audio/audio_output_resampler.cc
index bb2e6766b0e..5c0f5f447c9 100644
--- a/chromium/media/audio/audio_output_resampler.cc
+++ b/chromium/media/audio/audio_output_resampler.cc
@@ -262,8 +262,7 @@ AudioOutputResampler::AudioOutputResampler(
reinitialize_timer_(FROM_HERE,
close_delay_,
base::Bind(&AudioOutputResampler::Reinitialize,
- base::Unretained(this)),
- false),
+ base::Unretained(this))),
register_debug_recording_source_callback_(
register_debug_recording_source_callback),
weak_factory_(this) {
diff --git a/chromium/media/audio/audio_output_resampler.h b/chromium/media/audio/audio_output_resampler.h
index ac59d5b484c..556d9e36f06 100644
--- a/chromium/media/audio/audio_output_resampler.h
+++ b/chromium/media/audio/audio_output_resampler.h
@@ -98,7 +98,7 @@ class MEDIA_EXPORT AudioOutputResampler : public AudioOutputDispatcher {
// states by clearing the dispatcher if all proxies have been closed and none
// have been created within |close_delay_|. Without this, audio may be lost
// to a fake stream indefinitely for transient errors.
- base::Timer reinitialize_timer_;
+ base::RetainingOneShotTimer reinitialize_timer_;
// Callback for registering a debug recording source.
RegisterDebugRecordingSourceCallback
diff --git a/chromium/media/audio/audio_output_unittest.cc b/chromium/media/audio/audio_output_unittest.cc
index 22cfcd4a77a..4503fc71fb1 100644
--- a/chromium/media/audio/audio_output_unittest.cc
+++ b/chromium/media/audio/audio_output_unittest.cc
@@ -53,7 +53,7 @@ class AudioOutputTest : public ::testing::Test {
}
protected:
- base::MessageLoop message_loop_;
+ base::MessageLoopForIO message_loop_;
std::unique_ptr<AudioManager> audio_manager_;
std::unique_ptr<AudioDeviceInfoAccessorForTests> audio_manager_device_info_;
AudioParameters stream_params_;
diff --git a/chromium/media/audio/audio_thread_impl.cc b/chromium/media/audio/audio_thread_impl.cc
index 1a64cc14186..96109ef0280 100644
--- a/chromium/media/audio/audio_thread_impl.cc
+++ b/chromium/media/audio/audio_thread_impl.cc
@@ -10,10 +10,14 @@
namespace media {
AudioThreadImpl::AudioThreadImpl() : thread_("AudioThread") {
+ base::Thread::Options thread_options;
#if defined(OS_WIN)
thread_.init_com_with_mta(true);
+#elif defined(OS_FUCHSIA)
+ // FIDL-based APIs require async_t, which is initialized on IO thread.
+ thread_options.message_loop_type = base::MessageLoop::TYPE_IO;
#endif
- CHECK(thread_.Start());
+ CHECK(thread_.StartWithOptions(thread_options));
#if defined(OS_MACOSX)
// On Mac, the audio task runner must belong to the main thread.
diff --git a/chromium/media/audio/cras/audio_manager_cras.cc b/chromium/media/audio/cras/audio_manager_cras.cc
index be523527018..e84164d9748 100644
--- a/chromium/media/audio/cras/audio_manager_cras.cc
+++ b/chromium/media/audio/cras/audio_manager_cras.cc
@@ -186,6 +186,10 @@ AudioParameters AudioManagerCras::GetInputStreamParameters(
if (HasKeyboardMic(devices))
params.set_effects(AudioParameters::KEYBOARD_MIC);
+ if (GetSystemAecSupportedPerBoard())
+ params.set_effects(params.effects() |
+ AudioParameters::EXPERIMENTAL_ECHO_CANCELLER);
+
return params;
}
@@ -310,6 +314,27 @@ int AudioManagerCras::GetDefaultOutputBufferSizePerBoard() {
return static_cast<int>(buffer_size);
}
+bool AudioManagerCras::GetSystemAecSupportedPerBoard() {
+ DCHECK(GetTaskRunner()->BelongsToCurrentThread());
+ bool system_aec_supported = false;
+ base::WaitableEvent event(base::WaitableEvent::ResetPolicy::MANUAL,
+ base::WaitableEvent::InitialState::NOT_SIGNALED);
+ if (main_task_runner_->BelongsToCurrentThread()) {
+ // Unittest may use the same thread for audio thread.
+ GetSystemAecSupportedOnMainThread(&system_aec_supported, &event);
+ } else {
+ // Using base::Unretained is safe here because we wait for callback be
+ // executed in main thread before local variables are destructed.
+ main_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&AudioManagerCras::GetSystemAecSupportedOnMainThread,
+ weak_this_, base::Unretained(&system_aec_supported),
+ base::Unretained(&event)));
+ }
+ WaitEventOrShutdown(&event);
+ return system_aec_supported;
+}
+
AudioParameters AudioManagerCras::GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) {
@@ -466,6 +491,17 @@ void AudioManagerCras::GetDefaultOutputBufferSizeOnMainThread(
event->Signal();
}
+void AudioManagerCras::GetSystemAecSupportedOnMainThread(
+ bool* system_aec_supported,
+ base::WaitableEvent* event) {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ if (chromeos::CrasAudioHandler::IsInitialized()) {
+ *system_aec_supported =
+ chromeos::CrasAudioHandler::Get()->system_aec_supported();
+ }
+ event->Signal();
+}
+
void AudioManagerCras::WaitEventOrShutdown(base::WaitableEvent* event) {
base::WaitableEvent* waitables[] = {event, &on_shutdown_};
base::WaitableEvent::WaitMany(waitables, arraysize(waitables));
diff --git a/chromium/media/audio/cras/audio_manager_cras.h b/chromium/media/audio/cras/audio_manager_cras.h
index 2bce9b04f30..e994735ee22 100644
--- a/chromium/media/audio/cras/audio_manager_cras.h
+++ b/chromium/media/audio/cras/audio_manager_cras.h
@@ -79,6 +79,9 @@ class MEDIA_EXPORT AudioManagerCras : public AudioManagerBase {
// Get default output buffer size for this board.
int GetDefaultOutputBufferSizePerBoard();
+ // Get if system AEC is supported or not for this board.
+ bool GetSystemAecSupportedPerBoard();
+
void GetAudioDeviceNamesImpl(bool is_input, AudioDeviceNames* device_names);
std::string GetHardwareDeviceFromDeviceId(
@@ -97,6 +100,8 @@ class MEDIA_EXPORT AudioManagerCras : public AudioManagerBase {
base::WaitableEvent* event);
void GetDefaultOutputBufferSizeOnMainThread(int32_t* buffer_size,
base::WaitableEvent* event);
+ void GetSystemAecSupportedOnMainThread(bool* system_aec_supported,
+ base::WaitableEvent* event);
void WaitEventOrShutdown(base::WaitableEvent* event);
diff --git a/chromium/media/audio/cras/cras_input.cc b/chromium/media/audio/cras/cras_input.cc
index 435c9101bd1..1088581e881 100644
--- a/chromium/media/audio/cras/cras_input.cc
+++ b/chromium/media/audio/cras/cras_input.cc
@@ -121,6 +121,10 @@ void CrasInputStream::Close() {
audio_manager_->ReleaseInputStream(this);
}
+inline bool CrasInputStream::UseCrasAec() const {
+ return params_.effects() & AudioParameters::ECHO_CANCELLER;
+}
+
void CrasInputStream::Start(AudioInputCallback* callback) {
DCHECK(client_);
DCHECK(callback);
@@ -204,6 +208,9 @@ void CrasInputStream::Start(AudioInputCallback* callback) {
return;
}
+ if (UseCrasAec())
+ cras_client_stream_params_enable_aec(stream_params);
+
// Before starting the stream, save the number of bytes in a frame for use in
// the callback.
bytes_per_frame_ = cras_client_format_bytes_per_frame(audio_format);
diff --git a/chromium/media/audio/cras/cras_input.h b/chromium/media/audio/cras/cras_input.h
index e7917e564d7..fe37f850b5d 100644
--- a/chromium/media/audio/cras/cras_input.h
+++ b/chromium/media/audio/cras/cras_input.h
@@ -78,6 +78,9 @@ class MEDIA_EXPORT CrasInputStream : public AgcAudioStream<AudioInputStream> {
// Convert from a volume ratio to dB.
double GetDecibelsFromVolumeRatio(double volume_ratio) const;
+ // Return true to use AEC in CRAS for this input stream.
+ inline bool UseCrasAec() const;
+
// Non-refcounted pointer back to the audio manager.
// The AudioManager indirectly holds on to stream objects, so we don't
// want circular references. Additionally, stream objects live on the audio
diff --git a/chromium/media/audio/fuchsia/audio_manager_fuchsia.cc b/chromium/media/audio/fuchsia/audio_manager_fuchsia.cc
index 45d7ed093d6..998df62bace 100644
--- a/chromium/media/audio/fuchsia/audio_manager_fuchsia.cc
+++ b/chromium/media/audio/fuchsia/audio_manager_fuchsia.cc
@@ -6,8 +6,6 @@
#include <memory>
-#include <media/audio.h>
-
#include "media/audio/fuchsia/audio_output_stream_fuchsia.h"
namespace media {
@@ -15,16 +13,14 @@ namespace media {
AudioManagerFuchsia::AudioManagerFuchsia(
std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory)
- : AudioManagerBase(std::move(audio_thread), audio_log_factory),
- fuchsia_audio_manager_(fuchsia_audio_manager_create()) {}
+ : AudioManagerBase(std::move(audio_thread), audio_log_factory) {}
-AudioManagerFuchsia::~AudioManagerFuchsia() {
- fuchsia_audio_manager_free(fuchsia_audio_manager_);
-}
+AudioManagerFuchsia::~AudioManagerFuchsia() = default;
bool AudioManagerFuchsia::HasAudioOutputDevices() {
- return fuchsia_audio_manager_get_output_devices(fuchsia_audio_manager_,
- nullptr, 0) > 0;
+ // TODO(crbug.com/852834): Fuchsia currently doesn't provide an API for device
+ // enumeration. Update this method when that functionality is implemented.
+ return true;
}
bool AudioManagerFuchsia::HasAudioInputDevices() {
@@ -40,33 +36,9 @@ void AudioManagerFuchsia::GetAudioInputDeviceNames(
void AudioManagerFuchsia::GetAudioOutputDeviceNames(
AudioDeviceNames* device_names) {
- device_names->clear();
-
- std::vector<fuchsia_audio_device_description> descriptions;
- descriptions.resize(16);
- bool try_again = true;
- while (try_again) {
- int result = fuchsia_audio_manager_get_output_devices(
- fuchsia_audio_manager_, descriptions.data(), descriptions.size());
- if (result < 0) {
- LOG(ERROR) << "fuchsia_audio_manager_get_output_devices() returned "
- << result;
- device_names->clear();
- return;
- }
-
- // Try again if the buffer was too small.
- try_again = static_cast<size_t>(result) > descriptions.size();
- descriptions.resize(result);
- }
-
- // Create default device if we have any output devices present.
- if (!descriptions.empty())
- device_names->push_back(AudioDeviceName::CreateDefault());
-
- for (auto& desc : descriptions) {
- device_names->push_back(AudioDeviceName(desc.name, desc.id));
- }
+ // TODO(crbug.com/852834): Fuchsia currently doesn't provide an API for device
+ // enumeration. Update this method when that functionality is implemented.
+ *device_names = {AudioDeviceName::CreateDefault()};
}
AudioParameters AudioManagerFuchsia::GetInputStreamParameters(
@@ -78,32 +50,11 @@ AudioParameters AudioManagerFuchsia::GetInputStreamParameters(
AudioParameters AudioManagerFuchsia::GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) {
- fuchsia_audio_parameters device_params;
- int result = fuchsia_audio_manager_get_output_device_default_parameters(
- fuchsia_audio_manager_,
- output_device_id == AudioDeviceDescription::kDefaultDeviceId
- ? nullptr
- : const_cast<char*>(output_device_id.c_str()),
- &device_params);
- if (result < 0) {
- LOG(ERROR) << "fuchsia_audio_manager_get_default_output_device_parameters()"
- " returned "
- << result;
-
- return AudioParameters();
- }
-
- int user_buffer_size = GetUserBufferSize();
- if (user_buffer_size > 0)
- device_params.buffer_size = user_buffer_size;
-
- int sample_rate = input_params.sample_rate();
- if (sample_rate < 8000 || sample_rate > 96000)
- sample_rate = device_params.sample_rate;
-
+ // TODO(crbug.com/852834): Fuchsia currently doesn't provide an API to get
+ // device configuration. Update this method when that functionality is
+ // implemented.
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- GuessChannelLayout(device_params.num_channels),
- sample_rate, device_params.buffer_size);
+ CHANNEL_LAYOUT_STEREO, 48000, 480);
}
const char* AudioManagerFuchsia::GetName() {
@@ -122,7 +73,13 @@ AudioOutputStream* AudioManagerFuchsia::MakeLowLatencyOutputStream(
const std::string& device_id,
const LogCallback& log_callback) {
DCHECK_EQ(AudioParameters::AUDIO_PCM_LOW_LATENCY, params.format());
- return new AudioOutputStreamFuchsia(this, device_id, params);
+
+ if (!device_id.empty() &&
+ device_id != AudioDeviceDescription::kDefaultDeviceId) {
+ return nullptr;
+ }
+
+ return new AudioOutputStreamFuchsia(this, params);
}
AudioInputStream* AudioManagerFuchsia::MakeLinearInputStream(
diff --git a/chromium/media/audio/fuchsia/audio_manager_fuchsia.h b/chromium/media/audio/fuchsia/audio_manager_fuchsia.h
index 92feaf3388f..7ce59254984 100644
--- a/chromium/media/audio/fuchsia/audio_manager_fuchsia.h
+++ b/chromium/media/audio/fuchsia/audio_manager_fuchsia.h
@@ -5,8 +5,6 @@
#ifndef MEDIA_AUDIO_FUCHSIA_AUDIO_MANAGER_FUCHSIA_H_
#define MEDIA_AUDIO_FUCHSIA_AUDIO_MANAGER_FUCHSIA_H_
-#include <media/audio.h>
-
#include "media/audio/audio_manager_base.h"
namespace media {
@@ -43,18 +41,12 @@ class AudioManagerFuchsia : public AudioManagerBase {
const std::string& device_id,
const LogCallback& log_callback) override;
- fuchsia_audio_manager* GetFuchsiaAudioManager() const {
- return fuchsia_audio_manager_;
- }
-
protected:
AudioParameters GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) override;
private:
- fuchsia_audio_manager* fuchsia_audio_manager_;
-
DISALLOW_COPY_AND_ASSIGN(AudioManagerFuchsia);
};
diff --git a/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc b/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
index 05b4c3c6885..818d73c9aa0 100644
--- a/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
+++ b/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
@@ -4,9 +4,9 @@
#include "media/audio/fuchsia/audio_output_stream_fuchsia.h"
-#include <media/audio.h>
#include <zircon/syscalls.h>
+#include "base/fuchsia/component_context.h"
#include "media/audio/fuchsia/audio_manager_fuchsia.h"
#include "media/base/audio_sample_types.h"
#include "media/base/audio_timestamp_helper.h"
@@ -15,43 +15,53 @@ namespace media {
AudioOutputStreamFuchsia::AudioOutputStreamFuchsia(
AudioManagerFuchsia* manager,
- const std::string& device_id,
const AudioParameters& parameters)
: manager_(manager),
- device_id_(device_id),
parameters_(parameters),
- audio_bus_(AudioBus::Create(parameters)),
- buffer_(parameters_.frames_per_buffer() * parameters_.channels()) {}
+ audio_bus_(AudioBus::Create(parameters)) {}
AudioOutputStreamFuchsia::~AudioOutputStreamFuchsia() {
// Close() must be called first.
- DCHECK(!stream_);
+ DCHECK(!audio_renderer_);
}
bool AudioOutputStreamFuchsia::Open() {
- DCHECK(!stream_);
-
- fuchsia_audio_parameters fuchsia_params;
- fuchsia_params.sample_rate = parameters_.sample_rate();
- fuchsia_params.num_channels = parameters_.channels();
- fuchsia_params.buffer_size = parameters_.frames_per_buffer();
-
- int result = fuchsia_audio_manager_create_output_stream(
- manager_->GetFuchsiaAudioManager(), const_cast<char*>(device_id_.c_str()),
- &fuchsia_params, &stream_);
- if (result < 0) {
- DLOG(ERROR) << "Failed to open audio output " << device_id_
- << " error code: " << result;
- DCHECK(!stream_);
- return false;
- }
-
+ DCHECK(!audio_renderer_);
+
+ // Connect |audio_renderer_| to the audio service.
+ fuchsia::media::AudioPtr audio_server =
+ base::fuchsia::ComponentContext::GetDefault()
+ ->ConnectToService<fuchsia::media::Audio>();
+ audio_server->CreateRendererV2(audio_renderer_.NewRequest());
+ audio_renderer_.set_error_handler(
+ fit::bind_member(this, &AudioOutputStreamFuchsia::OnRendererError));
+
+ // Inform the |audio_renderer_| of the format required by the caller.
+ fuchsia::media::AudioPcmFormat format;
+ format.sample_format = fuchsia::media::AudioSampleFormat::FLOAT;
+ format.channels = parameters_.channels();
+ format.frames_per_second = parameters_.sample_rate();
+ audio_renderer_->SetPcmFormat(std::move(format));
+
+ // Use number of samples to specify media position.
+ audio_renderer_->SetPtsUnits(parameters_.sample_rate(), 1);
+
+ // Setup OnMinLeadTimeChanged event listener. This event is used to get
+ // |min_lead_time_|, which indicates how far ahead audio samples need to be
+ // sent to the renderer.
+ audio_renderer_.events().OnMinLeadTimeChanged =
+ fit::bind_member(this, &AudioOutputStreamFuchsia::OnMinLeadTimeChanged);
+ audio_renderer_->EnableMinLeadTimeEvents(true);
+
+ // The renderer may fail initialization asynchronously, which is handled in
+ // OnRendererError().
return true;
}
void AudioOutputStreamFuchsia::Start(AudioSourceCallback* callback) {
DCHECK(!callback_);
- DCHECK(started_time_.is_null());
+ DCHECK(reference_time_.is_null());
+ DCHECK(!timer_.IsRunning());
callback_ = callback;
PumpSamples();
@@ -59,7 +69,8 @@ void AudioOutputStreamFuchsia::Start(AudioSourceCallback* callback) {
void AudioOutputStreamFuchsia::Stop() {
callback_ = nullptr;
- started_time_ = base::TimeTicks();
+ reference_time_ = base::TimeTicks();
+ audio_renderer_->FlushNoReply();
timer_.Stop();
}
@@ -74,95 +85,140 @@ void AudioOutputStreamFuchsia::GetVolume(double* volume) {
void AudioOutputStreamFuchsia::Close() {
Stop();
-
- if (stream_) {
- fuchsia_audio_output_stream_free(stream_);
- stream_ = nullptr;
- }
+ audio_renderer_.Unbind();
// Signal to the manager that we're closed and can be removed. This should be
- // the last call in the function as it deletes "this".
+ // the last call in the function as it deletes |this|.
manager_->ReleaseOutputStream(this);
}
base::TimeTicks AudioOutputStreamFuchsia::GetCurrentStreamTime() {
- DCHECK(!started_time_.is_null());
- return started_time_ +
+ DCHECK(!reference_time_.is_null());
+ return reference_time_ +
AudioTimestampHelper::FramesToTime(stream_position_samples_,
parameters_.sample_rate());
}
-bool AudioOutputStreamFuchsia::UpdatePresentationDelay() {
- int result = fuchsia_audio_output_stream_get_min_delay(
- stream_, &presentation_delay_ns_);
- if (result != ZX_OK) {
- DLOG(ERROR) << "fuchsia_audio_output_stream_get_min_delay() failed: "
- << result;
- callback_->OnError();
+size_t AudioOutputStreamFuchsia::GetMinBufferSize() {
+ // Ensure that |payload_buffer_| fits enough packets to cover min_lead_time_
+ // plus one extra packet.
+ int min_packets = (AudioTimestampHelper::TimeToFrames(
+ min_lead_time_, parameters_.sample_rate()) +
+ parameters_.frames_per_buffer() - 1) /
+ parameters_.frames_per_buffer() +
+ 1;
+
+ return parameters_.GetBytesPerBuffer(kSampleFormatF32) * min_packets;
+}
+
+bool AudioOutputStreamFuchsia::InitializePayloadBuffer() {
+ size_t buffer_size = GetMinBufferSize();
+ if (!payload_buffer_.CreateAndMapAnonymous(buffer_size)) {
+ LOG(WARNING) << "Failed to allocate VMO of size " << buffer_size;
return false;
}
+ payload_buffer_pos_ = 0;
+ audio_renderer_->SetPayloadBuffer(
+ zx::vmo(payload_buffer_.handle().Duplicate().GetHandle()));
+
return true;
}
+void AudioOutputStreamFuchsia::OnMinLeadTimeChanged(int64_t min_lead_time) {
+ min_lead_time_ = base::TimeDelta::FromNanoseconds(min_lead_time);
+
+ // When min_lead_time_ increases we may need to reallocate |payload_buffer_|.
+ // Code below just unmaps the current buffer. The new buffer will be allocated
+ // lated in PumpSamples(). This is necessary because VMO allocation may fail
+ // and it's not possible to report that error here - OnMinLeadTimeChanged()
+ // may be invoked before Start().
+ if (payload_buffer_.mapped_size() > 0 &&
+ GetMinBufferSize() > payload_buffer_.mapped_size()) {
+ payload_buffer_.Unmap();
+ }
+}
+
+void AudioOutputStreamFuchsia::OnRendererError() {
+ LOG(WARNING) << "AudioRenderer has failed.";
+ Stop();
+ callback_->OnError();
+}
+
void AudioOutputStreamFuchsia::PumpSamples() {
- DCHECK(stream_);
+ DCHECK(audio_renderer_);
+
+ // Allocate payload buffer if necessary.
+ if (!payload_buffer_.mapped_size() && !InitializePayloadBuffer()) {
+ Stop();
+ callback_->OnError();
+ return;
+ }
base::TimeTicks now = base::TimeTicks::Now();
- // Reset stream position if:
- // 1. The stream wasn't previously running.
- // 2. We missed timer deadline, e.g. after the system was suspended.
- if (started_time_.is_null() || now > GetCurrentStreamTime()) {
- if (!UpdatePresentationDelay())
- return;
+ base::TimeDelta delay;
+ if (reference_time_.is_null()) {
+ delay = min_lead_time_;
+ } else {
+ auto stream_time = GetCurrentStreamTime();
- started_time_ = base::TimeTicks();
- }
+ // Adjust stream position if we missed timer deadline.
+ if (now + min_lead_time_ > stream_time) {
+ stream_position_samples_ += AudioTimestampHelper::TimeToFrames(
+ now + min_lead_time_ - stream_time, parameters_.sample_rate());
+ }
- base::TimeDelta delay =
- base::TimeDelta::FromMicroseconds(presentation_delay_ns_ / 1000);
- if (!started_time_.is_null())
- delay += GetCurrentStreamTime() - now;
+ delay = stream_time - now;
+ }
int frames_filled = callback_->OnMoreData(delay, now, 0, audio_bus_.get());
DCHECK_EQ(frames_filled, audio_bus_->frames());
audio_bus_->Scale(volume_);
+
+ size_t packet_size = parameters_.GetBytesPerBuffer(kSampleFormatF32);
+ DCHECK_LE(payload_buffer_pos_ + packet_size, payload_buffer_.mapped_size());
+
audio_bus_->ToInterleaved<media::Float32SampleTypeTraits>(
- audio_bus_->frames(), buffer_.data());
-
- do {
- zx_time_t presentation_time = FUCHSIA_AUDIO_NO_TIMESTAMP;
- if (started_time_.is_null()) {
- // Presentation time (PTS) needs to be specified only for the first frame
- // after stream is started or restarted. Mixer will calculate PTS for all
- // following frames. 1us is added to account for the time passed between
- // zx_clock_get() and fuchsia_audio_output_stream_write().
- zx_time_t zx_now = zx_clock_get(ZX_CLOCK_MONOTONIC);
- presentation_time = zx_now + presentation_delay_ns_ + ZX_USEC(1);
- started_time_ = base::TimeTicks::FromZxTime(zx_now);
- stream_position_samples_ = 0;
- }
- int result = fuchsia_audio_output_stream_write(
- stream_, buffer_.data(), buffer_.size(), presentation_time);
- if (result == ZX_ERR_IO_MISSED_DEADLINE) {
- DLOG(ERROR) << "AudioOutputStreamFuchsia::PumpSamples() missed deadline, "
- "resetting PTS.";
- if (!UpdatePresentationDelay())
- return;
- started_time_ = base::TimeTicks();
- } else if (result != ZX_OK) {
- DLOG(ERROR) << "fuchsia_audio_output_stream_write() returned " << result;
- callback_->OnError();
- }
- } while (started_time_.is_null());
+ audio_bus_->frames(),
+ reinterpret_cast<float*>(
+ reinterpret_cast<uint8_t*>(payload_buffer_.memory()) +
+ payload_buffer_pos_));
+
+ fuchsia::media::AudioPacket packet;
+ packet.timestamp = stream_position_samples_;
+ packet.payload_offset = payload_buffer_pos_;
+ packet.payload_size = packet_size;
+ packet.flags = 0;
+
+ audio_renderer_->SendPacketNoReply(std::move(packet));
stream_position_samples_ += frames_filled;
+ payload_buffer_pos_ =
+ (payload_buffer_pos_ + packet_size) % payload_buffer_.mapped_size();
+
+ if (reference_time_.is_null()) {
+ audio_renderer_->Play(
+ fuchsia::media::kNoTimestamp, stream_position_samples_ - frames_filled,
+ [this](int64_t reference_time, int64_t media_time) {
+ if (!callback_)
+ return;
+
+ reference_time_ = base::TimeTicks::FromZxTime(reference_time);
+ stream_position_samples_ = media_time;
+
+ SchedulePumpSamples(base::TimeTicks::Now());
+ });
+ } else {
+ SchedulePumpSamples(now);
+ }
+}
- timer_.Start(FROM_HERE,
- GetCurrentStreamTime() - base::TimeTicks::Now() -
- parameters_.GetBufferDuration() / 2,
+void AudioOutputStreamFuchsia::SchedulePumpSamples(base::TimeTicks now) {
+ base::TimeTicks next_pump_time = GetCurrentStreamTime() - min_lead_time_ -
+ parameters_.GetBufferDuration() / 2;
+ timer_.Start(FROM_HERE, next_pump_time - now,
base::Bind(&AudioOutputStreamFuchsia::PumpSamples,
base::Unretained(this)));
}
diff --git a/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.h b/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.h
index 94da3dc86f5..a6eff582d67 100644
--- a/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.h
+++ b/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.h
@@ -5,8 +5,9 @@
#ifndef MEDIA_AUDIO_FUCHSIA_AUDIO_OUTPUT_STREAM_FUCHSIA_H_
#define MEDIA_AUDIO_FUCHSIA_AUDIO_OUTPUT_STREAM_FUCHSIA_H_
-#include <media/audio.h>
+#include <fuchsia/media/cpp/fidl.h>
+#include "base/memory/shared_memory.h"
#include "base/timer/timer.h"
#include "media/audio/audio_io.h"
#include "media/base/audio_parameters.h"
@@ -19,7 +20,6 @@ class AudioOutputStreamFuchsia : public AudioOutputStream {
public:
// Caller must ensure that manager outlives the stream.
AudioOutputStreamFuchsia(AudioManagerFuchsia* manager,
- const std::string& device_id,
const AudioParameters& parameters);
// AudioOutputStream interface.
@@ -33,35 +33,53 @@ class AudioOutputStreamFuchsia : public AudioOutputStream {
private:
~AudioOutputStreamFuchsia() override;
+ // Returns minimum |payload_buffer_| size for the current |min_lead_time_|.
+ size_t GetMinBufferSize();
+
+ // Allocates and maps |payload_buffer_|.
+ bool InitializePayloadBuffer();
+
base::TimeTicks GetCurrentStreamTime();
- // Updates |presentation_delay_ns_|.
- bool UpdatePresentationDelay();
+ // Event handler for |audio_renderer_|.
+ void OnMinLeadTimeChanged(int64_t min_lead_time);
+
+ // Error handler for |audio_renderer_|.
+ void OnRendererError();
// Requests data from AudioSourceCallback, passes it to the mixer and
// schedules |timer_| for the next call.
void PumpSamples();
+ // Schedules |timer_| to call PumpSamples() when appropriate for the next
+ // packet.
+ void SchedulePumpSamples(base::TimeTicks now);
+
AudioManagerFuchsia* manager_;
- std::string device_id_;
AudioParameters parameters_;
- // These are used only in PumpSamples(). They are kept here to avoid
+ // Audio renderer connection.
+ fuchsia::media::AudioRenderer2Ptr audio_renderer_;
+
+ // |audio_bus_| is used only in PumpSamples(). It is kept here to avoid
// reallocating the memory every time.
std::unique_ptr<AudioBus> audio_bus_;
- std::vector<float> buffer_;
- fuchsia_audio_output_stream* stream_ = nullptr;
+ base::SharedMemory payload_buffer_;
+ size_t payload_buffer_pos_ = 0;
+
AudioSourceCallback* callback_ = nullptr;
double volume_ = 1.0;
- base::TimeTicks started_time_;
- int64_t stream_position_samples_ = 0;
+ base::TimeTicks reference_time_;
+
+ int64_t stream_position_samples_;
- // Total presentation delay for the stream. This value is returned by
- // fuchsia_audio_output_stream_get_min_delay()
- zx_duration_t presentation_delay_ns_ = 0;
+ // Current min lead time for the stream. This value is updated by
+ // AudioRenderer::OnMinLeadTimeChanged event. Assume 50ms until we get the
+ // first OnMinLeadTimeChanged event.
+ base::TimeDelta min_lead_time_ = base::TimeDelta::FromMilliseconds(50);
// Timer that's scheduled to call PumpSamples().
base::OneShotTimer timer_;
diff --git a/chromium/media/audio/mac/audio_manager_mac.cc b/chromium/media/audio/mac/audio_manager_mac.cc
index 66a5622f2d3..74dd21d45b4 100644
--- a/chromium/media/audio/mac/audio_manager_mac.cc
+++ b/chromium/media/audio/mac/audio_manager_mac.cc
@@ -131,7 +131,11 @@ static void GetAudioDeviceInfo(bool is_input,
std::vector<AudioObjectID> device_ids =
core_audio_mac::GetAllAudioDeviceIDs();
for (AudioObjectID device_id : device_ids) {
- if (core_audio_mac::GetNumStreams(device_id, is_input) == 0)
+ const bool is_valid_for_direction =
+ (is_input ? core_audio_mac::IsInputDevice(device_id)
+ : core_audio_mac::IsOutputDevice(device_id));
+
+ if (!is_valid_for_direction)
continue;
base::Optional<std::string> unique_id =
diff --git a/chromium/media/audio/mac/core_audio_util_mac.cc b/chromium/media/audio/mac/core_audio_util_mac.cc
index 3c050b4bc69..fa6dcf24539 100644
--- a/chromium/media/audio/mac/core_audio_util_mac.cc
+++ b/chromium/media/audio/mac/core_audio_util_mac.cc
@@ -4,6 +4,8 @@
#include "media/audio/mac/core_audio_util_mac.h"
+#include <IOKit/audio/IOAudioTypes.h>
+
#include <utility>
#include "base/mac/mac_logging.h"
@@ -84,7 +86,7 @@ uint32_t GetDevicePropertySize(AudioObjectID device_id,
return size;
}
-std::vector<AudioObjectID> GetAudioDeviceIDs(
+std::vector<AudioObjectID> GetAudioObjectIDs(
AudioObjectID audio_object_id,
AudioObjectPropertySelector property_selector) {
AudioObjectPropertyAddress property_address = {
@@ -113,7 +115,7 @@ std::vector<AudioObjectID> GetAudioDeviceIDs(
nullptr /* inQualifierData */, &size, device_ids.data());
if (result != noErr) {
OSSTATUS_DLOG(WARNING, result)
- << "Failed to read devuce IDs for property " << property_selector
+ << "Failed to read object IDs from property " << property_selector
<< " for device/object " << audio_object_id;
return {};
}
@@ -206,12 +208,12 @@ base::Optional<std::string> TranslateDeviceSource(AudioObjectID device_id,
} // namespace
std::vector<AudioObjectID> GetAllAudioDeviceIDs() {
- return GetAudioDeviceIDs(kAudioObjectSystemObject,
+ return GetAudioObjectIDs(kAudioObjectSystemObject,
kAudioHardwarePropertyDevices);
}
std::vector<AudioObjectID> GetRelatedDeviceIDs(AudioObjectID device_id) {
- return GetAudioDeviceIDs(device_id, kAudioDevicePropertyRelatedDevices);
+ return GetAudioObjectIDs(device_id, kAudioDevicePropertyRelatedDevices);
}
base::Optional<std::string> GetDeviceUniqueID(AudioObjectID device_id) {
@@ -307,5 +309,50 @@ bool IsPrivateAggregateDevice(AudioObjectID device_id) {
return is_private;
}
+bool IsInputDevice(AudioObjectID device_id) {
+ std::vector<AudioObjectID> streams =
+ GetAudioObjectIDs(device_id, kAudioDevicePropertyStreams);
+
+ int num_undefined_input_streams = 0;
+ int num_defined_input_streams = 0;
+ int num_output_streams = 0;
+
+ for (auto stream_id : streams) {
+ auto direction =
+ GetDeviceUint32Property(stream_id, kAudioStreamPropertyDirection,
+ kAudioObjectPropertyScopeGlobal);
+ DCHECK(direction.has_value());
+ const UInt32 kDirectionOutput = 0;
+ const UInt32 kDirectionInput = 1;
+ if (direction == kDirectionOutput) {
+ ++num_output_streams;
+ } else if (direction == kDirectionInput) {
+ // Filter input streams based on what terminal it claims to be attached
+ // to. Note that INPUT_UNDEFINED comes from a set of terminals declared
+ // in IOKit. CoreAudio defines a number of terminals in
+ // AudioHardwareBase.h but none of them match any of the values I've
+ // seen used in practice, though I've only tested a few devices.
+ auto terminal =
+ GetDeviceUint32Property(stream_id, kAudioStreamPropertyTerminalType,
+ kAudioObjectPropertyScopeGlobal);
+ if (terminal.has_value() && terminal == INPUT_UNDEFINED) {
+ ++num_undefined_input_streams;
+ } else {
+ ++num_defined_input_streams;
+ }
+ }
+ }
+
+ // I've only seen INPUT_UNDEFINED introduced by the VoiceProcessing AudioUnit,
+ // but to err on the side of caution, let's allow a device with only undefined
+ // input streams and no output streams as well.
+ return num_defined_input_streams > 0 ||
+ (num_undefined_input_streams > 0 && num_output_streams == 0);
+}
+
+bool IsOutputDevice(AudioObjectID device_id) {
+ return GetNumStreams(device_id, false) > 0;
+}
+
} // namespace core_audio_mac
} // namespace media
diff --git a/chromium/media/audio/mac/core_audio_util_mac.h b/chromium/media/audio/mac/core_audio_util_mac.h
index 39c37566104..b8301be3ecc 100644
--- a/chromium/media/audio/mac/core_audio_util_mac.h
+++ b/chromium/media/audio/mac/core_audio_util_mac.h
@@ -52,6 +52,16 @@ base::Optional<uint32_t> GetDeviceTransportType(AudioObjectID device_id);
// AudioUnit.
bool IsPrivateAggregateDevice(AudioObjectID device_id);
+// Returns whether or not the |device_id| corresponds to a device that has valid
+// input streams. When the VoiceProcessing AudioUnit is active, some output
+// devices get an input stream as well. This function tries to filter those out,
+// based on the value of the stream's kAudioStreamPropertyTerminalType value.
+bool IsInputDevice(AudioObjectID device_id);
+
+// Returns whether or not the |device_id| corresponds to a device with output
+// streams.
+bool IsOutputDevice(AudioObjectID device_id);
+
} // namespace core_audio_mac
} // namespace media
diff --git a/chromium/media/audio/mac/coreaudio_dispatch_override.cc b/chromium/media/audio/mac/coreaudio_dispatch_override.cc
index 2ac812b0f3f..34d1ad2a0fb 100644
--- a/chromium/media/audio/mac/coreaudio_dispatch_override.cc
+++ b/chromium/media/audio/mac/coreaudio_dispatch_override.cc
@@ -22,6 +22,9 @@ struct dyld_interpose_tuple {
const void* replacement;
const void* replacee;
};
+
+using DispatchGetGlobalQueueFunc = dispatch_queue_t (*)(long id,
+ unsigned long flags);
} // namespace
// This method, and the tuple above, is defined in dyld_priv.h; see:
@@ -175,8 +178,13 @@ bool InitializeCoreAudioDispatchOverride() {
const auto* header = reinterpret_cast<const mach_header*>(info.dli_fbase);
g_pause_resume_queue =
dispatch_queue_create("org.chromium.CoreAudioPauseResumeQueue", nullptr);
- dyld_interpose_tuple interposition(&GetGlobalQueueOverride,
- &dispatch_get_global_queue);
+ // The reinterpret_cast<> is needed because in the macOS 10.14 SDK, the return
+ // type of dispatch_get_global_queue changed to return a subtype of
+ // dispatch_queue_t* instead of dispatch_queue_t* itself, and T(*)(...) isn't
+ // automatically converted to U(*)(...) even if U is a superclass of T.
+ dyld_interpose_tuple interposition(
+ &GetGlobalQueueOverride,
+ reinterpret_cast<DispatchGetGlobalQueueFunc>(&dispatch_get_global_queue));
dyld_dynamic_interpose(header, &interposition, 1);
g_dispatch_override_installed = true;
LogInitResult(RESULT_INITIALIZED);
diff --git a/chromium/media/audio/pulse/audio_manager_pulse.cc b/chromium/media/audio/pulse/audio_manager_pulse.cc
index fc0ec5d6c4d..3cd51959861 100644
--- a/chromium/media/audio/pulse/audio_manager_pulse.cc
+++ b/chromium/media/audio/pulse/audio_manager_pulse.cc
@@ -4,6 +4,9 @@
#include "media/audio/pulse/audio_manager_pulse.h"
+#include <algorithm>
+#include <utility>
+
#include "base/command_line.h"
#include "base/environment.h"
#include "base/logging.h"
@@ -40,7 +43,8 @@ AudioManagerPulse::AudioManagerPulse(std::unique_ptr<AudioThread> audio_thread,
input_context_(pa_context),
devices_(NULL),
native_input_sample_rate_(0),
- native_channel_count_(0) {
+ native_channel_count_(0),
+ default_source_is_monitor_(false) {
DCHECK(input_mainloop_);
DCHECK(input_context_);
SetMaxOutputStreamsAllowed(kMaxOutputStreams);
@@ -107,6 +111,12 @@ AudioParameters AudioManagerPulse::GetInputStreamParameters(
// TODO(xians): add support for querying native channel layout for pulse.
UpdateNativeAudioHardwareInfo();
+ // We don't want to accidentally open a monitor device, so return invalid
+ // parameters for those.
+ if (device_id == AudioDeviceDescription::kDefaultDeviceId &&
+ default_source_is_monitor_) {
+ return AudioParameters();
+ }
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
CHANNEL_LAYOUT_STEREO, native_input_sample_rate_,
buffer_size);
@@ -149,6 +159,46 @@ AudioInputStream* AudioManagerPulse::MakeLowLatencyInputStream(
return MakeInputStream(params, device_id);
}
+std::string AudioManagerPulse::GetDefaultInputDeviceID() {
+#if defined(OS_CHROMEOS)
+ return AudioManagerBase::GetDefaultInputDeviceID();
+#else
+ return pulse::GetRealDefaultDeviceId(input_mainloop_, input_context_,
+ pulse::RequestType::INPUT);
+#endif
+}
+
+std::string AudioManagerPulse::GetDefaultOutputDeviceID() {
+#if defined(OS_CHROMEOS)
+ return AudioManagerBase::GetDefaultOutputDeviceID();
+#else
+ return pulse::GetRealDefaultDeviceId(input_mainloop_, input_context_,
+ pulse::RequestType::OUTPUT);
+#endif
+}
+
+std::string AudioManagerPulse::GetAssociatedOutputDeviceID(
+ const std::string& input_device_id) {
+#if defined(OS_CHROMEOS)
+ return AudioManagerBase::GetAssociatedOutputDeviceID(input_device_id);
+#else
+ DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
+ DCHECK(input_mainloop_);
+ DCHECK(input_context_);
+ std::string input =
+ (input_device_id == AudioDeviceDescription::kDefaultDeviceId)
+ ? pulse::GetRealDefaultDeviceId(input_mainloop_, input_context_,
+ pulse::RequestType::INPUT)
+ : input_device_id;
+
+ std::string input_bus =
+ pulse::GetBusOfInput(input_mainloop_, input_context_, input);
+ return input_bus.empty() ? ""
+ : pulse::GetOutputCorrespondingTo(
+ input_mainloop_, input_context_, input_bus);
+#endif
+}
+
AudioParameters AudioManagerPulse::GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) {
@@ -203,6 +253,10 @@ void AudioManagerPulse::UpdateNativeAudioHardwareInfo() {
pa_operation* operation = pa_context_get_server_info(
input_context_, AudioHardwareInfoCallback, this);
WaitForOperationCompletion(input_mainloop_, operation);
+ operation = pa_context_get_source_info_by_name(
+ input_context_, default_source_name_.c_str(), DefaultSourceInfoCallback,
+ this);
+ WaitForOperationCompletion(input_mainloop_, operation);
}
void AudioManagerPulse::InputDevicesInfoCallback(pa_context* context,
@@ -216,11 +270,22 @@ void AudioManagerPulse::InputDevicesInfoCallback(pa_context* context,
return;
}
- // Exclude the output devices.
- if (info->monitor_of_sink == PA_INVALID_INDEX) {
- manager->devices_->push_back(AudioDeviceName(info->description,
- info->name));
+ // Exclude output monitor (i.e. loopback) devices.
+ if (info->monitor_of_sink != PA_INVALID_INDEX)
+ return;
+
+ // If the device has ports, but none of them are available, skip it.
+ if (info->n_ports > 0) {
+ uint32_t port = 0;
+ for (; port != info->n_ports; ++port) {
+ if (info->ports[port]->available != PA_PORT_AVAILABLE_NO)
+ break;
+ }
+ if (port == info->n_ports)
+ return;
}
+
+ manager->devices_->push_back(AudioDeviceName(info->description, info->name));
}
void AudioManagerPulse::OutputDevicesInfoCallback(pa_context* context,
@@ -245,7 +310,24 @@ void AudioManagerPulse::AudioHardwareInfoCallback(pa_context* context,
manager->native_input_sample_rate_ = info->sample_spec.rate;
manager->native_channel_count_ = info->sample_spec.channels;
+ manager->default_source_name_ = info->default_source_name;
pa_threaded_mainloop_signal(manager->input_mainloop_, 0);
}
+void AudioManagerPulse::DefaultSourceInfoCallback(pa_context* context,
+ const pa_source_info* info,
+ int eol,
+ void* user_data) {
+ AudioManagerPulse* manager = reinterpret_cast<AudioManagerPulse*>(user_data);
+ if (eol) {
+ // Signal the pulse object that it is done.
+ pa_threaded_mainloop_signal(manager->input_mainloop_, 0);
+ return;
+ }
+
+ DCHECK(info);
+ manager->default_source_is_monitor_ =
+ info->monitor_of_sink != PA_INVALID_INDEX;
+}
+
} // namespace media
diff --git a/chromium/media/audio/pulse/audio_manager_pulse.h b/chromium/media/audio/pulse/audio_manager_pulse.h
index 316eec4a534..2e5c42468ac 100644
--- a/chromium/media/audio/pulse/audio_manager_pulse.h
+++ b/chromium/media/audio/pulse/audio_manager_pulse.h
@@ -6,6 +6,8 @@
#define MEDIA_AUDIO_PULSE_AUDIO_MANAGER_PULSE_H_
#include <pulse/pulseaudio.h>
+
+#include <memory>
#include <string>
#include "base/compiler_specific.h"
@@ -48,6 +50,12 @@ class MEDIA_EXPORT AudioManagerPulse : public AudioManagerBase {
const AudioParameters& params,
const std::string& device_id,
const LogCallback& log_callback) override;
+ std::string GetDefaultInputDeviceID() override;
+ std::string GetDefaultOutputDeviceID() override;
+ std::string GetAssociatedOutputDeviceID(
+ const std::string& input_device_id) override;
+
+ bool DefaultSourceIsMonitor() const { return default_source_is_monitor_; }
protected:
void ShutdownOnAudioThread() override;
@@ -72,6 +80,11 @@ class MEDIA_EXPORT AudioManagerPulse : public AudioManagerBase {
const pa_server_info* info,
void* user_data);
+ static void DefaultSourceInfoCallback(pa_context* context,
+ const pa_source_info* info,
+ int eol,
+ void* user_data);
+
// Called by MakeLinearOutputStream and MakeLowLatencyOutputStream.
AudioOutputStream* MakeOutputStream(const AudioParameters& params,
const std::string& device_id);
@@ -88,6 +101,8 @@ class MEDIA_EXPORT AudioManagerPulse : public AudioManagerBase {
AudioDeviceNames* devices_;
int native_input_sample_rate_;
int native_channel_count_;
+ std::string default_source_name_;
+ bool default_source_is_monitor_;
DISALLOW_COPY_AND_ASSIGN(AudioManagerPulse);
};
diff --git a/chromium/media/audio/pulse/pulse.sigs b/chromium/media/audio/pulse/pulse.sigs
index 522efcb57a4..8b5888786a9 100644
--- a/chromium/media/audio/pulse/pulse.sigs
+++ b/chromium/media/audio/pulse/pulse.sigs
@@ -9,7 +9,7 @@ pa_mainloop_api* pa_threaded_mainloop_get_api(pa_threaded_mainloop* m);
void pa_threaded_mainloop_free(pa_threaded_mainloop* m);
pa_threaded_mainloop* pa_threaded_mainloop_new();
void pa_threaded_mainloop_lock(pa_threaded_mainloop* m);
-int pa_threaded_mainloop_in_thread(pa_threaded_mainloop* m);
+int pa_threaded_mainloop_in_thread(pa_threaded_mainloop* m);
void pa_threaded_mainloop_signal(pa_threaded_mainloop* m, int wait_for_accept);
int pa_threaded_mainloop_start(pa_threaded_mainloop* m);
void pa_threaded_mainloop_stop(pa_threaded_mainloop* m);
@@ -21,8 +21,9 @@ int pa_context_connect(pa_context* c, const char* server, pa_context_flags_t fla
void pa_context_disconnect(pa_context* c);
pa_operation* pa_context_get_server_info(pa_context* c, pa_server_info_cb_t cb, void* userdata);
pa_operation* pa_context_get_source_info_by_index(pa_context* c, uint32_t idx, pa_source_info_cb_t cb, void* userdata);
+pa_operation* pa_context_get_source_info_by_name(pa_context* c, const char* name, pa_source_info_cb_t cb, void *userdata);
pa_operation* pa_context_get_source_info_list(pa_context* c, pa_source_info_cb_t cb, void* userdata);
-pa_operation* pa_context_get_sink_info_list(pa_context* c, pa_sink_info_cb_t cb, void* userdata);
+pa_operation* pa_context_get_sink_info_list(pa_context* c, pa_sink_info_cb_t cb, void* userdata);
pa_context_state_t pa_context_get_state(pa_context* c);
pa_context* pa_context_new(pa_mainloop_api* mainloop, const char* name);
pa_operation* pa_context_set_source_volume_by_index(pa_context* c, uint32_t idx, const pa_cvolume* volume, pa_context_success_cb_t cb, void* userdata);
@@ -43,7 +44,9 @@ pa_stream_state_t pa_stream_get_state(pa_stream* p);
pa_stream* pa_stream_new(pa_context* c, const char* name, const pa_sample_spec* ss, const pa_channel_map * map);
pa_stream* pa_stream_new_with_proplist(pa_context* c, const char* name, const pa_sample_spec* ss, const pa_channel_map* map, pa_proplist* p);
pa_proplist* pa_proplist_new(void);
+int pa_proplist_contains(pa_proplist* p, const char* key);
void pa_proplist_free(pa_proplist* p);
+const char* pa_proplist_gets(pa_proplist* p, const char* key);
int pa_proplist_sets(pa_proplist* p, const char* key, const char* value);
size_t pa_stream_readable_size(pa_stream *p);
int pa_stream_peek(pa_stream* p, const void** data, size_t* nbytes);
diff --git a/chromium/media/audio/pulse/pulse_input.cc b/chromium/media/audio/pulse/pulse_input.cc
index a342eb76ab9..f2b6d929cf1 100644
--- a/chromium/media/audio/pulse/pulse_input.cc
+++ b/chromium/media/audio/pulse/pulse_input.cc
@@ -52,6 +52,10 @@ PulseAudioInputStream::~PulseAudioInputStream() {
bool PulseAudioInputStream::Open() {
DCHECK(thread_checker_.CalledOnValidThread());
+ if (device_name_ == AudioDeviceDescription::kDefaultDeviceId &&
+ audio_manager_->DefaultSourceIsMonitor())
+ return false;
+
AutoPulseLock auto_lock(pa_mainloop_);
if (!pulse::CreateInputStream(pa_mainloop_, pa_context_, &handle_, params_,
device_name_, &StreamNotifyCallback, this)) {
diff --git a/chromium/media/audio/pulse/pulse_util.cc b/chromium/media/audio/pulse/pulse_util.cc
index 372e73dccb0..f2e686add5f 100644
--- a/chromium/media/audio/pulse/pulse_util.cc
+++ b/chromium/media/audio/pulse/pulse_util.cc
@@ -5,6 +5,9 @@
#include "media/audio/pulse/pulse_util.h"
#include <stdint.h>
+#include <string.h>
+
+#include <memory>
#include "base/files/file_path.h"
#include "base/logging.h"
@@ -93,6 +96,77 @@ class ScopedPropertyList {
DISALLOW_COPY_AND_ASSIGN(ScopedPropertyList);
};
+struct InputBusData {
+ InputBusData(pa_threaded_mainloop* loop, const std::string& name)
+ : loop_(loop), name_(name), bus_() {}
+
+ pa_threaded_mainloop* const loop_;
+ const std::string& name_;
+ std::string bus_;
+};
+
+struct OutputBusData {
+ OutputBusData(pa_threaded_mainloop* loop, const std::string& bus)
+ : loop_(loop), name_(), bus_(bus) {}
+
+ pa_threaded_mainloop* const loop_;
+ std::string name_;
+ const std::string& bus_;
+};
+
+void InputBusCallback(pa_context* context,
+ const pa_source_info* info,
+ int error,
+ void* user_data) {
+ InputBusData* data = static_cast<InputBusData*>(user_data);
+
+ if (error) {
+ // We have checked all the devices now.
+ pa_threaded_mainloop_signal(data->loop_, 0);
+ return;
+ }
+
+ if (strcmp(info->name, data->name_.c_str()) == 0 &&
+ pa_proplist_contains(info->proplist, PA_PROP_DEVICE_BUS)) {
+ data->bus_ = pa_proplist_gets(info->proplist, PA_PROP_DEVICE_BUS);
+ }
+}
+
+void OutputBusCallback(pa_context* context,
+ const pa_sink_info* info,
+ int error,
+ void* user_data) {
+ OutputBusData* data = static_cast<OutputBusData*>(user_data);
+
+ if (error) {
+ // We have checked all the devices now.
+ pa_threaded_mainloop_signal(data->loop_, 0);
+ return;
+ }
+
+ if (pa_proplist_contains(info->proplist, PA_PROP_DEVICE_BUS) &&
+ strcmp(pa_proplist_gets(info->proplist, PA_PROP_DEVICE_BUS),
+ data->bus_.c_str()) == 0) {
+ data->name_ = info->name;
+ }
+}
+
+struct DefaultDevicesData {
+ explicit DefaultDevicesData(pa_threaded_mainloop* loop) : loop_(loop) {}
+ std::string input_;
+ std::string output_;
+ pa_threaded_mainloop* const loop_;
+};
+
+void GetDefaultDeviceIdCallback(pa_context* c,
+ const pa_server_info* info,
+ void* userdata) {
+ DefaultDevicesData* data = static_cast<DefaultDevicesData*>(userdata);
+ data->input_ = info->default_source_name;
+ data->output_ = info->default_sink_name;
+ pa_threaded_mainloop_signal(data->loop_, 0);
+}
+
} // namespace
bool InitPulse(pa_threaded_mainloop** mainloop, pa_context** context) {
@@ -448,6 +522,45 @@ bool CreateOutputStream(pa_threaded_mainloop** mainloop,
return true;
}
+std::string GetBusOfInput(pa_threaded_mainloop* mainloop,
+ pa_context* context,
+ const std::string& name) {
+ DCHECK(mainloop);
+ DCHECK(context);
+ AutoPulseLock auto_lock(mainloop);
+ InputBusData data(mainloop, name);
+ pa_operation* operation =
+ pa_context_get_source_info_list(context, InputBusCallback, &data);
+ WaitForOperationCompletion(mainloop, operation);
+ return data.bus_;
+}
+
+std::string GetOutputCorrespondingTo(pa_threaded_mainloop* mainloop,
+ pa_context* context,
+ const std::string& bus) {
+ DCHECK(mainloop);
+ DCHECK(context);
+ AutoPulseLock auto_lock(mainloop);
+ OutputBusData data(mainloop, bus);
+ pa_operation* operation =
+ pa_context_get_sink_info_list(context, OutputBusCallback, &data);
+ WaitForOperationCompletion(mainloop, operation);
+ return data.name_;
+}
+
+std::string GetRealDefaultDeviceId(pa_threaded_mainloop* mainloop,
+ pa_context* context,
+ RequestType type) {
+ DCHECK(mainloop);
+ DCHECK(context);
+ AutoPulseLock auto_lock(mainloop);
+ DefaultDevicesData data(mainloop);
+ pa_operation* operation =
+ pa_context_get_server_info(context, &GetDefaultDeviceIdCallback, &data);
+ WaitForOperationCompletion(mainloop, operation);
+ return (type == RequestType::INPUT) ? data.input_ : data.output_;
+}
+
#undef RETURN_ON_FAILURE
} // namespace pulse
diff --git a/chromium/media/audio/pulse/pulse_util.h b/chromium/media/audio/pulse/pulse_util.h
index 678fc82d341..448ee7e668b 100644
--- a/chromium/media/audio/pulse/pulse_util.h
+++ b/chromium/media/audio/pulse/pulse_util.h
@@ -21,6 +21,8 @@ class AudioParameters;
namespace pulse {
+enum class RequestType : int8_t { INPUT, OUTPUT };
+
// A helper class that acquires pa_threaded_mainloop_lock() while in scope.
class AutoPulseLock {
public:
@@ -81,6 +83,16 @@ bool CreateOutputStream(pa_threaded_mainloop** mainloop,
pa_stream_request_cb_t write_callback,
void* user_data);
+// Utility functions to match up outputs and inputs.
+std::string GetBusOfInput(pa_threaded_mainloop* mainloop,
+ pa_context* context,
+ const std::string& name);
+std::string GetOutputCorrespondingTo(pa_threaded_mainloop* mainloop,
+ pa_context* context,
+ const std::string& bus);
+std::string GetRealDefaultDeviceId(pa_threaded_mainloop* mainloop,
+ pa_context* context,
+ RequestType type);
} // namespace pulse
} // namespace media
diff --git a/chromium/media/audio/test_audio_input_controller_factory.cc b/chromium/media/audio/test_audio_input_controller_factory.cc
deleted file mode 100644
index 0f1c5ebdaf9..00000000000
--- a/chromium/media/audio/test_audio_input_controller_factory.cc
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/audio/test_audio_input_controller_factory.h"
-
-#include <utility>
-
-#include "media/audio/audio_io.h"
-#include "media/audio/audio_manager.h"
-
-namespace media {
-
-TestAudioInputController::TestAudioInputController(
- TestAudioInputControllerFactory* factory,
- AudioManager* audio_manager,
- const AudioParameters& audio_parameters,
- EventHandler* event_handler,
- SyncWriter* sync_writer,
- UserInputMonitor* user_input_monitor,
- StreamType type)
- : AudioInputController(audio_manager->GetTaskRunner(),
- event_handler,
- sync_writer,
- user_input_monitor,
- audio_parameters,
- type),
- audio_parameters_(audio_parameters),
- factory_(factory),
- event_handler_(event_handler),
- sync_writer_(sync_writer) {}
-
-TestAudioInputController::~TestAudioInputController() {
- // Inform the factory so that it allows creating new instances in future.
- factory_->OnTestAudioInputControllerDestroyed(this);
-}
-
-void TestAudioInputController::Record() {
- if (factory_->delegate_)
- factory_->delegate_->TestAudioControllerOpened(this);
-}
-
-void TestAudioInputController::Close(base::OnceClosure closed_task) {
- GetTaskRunnerForTesting()->PostTask(FROM_HERE, std::move(closed_task));
- if (factory_->delegate_)
- factory_->delegate_->TestAudioControllerClosed(this);
-}
-
-TestAudioInputControllerFactory::TestAudioInputControllerFactory()
- : controller_(NULL),
- delegate_(NULL) {
-}
-
-TestAudioInputControllerFactory::~TestAudioInputControllerFactory() {
- DCHECK(!controller_);
-}
-
-AudioInputController* TestAudioInputControllerFactory::Create(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
- AudioInputController::SyncWriter* sync_writer,
- AudioManager* audio_manager,
- AudioInputController::EventHandler* event_handler,
- AudioParameters params,
- UserInputMonitor* user_input_monitor,
- AudioInputController::StreamType type) {
- DCHECK(!controller_); // Only one test instance managed at a time.
- controller_ =
- new TestAudioInputController(this, audio_manager, params, event_handler,
- sync_writer, user_input_monitor, type);
- return controller_;
-}
-
-void TestAudioInputControllerFactory::OnTestAudioInputControllerDestroyed(
- TestAudioInputController* controller) {
- DCHECK_EQ(controller_, controller);
- controller_ = NULL;
-}
-
-} // namespace media
diff --git a/chromium/media/audio/test_audio_input_controller_factory.h b/chromium/media/audio/test_audio_input_controller_factory.h
deleted file mode 100644
index 37ac3c9ab04..00000000000
--- a/chromium/media/audio/test_audio_input_controller_factory.h
+++ /dev/null
@@ -1,141 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_AUDIO_TEST_AUDIO_INPUT_CONTROLLER_FACTORY_H_
-#define MEDIA_AUDIO_TEST_AUDIO_INPUT_CONTROLLER_FACTORY_H_
-
-#include "base/bind.h"
-#include "base/macros.h"
-#include "base/single_thread_task_runner.h"
-#include "media/audio/audio_input_controller.h"
-#include "media/base/audio_parameters.h"
-
-namespace media {
-
-class UserInputMonitor;
-class TestAudioInputControllerFactory;
-
-// TestAudioInputController and TestAudioInputControllerFactory are used for
-// testing consumers of AudioInputController. TestAudioInputControllerFactory
-// is a AudioInputController::Factory that creates TestAudioInputControllers.
-//
-// TestAudioInputController::Record and Close are overriden to do nothing. It is
-// expected that you'll grab the EventHandler from the TestAudioInputController
-// and invoke the callback methods when appropriate. In this way it's easy to
-// mock a AudioInputController.
-//
-// Typical usage:
-// // Create and register factory.
-// TestAudioInputControllerFactory factory;
-// AudioInputController::set_factory_for_testing(&factory);
-//
-// // Do something that triggers creation of an AudioInputController.
-// TestAudioInputController* controller = factory.last_controller();
-// DCHECK(controller);
-//
-// // Notify event handler with whatever data you want.
-// controller->event_handler()->OnCreated(...);
-//
-// // Do something that triggers AudioInputController::Record to be called.
-// controller->event_handler()->OnData(...);
-// controller->event_handler()->OnError(...);
-//
-// // Make sure consumer of AudioInputController does the right thing.
-// ...
-// // Reset factory.
-// AudioInputController::set_factory_for_testing(NULL);
-
-class TestAudioInputController : public AudioInputController {
- public:
- class Delegate {
- public:
- virtual void TestAudioControllerOpened(
- TestAudioInputController* controller) = 0;
- virtual void TestAudioControllerClosed(
- TestAudioInputController* controller) = 0;
- };
-
- TestAudioInputController(TestAudioInputControllerFactory* factory,
- AudioManager* audio_manager,
- const AudioParameters& audio_parameters,
- EventHandler* event_handler,
- SyncWriter* sync_writer,
- UserInputMonitor* user_input_monitor,
- StreamType type);
-
- // Returns the event handler installed on the AudioInputController.
- EventHandler* event_handler() const { return event_handler_; }
-
- // Returns a pointer to the audio callback for the AudioInputController.
- SyncWriter* sync_writer() const { return sync_writer_; }
-
- // Notifies the TestAudioControllerOpened() event to the delegate (if any).
- void Record() override;
-
- // Ensure that the closure is run on the audio-manager thread.
- void Close(base::OnceClosure closed_task) override;
-
- const AudioParameters& audio_parameters() const {
- return audio_parameters_;
- }
-
- protected:
- ~TestAudioInputController() override;
-
- private:
- AudioParameters audio_parameters_;
-
- // These are not owned by us and expected to be valid for this object's
- // lifetime.
- TestAudioInputControllerFactory* factory_;
- EventHandler* const event_handler_;
- SyncWriter* const sync_writer_;
-
- DISALLOW_COPY_AND_ASSIGN(TestAudioInputController);
-};
-
-typedef TestAudioInputController::Delegate TestAudioInputControllerDelegate;
-
-// Simple AudioInputController::Factory method that creates
-// TestAudioInputControllers.
-class TestAudioInputControllerFactory : public AudioInputController::Factory {
- public:
- TestAudioInputControllerFactory();
- ~TestAudioInputControllerFactory() override;
-
- // AudioInputController::Factory methods.
- AudioInputController* Create(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
- AudioInputController::SyncWriter* sync_writer,
- AudioManager* audio_manager,
- AudioInputController::EventHandler* event_handler,
- AudioParameters params,
- UserInputMonitor* user_input_monitor,
- AudioInputController::StreamType type) override;
-
- void set_delegate(TestAudioInputControllerDelegate* delegate) {
- delegate_ = delegate;
- }
-
- TestAudioInputController* controller() const { return controller_; }
-
- private:
- friend class TestAudioInputController;
-
- // Invoked by a TestAudioInputController when it gets destroyed.
- void OnTestAudioInputControllerDestroyed(
- TestAudioInputController* controller);
-
- // The caller of Create owns this object.
- TestAudioInputController* controller_;
-
- // The delegate for tests for receiving audio controller events.
- TestAudioInputControllerDelegate* delegate_;
-
- DISALLOW_COPY_AND_ASSIGN(TestAudioInputControllerFactory);
-};
-
-} // namespace media
-
-#endif // MEDIA_AUDIO_TEST_AUDIO_INPUT_CONTROLLER_FACTORY_H_
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.cc b/chromium/media/audio/win/audio_low_latency_input_win.cc
index 10d49f911e3..854bcd5f965 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win.cc
@@ -683,6 +683,14 @@ void WASAPIAudioInputStream::PullCaptureDataAndPushToSink() {
TRACE_EVENT1("audio", "WASAPIAudioInputStream::PullCaptureDataAndPushToSink",
"sample rate", input_format_.nSamplesPerSec);
+ // Used for storing information when we need to accumulate before checking for
+ // glitches. We don't accumulate over loop edges (i.e. when we exit this
+ // function).
+ UINT64 last_device_position = 0;
+ UINT32 accumulated_frames = 0;
+ DWORD accumulated_discontinuity_flag = 0;
+ UINT64 accumulated_capture_time_100ns = 0;
+
// Pull data from the capture endpoint buffer until it's empty or an error
// occurs.
while (true) {
@@ -703,14 +711,23 @@ void WASAPIAudioInputStream::PullCaptureDataAndPushToSink() {
HRESULT hr =
audio_capture_client_->GetBuffer(&data_ptr, &num_frames_to_read, &flags,
&device_position, &capture_time_100ns);
- if (hr == AUDCLNT_S_BUFFER_EMPTY)
+ accumulated_frames += num_frames_to_read;
+ accumulated_discontinuity_flag |=
+ flags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY;
+ // Store the first capture time in a sequence of accumulated buffers.
+ if (accumulated_capture_time_100ns == 0)
+ accumulated_capture_time_100ns = capture_time_100ns;
+
+ if (hr == AUDCLNT_S_BUFFER_EMPTY) {
+ if (accumulated_frames > 0) {
+ ReportDelayStatsAndUpdateGlitchCount(
+ accumulated_frames, accumulated_discontinuity_flag,
+ last_device_position,
+ base::TimeTicks() + CoreAudioUtil::ReferenceTimeToTimeDelta(
+ accumulated_capture_time_100ns));
+ }
break;
-
- ReportDelayStatsAndUpdateGlitchCount(
- num_frames_to_read, flags & AUDCLNT_BUFFERFLAGS_DATA_DISCONTINUITY,
- device_position,
- base::TimeTicks() +
- base::TimeDelta::FromMicroseconds(capture_time_100ns / 10.0));
+ }
// TODO(grunell): Should we handle different errors explicitly? Perhaps exit
// by setting |error = true|. What are the assumptions here that makes us
@@ -721,6 +738,17 @@ void WASAPIAudioInputStream::PullCaptureDataAndPushToSink() {
break;
}
+ if (device_position != last_device_position) {
+ ReportDelayStatsAndUpdateGlitchCount(
+ accumulated_frames, accumulated_discontinuity_flag, device_position,
+ base::TimeTicks() + CoreAudioUtil::ReferenceTimeToTimeDelta(
+ accumulated_capture_time_100ns));
+ last_device_position = device_position;
+ accumulated_frames = 0;
+ accumulated_discontinuity_flag = false;
+ accumulated_capture_time_100ns = 0;
+ }
+
// TODO(dalecurtis, olka, grunell): Is this ever false? If it is, should we
// handle |flags & AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR|?
if (audio_clock_) {
diff --git a/chromium/media/audio/win/core_audio_util_win.cc b/chromium/media/audio/win/core_audio_util_win.cc
index 4e4a534166a..59869e9a879 100644
--- a/chromium/media/audio/win/core_audio_util_win.cc
+++ b/chromium/media/audio/win/core_audio_util_win.cc
@@ -5,7 +5,6 @@
#include "media/audio/win/core_audio_util_win.h"
#include <devicetopology.h>
-#include <dxdiag.h>
#include <functiondiscoverykeys_devpkey.h>
#include <objbase.h>
#include <stddef.h>
@@ -923,59 +922,6 @@ bool CoreAudioUtil::FillRenderEndpointBufferWithSilence(
return true;
}
-bool CoreAudioUtil::GetDxDiagDetails(std::string* driver_name,
- std::string* driver_version) {
- ComPtr<IDxDiagProvider> provider;
- HRESULT hr =
- ::CoCreateInstance(CLSID_DxDiagProvider, NULL, CLSCTX_INPROC_SERVER,
- IID_IDxDiagProvider, &provider);
- if (FAILED(hr))
- return false;
-
- DXDIAG_INIT_PARAMS params = {sizeof(params)};
- params.dwDxDiagHeaderVersion = DXDIAG_DX9_SDK_VERSION;
- params.bAllowWHQLChecks = FALSE;
- params.pReserved = NULL;
- hr = provider->Initialize(&params);
- if (FAILED(hr))
- return false;
-
- ComPtr<IDxDiagContainer> root;
- hr = provider->GetRootContainer(root.GetAddressOf());
- if (FAILED(hr))
- return false;
-
- // Limit to the SoundDevices subtree. The tree in its entirity is
- // enormous and only this branch contains useful information.
- ComPtr<IDxDiagContainer> sound_devices;
- hr = root->GetChildContainer(L"DxDiag_DirectSound.DxDiag_SoundDevices.0",
- sound_devices.GetAddressOf());
- if (FAILED(hr))
- return false;
-
- base::win::ScopedVariant variant;
- hr = sound_devices->GetProp(L"szDriverName", variant.Receive());
- if (FAILED(hr))
- return false;
-
- if (variant.type() == VT_BSTR && variant.ptr()->bstrVal) {
- base::WideToUTF8(variant.ptr()->bstrVal, wcslen(variant.ptr()->bstrVal),
- driver_name);
- }
-
- variant.Reset();
- hr = sound_devices->GetProp(L"szDriverVersion", variant.Receive());
- if (FAILED(hr))
- return false;
-
- if (variant.type() == VT_BSTR && variant.ptr()->bstrVal) {
- base::WideToUTF8(variant.ptr()->bstrVal, wcslen(variant.ptr()->bstrVal),
- driver_version);
- }
-
- return true;
-}
-
HRESULT CoreAudioUtil::GetDeviceCollectionIndex(const std::string& device_id,
EDataFlow data_flow,
WORD* index) {
diff --git a/chromium/media/audio/win/core_audio_util_win.h b/chromium/media/audio/win/core_audio_util_win.h
index 3b7226f5a01..6a2b0dabb65 100644
--- a/chromium/media/audio/win/core_audio_util_win.h
+++ b/chromium/media/audio/win/core_audio_util_win.h
@@ -207,11 +207,6 @@ class MEDIA_EXPORT CoreAudioUtil {
static bool FillRenderEndpointBufferWithSilence(
IAudioClient* client, IAudioRenderClient* render_client);
- // Returns the default audio driver file name and version string according to
- // DxDiag. Used for crash reporting. Can be slow (~seconds).
- static bool GetDxDiagDetails(std::string* driver_name,
- std::string* driver_version);
-
// Gets the device collection index for the device specified by |device_id|.
// If the device is found in the device collection, the index is written to
// |*index| and S_OK is returned. If the device is not found, S_FALSE is
diff --git a/chromium/media/audio/win/core_audio_util_win_unittest.cc b/chromium/media/audio/win/core_audio_util_win_unittest.cc
index 3bd9d3ac08f..73714067b4c 100644
--- a/chromium/media/audio/win/core_audio_util_win_unittest.cc
+++ b/chromium/media/audio/win/core_audio_util_win_unittest.cc
@@ -10,7 +10,7 @@
#include "base/macros.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/waitable_event.h"
-#include "base/test/histogram_tester.h"
+#include "base/test/metrics/histogram_tester.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_com_initializer.h"
#include "base/win/scoped_handle.h"
@@ -43,14 +43,6 @@ class CoreAudioUtilWinTest : public ::testing::Test {
ScopedCOMInitializer com_init_;
};
-TEST_F(CoreAudioUtilWinTest, GetDxDiagDetails) {
- ABORT_AUDIO_TEST_IF_NOT(DevicesAvailable());
- std::string name, version;
- ASSERT_TRUE(CoreAudioUtil::GetDxDiagDetails(&name, &version));
- EXPECT_TRUE(!name.empty());
- EXPECT_TRUE(!version.empty());
-}
-
TEST_F(CoreAudioUtilWinTest, NumberOfActiveDevices) {
ABORT_AUDIO_TEST_IF_NOT(DevicesAvailable());
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index f0fa3368e92..ac60e933669 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -162,6 +162,7 @@ source_set("base") {
"media_client.h",
"media_content_type.cc",
"media_content_type.h",
+ "media_controller.h",
"media_export.h",
"media_log.cc",
"media_log.h",
@@ -172,6 +173,9 @@ source_set("base") {
"media_permission.h",
"media_resource.cc",
"media_resource.h",
+ "media_status.cc",
+ "media_status.h",
+ "media_status_observer.h",
"media_switches.cc",
"media_switches.h",
"media_track.cc",
@@ -233,7 +237,6 @@ source_set("base") {
"stream_parser_buffer.h",
"subsample_entry.cc",
"subsample_entry.h",
- "surface_manager.h",
"text_cue.cc",
"text_cue.h",
"text_ranges.cc",
@@ -263,6 +266,8 @@ source_set("base") {
"video_decoder_config.h",
"video_frame.cc",
"video_frame.h",
+ "video_frame_layout.cc",
+ "video_frame_layout.h",
"video_frame_metadata.cc",
"video_frame_metadata.h",
"video_frame_pool.cc",
@@ -351,6 +356,14 @@ source_set("base") {
]
}
+ # Note: should also work on is_posix || is_fuchsia
+ if (is_linux) {
+ sources += [
+ "scopedfd_helper.cc",
+ "scopedfd_helper.h",
+ ]
+ }
+
if (is_win) {
deps += [ "//media/base/win" ]
}
@@ -478,6 +491,7 @@ source_set("unit_tests") {
"feedback_signal_accumulator_unittest.cc",
"gmock_callback_support_unittest.cc",
"key_systems_unittest.cc",
+ "media_log_unittest.cc",
"media_url_demuxer_unittest.cc",
"mime_util_unittest.cc",
"moving_average_unittest.cc",
@@ -502,6 +516,7 @@ source_set("unit_tests") {
"video_codecs_unittest.cc",
"video_color_space_unittest.cc",
"video_decoder_config_unittest.cc",
+ "video_frame_layout_unittest.cc",
"video_frame_pool_unittest.cc",
"video_frame_unittest.cc",
"video_util_unittest.cc",
diff --git a/chromium/media/base/OWNERS b/chromium/media/base/OWNERS
index c83a845a813..f4ecf46c4ae 100644
--- a/chromium/media/base/OWNERS
+++ b/chromium/media/base/OWNERS
@@ -1 +1,4 @@
per-file *audio*=file://media/audio/OWNERS
+
+per-file media_switches.*=beccahughes@chromium.org
+per-file media_switches.*=mlamouri@chromium.org
diff --git a/chromium/media/base/android/media_codec_bridge_impl_unittest.cc b/chromium/media/base/android/media_codec_bridge_impl_unittest.cc
index 33b8097a9bd..e95a8c50c63 100644
--- a/chromium/media/base/android/media_codec_bridge_impl_unittest.cc
+++ b/chromium/media/base/android/media_codec_bridge_impl_unittest.cc
@@ -444,7 +444,19 @@ TEST(MediaCodecBridgeTest, H264VideoEncodeAndValidate) {
const int bit_rate = 300000;
const int frame_rate = 30;
const int i_frame_interval = 20;
- const int color_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
+ const std::set<int> supported_color_formats =
+ MediaCodecUtil::GetEncoderColorFormats("video/avc");
+
+ int color_format;
+ if (supported_color_formats.count(COLOR_FORMAT_YUV420_SEMIPLANAR) > 0) {
+ color_format = COLOR_FORMAT_YUV420_SEMIPLANAR;
+ } else if (supported_color_formats.count(COLOR_FORMAT_YUV420_PLANAR) > 0) {
+ color_format = COLOR_FORMAT_YUV420_PLANAR;
+ } else {
+ VLOG(0) << "Could not run test - YUV420_PLANAR and YUV420_SEMIPLANAR "
+ "unavailable for h264 encode.";
+ return;
+ }
std::unique_ptr<MediaCodecBridge> media_codec(
MediaCodecBridgeImpl::CreateVideoEncoder(
diff --git a/chromium/media/base/android/media_codec_loop.cc b/chromium/media/base/android/media_codec_loop.cc
index 21c98d2323d..eebc342969c 100644
--- a/chromium/media/base/android/media_codec_loop.cc
+++ b/chromium/media/base/android/media_codec_loop.cc
@@ -66,7 +66,7 @@ void MediaCodecLoop::OnKeyAdded() {
if (state_ == STATE_WAITING_FOR_KEY)
SetState(STATE_READY);
- DoPendingWork();
+ ExpectWork();
}
bool MediaCodecLoop::TryFlush() {
@@ -95,6 +95,13 @@ bool MediaCodecLoop::TryFlush() {
return true;
}
+void MediaCodecLoop::ExpectWork() {
+ // Start / reset the timer, since we believe that progress can be made soon,
+ // even if not immediately.
+ ManageTimer(true);
+ DoPendingWork();
+}
+
void MediaCodecLoop::DoPendingWork() {
if (state_ == STATE_ERROR)
return;
diff --git a/chromium/media/base/android/media_codec_loop.h b/chromium/media/base/android/media_codec_loop.h
index 539cb241f45..890147ebbda 100644
--- a/chromium/media/base/android/media_codec_loop.h
+++ b/chromium/media/base/android/media_codec_loop.h
@@ -211,12 +211,11 @@ class MEDIA_EXPORT MediaCodecLoop {
// FakeSingleThreadTaskRunner maintains a raw ptr to it also.
void SetTestTickClock(const base::TickClock* test_tick_clock);
- // Does the MediaCodec processing cycle: enqueues an input buffer, then
- // dequeues output buffers. This should be called by the client when more
- // work becomes available, such as when new input data arrives. If codec
- // output buffers are freed after OnDecodedFrame returns, then this should
- // also be called.
- void DoPendingWork();
+ // Notify us that work can be done immediately, or in the near future. This
+ // should be called by the client when more work becomes available, such as
+ // when new input data arrives. If codec output buffers are freed after
+ // OnDecodedFrame returns, then this should also be called.
+ void ExpectWork();
// Try to flush this media codec. Returns true on success, false on failure.
// Failures can result in a state change to the Error state. If this returns
@@ -254,6 +253,11 @@ class MEDIA_EXPORT MediaCodecLoop {
bool is_pending = false;
};
+ // Does the MediaCodec processing cycle: enqueues an input buffer, then
+ // dequeues output buffers. Will restart / reset the timer if any progress is
+ // made on this call.
+ void DoPendingWork();
+
// Enqueues one pending input buffer into MediaCodec if MediaCodec has room,
// and if the client has any input to give us.
// Returns true if any input was processed.
diff --git a/chromium/media/base/android/media_codec_loop_unittest.cc b/chromium/media/base/android/media_codec_loop_unittest.cc
index 171788f9ef0..d257535a84e 100644
--- a/chromium/media/base/android/media_codec_loop_unittest.cc
+++ b/chromium/media/base/android/media_codec_loop_unittest.cc
@@ -94,7 +94,7 @@ class MediaCodecLoopTest : public testing::Test {
}
// Set an expectation that MCL will try to get another input / output buffer,
- // and not get one in DoPendingWork.
+ // and not get one in ExpectWork.
void ExpectEmptyIOLoop() {
ExpectIsAnyInputPending(false);
EXPECT_CALL(Codec(), DequeueOutputBuffer(_, _, _, _, _, _, _))
@@ -215,7 +215,7 @@ TEST_F(MediaCodecLoopTest, TestPendingWorkWithoutInput) {
EXPECT_CALL(Codec(), DequeueOutputBuffer(_, _, _, _, _, _, _))
.Times(1)
.WillOnce(Return(MEDIA_CODEC_TRY_AGAIN_LATER));
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
WaitUntilIdle(ShouldNotBeIdle);
}
@@ -226,7 +226,7 @@ TEST_F(MediaCodecLoopTest, TestPendingWorkWithInput) {
ExpectIsAnyInputPending(true);
EXPECT_CALL(Codec(), DequeueOutputBuffer(_, _, _, _, _, _, _)).Times(1);
EXPECT_CALL(Codec(), DequeueInputBuffer(_, _)).Times(1);
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
WaitUntilIdle(ShouldNotBeIdle);
}
@@ -241,11 +241,11 @@ TEST_F(MediaCodecLoopTest, TestPendingWorkWithOutputBuffer) {
ExpectDequeueOutputBuffer(buf);
ExpectOnDecodedFrame(buf);
- // MCL will try again for another set of buffers before DoPendingWork()
+ // MCL will try again for another set of buffers before ExpectWork()
// returns. This is why we don't just leave them for WaitUntilIdle().
ExpectEmptyIOLoop();
}
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
WaitUntilIdle(ShouldNotBeIdle);
}
@@ -276,7 +276,7 @@ TEST_F(MediaCodecLoopTest, TestQueueEos) {
.Times(1)
.WillOnce(Return(MEDIA_CODEC_TRY_AGAIN_LATER));
}
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
// Don't WaitUntilIdle() here. See TestUnqueuedEos.
}
@@ -303,7 +303,7 @@ TEST_F(MediaCodecLoopTest, TestQueueEosFailure) {
EXPECT_CALL(*client_, OnDecodedEos(_)).Times(1).WillOnce(Return(false));
EXPECT_CALL(*client_, OnCodecLoopError()).Times(1);
}
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
// Don't WaitUntilIdle() here.
}
@@ -330,10 +330,10 @@ TEST_F(MediaCodecLoopTest, TestQueueInputData) {
.Times(1)
.WillOnce(Return(MEDIA_CODEC_TRY_AGAIN_LATER));
- // DoPendingWork will try again.
+ // ExpectWork will try again.
ExpectEmptyIOLoop();
}
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
WaitUntilIdle(ShouldNotBeIdle);
}
@@ -356,7 +356,7 @@ TEST_F(MediaCodecLoopTest, TestQueueInputDataFails) {
ExpectInputDataQueued(false);
EXPECT_CALL(*client_, OnCodecLoopError()).Times(1);
}
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
// MCL is now in the error state.
}
@@ -371,7 +371,7 @@ TEST_F(MediaCodecLoopTest, TestQueueInputDataTryAgain) {
// MCL will try for output too.
ExpectDequeueOutputBuffer(MEDIA_CODEC_TRY_AGAIN_LATER);
}
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
// Note that the client might not be allowed to change from "input pending"
// to "no input pending" without actually being asked for input. For now,
// MCL doesn't assume this.
@@ -405,7 +405,7 @@ TEST_F(MediaCodecLoopTest, TestSeveralPendingIOBuffers) {
ExpectEmptyIOLoop();
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
}
TEST_F(MediaCodecLoopTest, TestTryFlushOnJellyBeanMR2) {
@@ -440,7 +440,7 @@ TEST_F(MediaCodecLoopTest, TestOnKeyAdded) {
{
InSequence _s;
- // First DoPendingWork()
+ // First ExpectWork()
ExpectIsAnyInputPending(true);
ExpectDequeueInputBuffer(input_buffer_index);
@@ -458,18 +458,18 @@ TEST_F(MediaCodecLoopTest, TestOnKeyAdded) {
// the buffer we just provided.
ExpectDequeueOutputBuffer(MEDIA_CODEC_TRY_AGAIN_LATER);
}
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
// Try again, to be sure that MCL doesn't request more input. Note that this
// is also done in the above loop, but that one could be made optional. This
- // forces MCL to try again as part of an entirely new DoPendingWork cycle.
+ // forces MCL to try again as part of an entirely new ExpectWork cycle.
{
InSequence _s;
// MCL should only try for output buffers, since it's still waiting for a
// key to be added.
ExpectDequeueOutputBuffer(MEDIA_CODEC_TRY_AGAIN_LATER);
}
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
// When we add the key, MCL will DoPending work again. This time, it should
// succeed since the key has been added.
diff --git a/chromium/media/base/android/media_drm_bridge.cc b/chromium/media/base/android/media_drm_bridge.cc
index 5026bda9bd4..db557095f75 100644
--- a/chromium/media/base/android/media_drm_bridge.cc
+++ b/chromium/media/base/android/media_drm_bridge.cc
@@ -105,9 +105,9 @@ std::string ConvertInitDataType(media::EmeInitDataType init_data_type) {
// Convert CdmSessionType to KeyType supported by MediaDrm.
KeyType ConvertCdmSessionType(CdmSessionType session_type) {
switch (session_type) {
- case CdmSessionType::TEMPORARY_SESSION:
+ case CdmSessionType::kTemporary:
return KeyType::KEY_TYPE_STREAMING;
- case CdmSessionType::PERSISTENT_LICENSE_SESSION:
+ case CdmSessionType::kPersistentLicense:
return KeyType::KEY_TYPE_OFFLINE;
default:
@@ -464,7 +464,7 @@ void MediaDrmBridge::LoadSession(
DCHECK(IsPersistentLicenseTypeSupportedByMediaDrm());
- if (session_type != CdmSessionType::PERSISTENT_LICENSE_SESSION) {
+ if (session_type != CdmSessionType::kPersistentLicense) {
promise->reject(
CdmPromise::Exception::NOT_SUPPORTED_ERROR, 0,
"LoadSession() is only supported for 'persistent-license'.");
diff --git a/chromium/media/base/android/media_drm_storage_bridge.cc b/chromium/media/base/android/media_drm_storage_bridge.cc
index 040f8edc5ae..d72e9653473 100644
--- a/chromium/media/base/android/media_drm_storage_bridge.cc
+++ b/chromium/media/base/android/media_drm_storage_bridge.cc
@@ -23,7 +23,8 @@ using base::android::AttachCurrentThread;
using base::android::ConvertUTF8ToJavaString;
using base::android::JavaByteArrayToByteVector;
using base::android::JavaParamRef;
-using base::android::RunCallbackAndroid;
+using base::android::RunBooleanCallbackAndroid;
+using base::android::RunObjectCallbackAndroid;
using base::android::ScopedJavaLocalRef;
using base::android::ToJavaByteArray;
@@ -128,7 +129,7 @@ void MediaDrmStorageBridge::OnClearInfo(
void MediaDrmStorageBridge::RunAndroidBoolCallback(JavaObjectPtr j_callback,
bool success) {
- RunCallbackAndroid(*j_callback, success);
+ RunBooleanCallbackAndroid(*j_callback, success);
}
void MediaDrmStorageBridge::OnInitialized(
@@ -147,7 +148,7 @@ void MediaDrmStorageBridge::OnSessionDataLoaded(
const std::string& session_id,
std::unique_ptr<MediaDrmStorage::SessionData> session_data) {
if (!session_data) {
- RunCallbackAndroid(*j_callback, ScopedJavaLocalRef<jobject>());
+ RunObjectCallbackAndroid(*j_callback, ScopedJavaLocalRef<jobject>());
return;
}
@@ -158,8 +159,9 @@ void MediaDrmStorageBridge::OnSessionDataLoaded(
ScopedJavaLocalRef<jstring> j_mime =
ConvertUTF8ToJavaString(env, session_data->mime_type);
- RunCallbackAndroid(*j_callback, Java_PersistentInfo_create(
- env, j_eme_id, j_key_set_id, j_mime));
+ RunObjectCallbackAndroid(
+ *j_callback,
+ Java_PersistentInfo_create(env, j_eme_id, j_key_set_id, j_mime));
}
} // namespace media
diff --git a/chromium/media/base/android/media_player_bridge_unittest.cc b/chromium/media/base/android/media_player_bridge_unittest.cc
index c2b2447b6d6..48a9bf1fb01 100644
--- a/chromium/media/base/android/media_player_bridge_unittest.cc
+++ b/chromium/media/base/android/media_player_bridge_unittest.cc
@@ -36,7 +36,6 @@ class MockMediaPlayerManager : public MediaPlayerManager {
MOCK_METHOD3(OnVideoSizeChanged, void(int player_id, int width, int height));
MOCK_METHOD2(OnAudibleStateChanged, void(int player_id, bool is_audible_now));
MOCK_METHOD1(OnWaitingForDecryptionKey, void(int player_id));
- MOCK_METHOD0(GetFullscreenPlayer, MediaPlayerAndroid*());
MOCK_METHOD1(GetPlayer, MediaPlayerAndroid*(int player_id));
MOCK_METHOD3(RequestPlay,
bool(int player_id, base::TimeDelta duration, bool has_audio));
diff --git a/chromium/media/base/android/media_player_manager.h b/chromium/media/base/android/media_player_manager.h
index 8763b90178d..9c764147517 100644
--- a/chromium/media/base/android/media_player_manager.h
+++ b/chromium/media/base/android/media_player_manager.h
@@ -61,9 +61,6 @@ class MEDIA_EXPORT MediaPlayerManager {
// Called when video size has changed. Args: player ID, width, height.
virtual void OnVideoSizeChanged(int player_id, int width, int height) = 0;
- // Returns the player that's in the fullscreen mode currently.
- virtual MediaPlayerAndroid* GetFullscreenPlayer() = 0;
-
// Returns the player with the specified id.
virtual MediaPlayerAndroid* GetPlayer(int player_id) = 0;
diff --git a/chromium/media/base/android_overlay_config.h b/chromium/media/base/android_overlay_config.h
index 744da408ff3..d51131024ed 100644
--- a/chromium/media/base/android_overlay_config.h
+++ b/chromium/media/base/android_overlay_config.h
@@ -65,8 +65,14 @@ struct MEDIA_EXPORT AndroidOverlayConfig {
bool power_efficient = false;
// Convenient helpers since the syntax is weird.
- void is_ready(AndroidOverlay* overlay) { std::move(ready_cb).Run(overlay); }
- void is_failed(AndroidOverlay* overlay) { std::move(failed_cb).Run(overlay); }
+ void is_ready(AndroidOverlay* overlay) {
+ if (ready_cb)
+ std::move(ready_cb).Run(overlay);
+ }
+ void is_failed(AndroidOverlay* overlay) {
+ if (failed_cb)
+ std::move(failed_cb).Run(overlay);
+ }
ReadyCB ready_cb;
FailedCB failed_cb;
diff --git a/chromium/media/base/audio_bus.cc b/chromium/media/base/audio_bus.cc
index c21c6c93642..0c61d5fc69b 100644
--- a/chromium/media/base/audio_bus.cc
+++ b/chromium/media/base/audio_bus.cc
@@ -148,12 +148,22 @@ std::unique_ptr<AudioBus> AudioBus::WrapMemory(const AudioParameters& params,
static_cast<float*>(data)));
}
+std::unique_ptr<const AudioBus> AudioBus::WrapReadOnlyMemory(int channels,
+ int frames,
+ const void* data) {
+ // Note: const_cast is generally dangerous but is used in this case since
+ // AudioBus accomodates both read-only and read/write use cases. A const
+ // AudioBus object is returned to ensure no one accidentally writes to the
+ // read-only data.
+ return WrapMemory(channels, frames, const_cast<void*>(data));
+}
+
std::unique_ptr<const AudioBus> AudioBus::WrapReadOnlyMemory(
const AudioParameters& params,
const void* data) {
// Note: const_cast is generally dangerous but is used in this case since
// AudioBus accomodates both read-only and read/write use cases. A const
- // AudioBus object is returned to ensure noone accidentally writes to the
+ // AudioBus object is returned to ensure no one accidentally writes to the
// read-only data.
return WrapMemory(params, const_cast<void*>(data));
}
diff --git a/chromium/media/base/audio_bus.h b/chromium/media/base/audio_bus.h
index 5e55b5c7ed3..77263024403 100644
--- a/chromium/media/base/audio_bus.h
+++ b/chromium/media/base/audio_bus.h
@@ -56,6 +56,9 @@ class MEDIA_SHMEM_EXPORT AudioBus {
void* data);
static std::unique_ptr<AudioBus> WrapMemory(const AudioParameters& params,
void* data);
+ static std::unique_ptr<const AudioBus> WrapReadOnlyMemory(int channels,
+ int frames,
+ const void* data);
static std::unique_ptr<const AudioBus> WrapReadOnlyMemory(
const AudioParameters& params,
const void* data);
diff --git a/chromium/media/base/audio_decoder.cc b/chromium/media/base/audio_decoder.cc
index 4ff425e9dfd..ad272910030 100644
--- a/chromium/media/base/audio_decoder.cc
+++ b/chromium/media/base/audio_decoder.cc
@@ -12,6 +12,10 @@ AudioDecoder::AudioDecoder() = default;
AudioDecoder::~AudioDecoder() = default;
+bool AudioDecoder::IsPlatformDecoder() const {
+ return false;
+}
+
bool AudioDecoder::NeedsBitstreamConversion() const {
return false;
}
diff --git a/chromium/media/base/audio_decoder.h b/chromium/media/base/audio_decoder.h
index d27cd16cfa9..c66ffa76ce8 100644
--- a/chromium/media/base/audio_decoder.h
+++ b/chromium/media/base/audio_decoder.h
@@ -54,6 +54,12 @@ class MEDIA_EXPORT AudioDecoder {
// TODO(xhwang): Rename this method since the name is not only for display.
virtual std::string GetDisplayName() const = 0;
+ // Returns true if the implementation is expected to be implemented by the
+ // platform. The value should be available immediately after construction and
+ // should not change within the lifetime of a decoder instance. The value is
+ // used only for logging.
+ virtual bool IsPlatformDecoder() const;
+
// Initializes an AudioDecoder with |config|, executing the |init_cb| upon
// completion.
//
diff --git a/chromium/media/base/audio_parameters.h b/chromium/media/base/audio_parameters.h
index de18c65d95f..fa6eb8ea982 100644
--- a/chromium/media/base/audio_parameters.h
+++ b/chromium/media/base/audio_parameters.h
@@ -144,6 +144,7 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
EXPERIMENTAL_ECHO_CANCELLER = 0x40, // Indicates an echo canceller is
// available that should only
// experimentally be enabled.
+ MULTIZONE = 0x80,
};
AudioParameters();
diff --git a/chromium/media/base/audio_point.cc b/chromium/media/base/audio_point.cc
index cbd7a642a95..fd1ee46261d 100644
--- a/chromium/media/base/audio_point.cc
+++ b/chromium/media/base/audio_point.cc
@@ -6,12 +6,6 @@
#include <stddef.h>
-#include "base/logging.h"
-#include "base/strings/string_number_conversions.h"
-#include "base/strings/string_split.h"
-#include "base/strings/string_util.h"
-#include "base/strings/stringprintf.h"
-
namespace media {
std::string PointsToString(const std::vector<Point>& points) {
@@ -26,38 +20,4 @@ std::string PointsToString(const std::vector<Point>& points) {
return points_string;
}
-std::vector<Point> ParsePointsFromString(const std::string& points_string) {
- std::vector<Point> points;
- if (points_string.empty())
- return points;
-
- const auto& tokens =
- base::SplitString(points_string, base::kWhitespaceASCII,
- base::KEEP_WHITESPACE, base::SPLIT_WANT_NONEMPTY);
- if (tokens.size() < 3 || tokens.size() % 3 != 0) {
- LOG(ERROR) << "Malformed points string: " << points_string;
- return points;
- }
-
- std::vector<float> float_tokens;
- float_tokens.reserve(tokens.size());
- for (const auto& token : tokens) {
- double float_token;
- if (!base::StringToDouble(token, &float_token)) {
- LOG(ERROR) << "Unable to convert token=" << token
- << " to double from points string: " << points_string;
- return points;
- }
- float_tokens.push_back(float_token);
- }
-
- points.reserve(float_tokens.size() / 3);
- for (size_t i = 0; i < float_tokens.size(); i += 3) {
- points.push_back(
- Point(float_tokens[i + 0], float_tokens[i + 1], float_tokens[i + 2]));
- }
-
- return points;
-}
-
} // namespace media
diff --git a/chromium/media/base/audio_point.h b/chromium/media/base/audio_point.h
index 20234aa3f9d..3b5632868e4 100644
--- a/chromium/media/base/audio_point.h
+++ b/chromium/media/base/audio_point.h
@@ -15,13 +15,6 @@ namespace media {
using Point = gfx::Point3F;
-// Returns a vector of points parsed from a whitespace-separated string
-// formatted as: "x1 y1 z1 ... zn yn zn" for n points.
-//
-// Returns an empty vector if |points_string| is empty or isn't parseable.
-MEDIA_SHMEM_EXPORT std::vector<Point> ParsePointsFromString(
- const std::string& points_string);
-
// Returns |points| as a human-readable string. (Not necessarily in the format
// required by ParsePointsFromString).
MEDIA_SHMEM_EXPORT std::string PointsToString(const std::vector<Point>& points);
diff --git a/chromium/media/base/audio_point_unittest.cc b/chromium/media/base/audio_point_unittest.cc
index 0704c87b31b..0970ef272e0 100644
--- a/chromium/media/base/audio_point_unittest.cc
+++ b/chromium/media/base/audio_point_unittest.cc
@@ -19,23 +19,5 @@ TEST(PointTest, PointsToString) {
EXPECT_EQ("", PointsToString(std::vector<Point>()));
}
-TEST(PointTest, ParsePointString) {
- const std::vector<Point> expected_empty;
- EXPECT_EQ(expected_empty, ParsePointsFromString(""));
- EXPECT_EQ(expected_empty, ParsePointsFromString("0 0 a"));
- EXPECT_EQ(expected_empty, ParsePointsFromString("1 2"));
- EXPECT_EQ(expected_empty, ParsePointsFromString("1 2 3 4"));
-
- {
- std::vector<Point> expected(1, Point(-0.02f, 0, 0));
- expected.push_back(Point(0.02f, 0, 0));
- EXPECT_EQ(expected, ParsePointsFromString("-0.02 0 0 0.02 0 0"));
- }
- {
- std::vector<Point> expected(1, Point(1, 2, 3));
- EXPECT_EQ(expected, ParsePointsFromString("1 2 3"));
- }
-}
-
} // namespace
} // namespace media
diff --git a/chromium/media/base/bind_to_current_loop.h b/chromium/media/base/bind_to_current_loop.h
index 8c07621e644..de69a63ace8 100644
--- a/chromium/media/base/bind_to_current_loop.h
+++ b/chromium/media/base/bind_to_current_loop.h
@@ -13,18 +13,22 @@
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
-// This is a helper utility for base::Bind()ing callbacks to the current
-// MessageLoop. The typical use is when |a| (of class |A|) wants to hand a
+// This is a helper utility for base::Bind()ing callbacks to a given
+// TaskRunner. The typical use is when |a| (of class |A|) wants to hand a
// callback such as base::Bind(&A::AMethod, a) to |b|, but needs to ensure that
-// when |b| executes the callback, it does so on |a|'s current MessageLoop.
+// when |b| executes the callback, it does so on |a|'s task_runner's
+// MessageLoop.
//
// Typical usage: request to be called back on the current thread:
// other->StartAsyncProcessAndCallMeBack(
-// media::BindToCurrentLoop(base::BindOnce(&MyClass::MyMethod, this)));
+// media::BindToLoop(task_runner, base::BindOnce(&MyClass::MyMethod, this)));
//
-// media::BindToCurrentLoop returns the same type of callback to the given
+// media::BindToLoop returns the same type of callback to the given
// callback. I.e. it returns a RepeatingCallback for a given RepeatingCallback,
// and returns OnceCallback for a given OnceCallback.
+//
+// The function BindToCurrentLoop is shorthand to bind to the calling function's
+// current MessageLoop.
namespace media {
namespace internal {
@@ -81,7 +85,8 @@ class TrampolineHelper {
} // namespace internal
template <typename... Args>
-inline base::RepeatingCallback<void(Args...)> BindToCurrentLoop(
+inline base::RepeatingCallback<void(Args...)> BindToLoop(
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
base::RepeatingCallback<void(Args...)> cb) {
using CallbackType = base::RepeatingCallback<void(Args...)>;
using Helper = internal::TrampolineHelper<CallbackType>;
@@ -89,12 +94,12 @@ inline base::RepeatingCallback<void(Args...)> BindToCurrentLoop(
RunnerType run = &Helper::Run;
// TODO(tzik): Propagate FROM_HERE from the caller.
return base::BindRepeating(
- run, std::make_unique<Helper>(
- FROM_HERE, base::ThreadTaskRunnerHandle::Get(), std::move(cb)));
+ run, std::make_unique<Helper>(FROM_HERE, task_runner, std::move(cb)));
}
template <typename... Args>
-inline base::OnceCallback<void(Args...)> BindToCurrentLoop(
+inline base::OnceCallback<void(Args...)> BindToLoop(
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
base::OnceCallback<void(Args...)> cb) {
using CallbackType = base::OnceCallback<void(Args...)>;
using Helper = internal::TrampolineHelper<CallbackType>;
@@ -102,8 +107,19 @@ inline base::OnceCallback<void(Args...)> BindToCurrentLoop(
RunnerType run = &Helper::Run;
// TODO(tzik): Propagate FROM_HERE from the caller.
return base::BindOnce(
- run, std::make_unique<Helper>(
- FROM_HERE, base::ThreadTaskRunnerHandle::Get(), std::move(cb)));
+ run, std::make_unique<Helper>(FROM_HERE, task_runner, std::move(cb)));
+}
+
+template <typename... Args>
+inline base::RepeatingCallback<void(Args...)> BindToCurrentLoop(
+ base::RepeatingCallback<void(Args...)> cb) {
+ return BindToLoop(base::ThreadTaskRunnerHandle::Get(), std::move(cb));
+}
+
+template <typename... Args>
+inline base::OnceCallback<void(Args...)> BindToCurrentLoop(
+ base::OnceCallback<void(Args...)> cb) {
+ return BindToLoop(base::ThreadTaskRunnerHandle::Get(), std::move(cb));
}
} // namespace media
diff --git a/chromium/media/base/cdm_config.h b/chromium/media/base/cdm_config.h
index 6a3dd6b4557..447e70f5b50 100644
--- a/chromium/media/base/cdm_config.h
+++ b/chromium/media/base/cdm_config.h
@@ -17,8 +17,8 @@ struct CdmConfig {
// Allow access to persistent state.
bool allow_persistent_state = false;
- // Use hardware-secure codecs. This flag is only used on Android, it should
- // always be false on other platforms.
+ // Use hardware-secure codecs. This flag is only used on Android and Windows,
+ // it should always be false on other platforms.
bool use_hw_secure_codecs = false;
};
diff --git a/chromium/media/base/cdm_key_information.cc b/chromium/media/base/cdm_key_information.cc
index 4bded483678..66a5290a218 100644
--- a/chromium/media/base/cdm_key_information.cc
+++ b/chromium/media/base/cdm_key_information.cc
@@ -2,15 +2,15 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include "media/base/cdm_key_information.h"
+
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
-#include "media/base/cdm_key_information.h"
namespace media {
CdmKeyInformation::CdmKeyInformation()
- : status(INTERNAL_ERROR), system_code(0) {
-}
+ : status(INTERNAL_ERROR), system_code(0) {}
CdmKeyInformation::CdmKeyInformation(const std::vector<uint8_t>& key_id,
KeyStatus status,
@@ -60,11 +60,15 @@ std::string CdmKeyInformation::KeyStatusToString(KeyStatus key_status) {
return "";
}
+std::ostream& operator<<(std::ostream& os,
+ CdmKeyInformation::KeyStatus status) {
+ return os << CdmKeyInformation::KeyStatusToString(status);
+}
+
std::ostream& operator<<(std::ostream& os, const CdmKeyInformation& info) {
return os << "key_id = "
<< base::HexEncode(info.key_id.data(), info.key_id.size())
- << ", status = "
- << CdmKeyInformation::KeyStatusToString(info.status)
+ << ", status = " << info.status
<< ", system_code = " << info.system_code;
}
diff --git a/chromium/media/base/cdm_key_information.h b/chromium/media/base/cdm_key_information.h
index 16ea225ad36..7e2cc57c45e 100644
--- a/chromium/media/base/cdm_key_information.h
+++ b/chromium/media/base/cdm_key_information.h
@@ -50,7 +50,11 @@ struct MEDIA_EXPORT CdmKeyInformation {
uint32_t system_code;
};
-// For logging use only.
+// The following are for logging use only.
+
+MEDIA_EXPORT std::ostream& operator<<(std::ostream& os,
+ CdmKeyInformation::KeyStatus status);
+
MEDIA_EXPORT std::ostream& operator<<(std::ostream& os,
const CdmKeyInformation& info);
diff --git a/chromium/media/base/content_decryption_module.h b/chromium/media/base/content_decryption_module.h
index d3187cda83d..0ab2248d17e 100644
--- a/chromium/media/base/content_decryption_module.h
+++ b/chromium/media/base/content_decryption_module.h
@@ -41,10 +41,10 @@ typedef std::vector<std::unique_ptr<CdmKeyInformation>> CdmKeysInfo;
// Must be consistent with the values specified in the spec:
// https://w3c.github.io/encrypted-media/#idl-def-MediaKeySessionType
enum class CdmSessionType {
- TEMPORARY_SESSION,
- PERSISTENT_LICENSE_SESSION,
- PERSISTENT_RELEASE_MESSAGE_SESSION,
- SESSION_TYPE_MAX = PERSISTENT_RELEASE_MESSAGE_SESSION
+ kTemporary,
+ kPersistentLicense,
+ kPersistentUsageRecord,
+ kMaxValue = kPersistentUsageRecord
};
// Type of message being sent to the application.
diff --git a/chromium/media/base/decoder_buffer.cc b/chromium/media/base/decoder_buffer.cc
index 20344051edd..bb8c856ce77 100644
--- a/chromium/media/base/decoder_buffer.cc
+++ b/chromium/media/base/decoder_buffer.cc
@@ -84,7 +84,7 @@ scoped_refptr<DecoderBuffer> DecoderBuffer::FromSharedMemoryHandle(
const base::SharedMemoryHandle& handle,
off_t offset,
size_t size) {
- auto shm = std::make_unique<UnalignedSharedMemory>(handle, true);
+ auto shm = std::make_unique<UnalignedSharedMemory>(handle, size, true);
if (size == 0 || !shm->MapAt(offset, size))
return nullptr;
return base::WrapRefCounted(new DecoderBuffer(std::move(shm), size));
diff --git a/chromium/media/base/decrypt_config.cc b/chromium/media/base/decrypt_config.cc
index 90df7adb934..eda225a1f7f 100644
--- a/chromium/media/base/decrypt_config.cc
+++ b/chromium/media/base/decrypt_config.cc
@@ -72,6 +72,10 @@ DecryptConfig::DecryptConfig(
DecryptConfig::~DecryptConfig() = default;
+std::unique_ptr<DecryptConfig> DecryptConfig::Clone() const {
+ return base::WrapUnique(new DecryptConfig(*this));
+}
+
bool DecryptConfig::HasPattern() const {
return encryption_pattern_.has_value();
}
@@ -113,4 +117,6 @@ std::ostream& DecryptConfig::Print(std::ostream& os) const {
return os;
}
+DecryptConfig::DecryptConfig(const DecryptConfig& other) = default;
+
} // namespace media
diff --git a/chromium/media/base/decrypt_config.h b/chromium/media/base/decrypt_config.h
index 5d99153bf13..25265a231d0 100644
--- a/chromium/media/base/decrypt_config.h
+++ b/chromium/media/base/decrypt_config.h
@@ -45,7 +45,8 @@ class MEDIA_EXPORT DecryptConfig {
// |encryption_pattern| is the pattern used ('cbcs' only). It is optional
// as Common encryption of MPEG-2 transport streams v1 (23009-1:2014)
// does not specify patterns for cbcs encryption mode. The pattern is
- // assumed to be 1:9 for video and 1:0 for audio.
+ // assumed to be 1:9 for video. Tracks other than video are protected
+ // using whole-block full-sample encryption (pattern 0:0 or unspecified).
static std::unique_ptr<DecryptConfig> CreateCencConfig(
const std::string& key_id,
const std::string& iv,
@@ -71,6 +72,8 @@ class MEDIA_EXPORT DecryptConfig {
return encryption_pattern_;
};
+ std::unique_ptr<DecryptConfig> Clone() const;
+
// Returns whether this config has EncryptionPattern set or not.
bool HasPattern() const;
@@ -81,6 +84,8 @@ class MEDIA_EXPORT DecryptConfig {
std::ostream& Print(std::ostream& os) const;
private:
+ DecryptConfig(const DecryptConfig& other);
+
const EncryptionMode encryption_mode_;
const std::string key_id_;
@@ -94,7 +99,7 @@ class MEDIA_EXPORT DecryptConfig {
// Only specified if |encryption_mode_| requires a pattern.
base::Optional<EncryptionPattern> encryption_pattern_;
- DISALLOW_COPY_AND_ASSIGN(DecryptConfig);
+ DISALLOW_ASSIGN(DecryptConfig);
};
} // namespace media
diff --git a/chromium/media/base/demuxer.h b/chromium/media/base/demuxer.h
index 60e46c86331..b2a1124520e 100644
--- a/chromium/media/base/demuxer.h
+++ b/chromium/media/base/demuxer.h
@@ -24,7 +24,6 @@
namespace media {
-class TextTrackConfig;
class MediaTracks;
class MEDIA_EXPORT DemuxerHost {
@@ -48,13 +47,6 @@ class MEDIA_EXPORT DemuxerHost {
// called with an error.
virtual void OnDemuxerError(PipelineStatus error) = 0;
- // Add |text_stream| to the collection managed by the text renderer.
- virtual void AddTextStream(DemuxerStream* text_stream,
- const TextTrackConfig& config) = 0;
-
- // Remove |text_stream| from the presentation.
- virtual void RemoveTextStream(DemuxerStream* text_stream) = 0;
-
protected:
virtual ~DemuxerHost();
};
@@ -92,8 +84,7 @@ class MEDIA_EXPORT Demuxer : public MediaResource {
// lifetime of the demuxer. Don't delete it! |status_cb| must only be run
// after this method has returned.
virtual void Initialize(DemuxerHost* host,
- const PipelineStatusCB& status_cb,
- bool enable_text_tracks) = 0;
+ const PipelineStatusCB& status_cb) = 0;
// Aborts any pending read operations that the demuxer is involved with; any
// read aborted will be aborted with a status of kAborted. Future reads will
diff --git a/chromium/media/base/eme_constants.h b/chromium/media/base/eme_constants.h
index 29e492d126b..a7b85a08a3d 100644
--- a/chromium/media/base/eme_constants.h
+++ b/chromium/media/base/eme_constants.h
@@ -153,29 +153,38 @@ enum class EmeMediaType {
enum class EmeConfigRule {
// The configuration option is not supported.
NOT_SUPPORTED,
+
// The configuration option prevents use of a distinctive identifier.
IDENTIFIER_NOT_ALLOWED,
+
// The configuration option is supported if a distinctive identifier is
// available.
IDENTIFIER_REQUIRED,
+
// The configuration option is supported, but the user experience may be
// improved if a distinctive identifier is available.
IDENTIFIER_RECOMMENDED,
+
// The configuration option prevents use of persistent state.
PERSISTENCE_NOT_ALLOWED,
+
// The configuration option is supported if persistent state is available.
PERSISTENCE_REQUIRED,
+
// The configuration option is supported if both a distinctive identifier and
// persistent state are available.
IDENTIFIER_AND_PERSISTENCE_REQUIRED,
+
// The configuration option prevents use of hardware-secure codecs.
// This rule only has meaning on platforms that distinguish hardware-secure
- // codecs (ie. Android).
+ // codecs (i.e. Android and Windows).
HW_SECURE_CODECS_NOT_ALLOWED,
+
// The configuration option is supported if hardware-secure codecs are used.
// This rule only has meaning on platforms that distinguish hardware-secure
- // codecs (ie. Android).
+ // codecs (i.e. Android and Windows).
HW_SECURE_CODECS_REQUIRED,
+
// The configuration option is supported without conditions.
SUPPORTED,
};
diff --git a/chromium/media/base/encryption_pattern.cc b/chromium/media/base/encryption_pattern.cc
index e2c194083b8..ea07abb380d 100644
--- a/chromium/media/base/encryption_pattern.cc
+++ b/chromium/media/base/encryption_pattern.cc
@@ -19,14 +19,6 @@ EncryptionPattern& EncryptionPattern::operator=(const EncryptionPattern& rhs) =
EncryptionPattern::~EncryptionPattern() = default;
-bool EncryptionPattern::IsInEffect() const {
- // ISO/IEC 23001-7(2016), section 10.3, discussing 'cens' pattern encryption
- // scheme, states "Tracks other than video are protected using whole-block
- // full-sample encryption as specified in 9.7 and hence skip_byte_block
- // SHALL be 0." So pattern is in effect as long as |crypt_byte_block_| is set.
- return crypt_byte_block_ != 0;
-}
-
bool EncryptionPattern::operator==(const EncryptionPattern& other) const {
return crypt_byte_block_ == other.crypt_byte_block_ &&
skip_byte_block_ == other.skip_byte_block_;
diff --git a/chromium/media/base/encryption_pattern.h b/chromium/media/base/encryption_pattern.h
index 41217c4c553..928c1dbadb9 100644
--- a/chromium/media/base/encryption_pattern.h
+++ b/chromium/media/base/encryption_pattern.h
@@ -20,9 +20,7 @@ namespace media {
// encrypted, and the next nine are skipped. This pattern is applied
// repeatedly until the end of the last 16-byte block in the subsample.
// Any remaining bytes are left clear.
-// If crypt_byte_block is 0, pattern encryption is disabled.
-// TODO(jrummell): Use base::Optional<EncryptionPattern> everywhere, and remove
-// IsInEffect().
+// TODO(jrummell): Use base::Optional<EncryptionPattern> everywhere.
class MEDIA_EXPORT EncryptionPattern {
public:
EncryptionPattern();
@@ -34,12 +32,15 @@ class MEDIA_EXPORT EncryptionPattern {
uint32_t crypt_byte_block() const { return crypt_byte_block_; }
uint32_t skip_byte_block() const { return skip_byte_block_; }
- bool IsInEffect() const;
-
bool operator==(const EncryptionPattern& other) const;
bool operator!=(const EncryptionPattern& other) const;
private:
+ // ISO/IEC 23001-7(2016), section 10.3, discussing 'cens' pattern encryption
+ // scheme, states "Tracks other than video are protected using whole-block
+ // full-sample encryption as specified in 9.7 and hence skip_byte_block
+ // SHALL be 0." So patterns where |skip_byte_block| = 0 should be treated
+ // as whole-block full-sample encryption.
uint32_t crypt_byte_block_ = 0; // Count of the encrypted blocks.
uint32_t skip_byte_block_ = 0; // Count of the unencrypted blocks.
};
diff --git a/chromium/media/base/encryption_scheme.cc b/chromium/media/base/encryption_scheme.cc
index 78349d2499e..d20fbf41d3d 100644
--- a/chromium/media/base/encryption_scheme.cc
+++ b/chromium/media/base/encryption_scheme.cc
@@ -39,21 +39,17 @@ std::ostream& operator<<(std::ostream& os,
if (!encryption_scheme.is_encrypted())
return os << "Unencrypted";
- bool pattern_in_effect = encryption_scheme.pattern().IsInEffect();
-
- if (encryption_scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CTR &&
- !pattern_in_effect) {
+ if (encryption_scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CTR)
return os << "CENC";
- }
- if (encryption_scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CBC &&
- pattern_in_effect) {
- return os << "CBCS";
+ if (encryption_scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CBC) {
+ return os << "CBCS with pattern ("
+ << encryption_scheme.pattern().crypt_byte_block() << ","
+ << encryption_scheme.pattern().skip_byte_block() << ")";
}
NOTREACHED();
- return os << "Unknown (mode = " << encryption_scheme.mode()
- << ", pattern_in_effect = " << pattern_in_effect << ")";
+ return os << "Unknown EncryptionScheme, mode = " << encryption_scheme.mode();
}
} // namespace media
diff --git a/chromium/media/base/ipc/media_param_traits_macros.h b/chromium/media/base/ipc/media_param_traits_macros.h
index 2e7aa74fce5..c5ae7b035e0 100644
--- a/chromium/media/base/ipc/media_param_traits_macros.h
+++ b/chromium/media/base/ipc/media_param_traits_macros.h
@@ -61,16 +61,16 @@ IPC_ENUM_TRAITS_MAX_VALUE(media::CdmPromise::Exception,
media::CdmPromise::Exception::EXCEPTION_MAX)
IPC_ENUM_TRAITS_MAX_VALUE(media::CdmProxy::Function,
- media::CdmProxy::Function::kMax)
+ media::CdmProxy::Function::kMaxValue)
IPC_ENUM_TRAITS_MAX_VALUE(media::CdmProxy::Protocol,
- media::CdmProxy::Protocol::kMax)
+ media::CdmProxy::Protocol::kMaxValue)
IPC_ENUM_TRAITS_MAX_VALUE(media::CdmProxy::Status,
- media::CdmProxy::Status::kMax)
+ media::CdmProxy::Status::kMaxValue)
IPC_ENUM_TRAITS_MAX_VALUE(media::CdmSessionType,
- media::CdmSessionType::SESSION_TYPE_MAX)
+ media::CdmSessionType::kMaxValue)
IPC_ENUM_TRAITS_MAX_VALUE(media::ChannelLayout, media::CHANNEL_LAYOUT_MAX)
@@ -193,7 +193,6 @@ IPC_STRUCT_TRAITS_BEGIN(media::HDRMetadata)
IPC_STRUCT_TRAITS_END()
IPC_STRUCT_TRAITS_BEGIN(media::OverlayInfo)
- IPC_STRUCT_TRAITS_MEMBER(surface_id)
IPC_STRUCT_TRAITS_MEMBER(routing_token)
IPC_STRUCT_TRAITS_MEMBER(is_fullscreen)
IPC_STRUCT_TRAITS_END()
diff --git a/chromium/media/base/key_system_properties.cc b/chromium/media/base/key_system_properties.cc
index 1e21981955c..6582cb15a0c 100644
--- a/chromium/media/base/key_system_properties.cc
+++ b/chromium/media/base/key_system_properties.cc
@@ -4,16 +4,11 @@
#include "media/base/key_system_properties.h"
-#include "base/logging.h"
-#include "media/media_buildflags.h"
-
namespace media {
-#if defined(OS_ANDROID)
-SupportedCodecs KeySystemProperties::GetSupportedSecureCodecs() const {
+SupportedCodecs KeySystemProperties::GetSupportedHwSecureCodecs() const {
return EME_CODEC_NONE;
}
-#endif
bool KeySystemProperties::UseAesDecryptor() const {
return false;
diff --git a/chromium/media/base/key_system_properties.h b/chromium/media/base/key_system_properties.h
index 33d2d52de3d..72f59f8fa29 100644
--- a/chromium/media/base/key_system_properties.h
+++ b/chromium/media/base/key_system_properties.h
@@ -26,17 +26,15 @@ class MEDIA_EXPORT KeySystemProperties {
virtual bool IsSupportedInitDataType(
EmeInitDataType init_data_type) const = 0;
- // Returns whether |encryption_scheme| is supported by this key system.
- virtual bool IsEncryptionSchemeSupported(
+ // Returns the configuration rule for supporting |encryption_scheme|.
+ virtual EmeConfigRule GetEncryptionSchemeConfigRule(
EncryptionMode encryption_scheme) const = 0;
// Returns the codecs supported by this key system.
virtual SupportedCodecs GetSupportedCodecs() const = 0;
-#if defined(OS_ANDROID)
// Returns the codecs with hardware-secure support in this key system.
- virtual SupportedCodecs GetSupportedSecureCodecs() const;
-#endif
+ virtual SupportedCodecs GetSupportedHwSecureCodecs() const;
// Returns the configuration rule for supporting a robustness requirement.
virtual EmeConfigRule GetRobustnessConfigRule(
@@ -47,9 +45,9 @@ class MEDIA_EXPORT KeySystemProperties {
// sessions.
virtual EmeSessionTypeSupport GetPersistentLicenseSessionSupport() const = 0;
- // Returns the support this key system provides for persistent-release-message
+ // Returns the support this key system provides for persistent-usage-record
// sessions.
- virtual EmeSessionTypeSupport GetPersistentReleaseMessageSessionSupport()
+ virtual EmeSessionTypeSupport GetPersistentUsageRecordSessionSupport()
const = 0;
// Returns the support this key system provides for persistent state.
diff --git a/chromium/media/base/key_systems.cc b/chromium/media/base/key_systems.cc
index a51a28df388..bff71ec8358 100644
--- a/chromium/media/base/key_systems.cc
+++ b/chromium/media/base/key_systems.cc
@@ -102,17 +102,17 @@ class ClearKeyProperties : public KeySystemProperties {
init_data_type == EmeInitDataType::KEYIDS;
}
- bool IsEncryptionSchemeSupported(
- EncryptionMode encryption_scheme) const override {
+ media::EmeConfigRule GetEncryptionSchemeConfigRule(
+ media::EncryptionMode encryption_scheme) const override {
switch (encryption_scheme) {
- case EncryptionMode::kCenc:
- case EncryptionMode::kCbcs:
- return true;
- case EncryptionMode::kUnencrypted:
+ case media::EncryptionMode::kCenc:
+ case media::EncryptionMode::kCbcs:
+ return media::EmeConfigRule::SUPPORTED;
+ case media::EncryptionMode::kUnencrypted:
break;
}
NOTREACHED();
- return false;
+ return media::EmeConfigRule::NOT_SUPPORTED;
}
SupportedCodecs GetSupportedCodecs() const override {
@@ -128,19 +128,24 @@ class ClearKeyProperties : public KeySystemProperties {
return requested_robustness.empty() ? EmeConfigRule::SUPPORTED
: EmeConfigRule::NOT_SUPPORTED;
}
+
EmeSessionTypeSupport GetPersistentLicenseSessionSupport() const override {
return EmeSessionTypeSupport::NOT_SUPPORTED;
}
- EmeSessionTypeSupport GetPersistentReleaseMessageSessionSupport()
+
+ EmeSessionTypeSupport GetPersistentUsageRecordSessionSupport()
const override {
return EmeSessionTypeSupport::NOT_SUPPORTED;
}
+
EmeFeatureSupport GetPersistentStateSupport() const override {
return EmeFeatureSupport::NOT_SUPPORTED;
}
+
EmeFeatureSupport GetDistinctiveIdentifierSupport() const override {
return EmeFeatureSupport::NOT_SUPPORTED;
}
+
bool UseAesDecryptor() const override { return true; }
};
@@ -200,8 +205,6 @@ class KeySystemsImpl : public KeySystems {
std::string GetKeySystemNameForUMA(const std::string& key_system) const;
- bool UseAesDecryptor(const std::string& key_system) const;
-
// These two functions are for testing purpose only.
void AddCodecMask(EmeMediaType media_type,
const std::string& codec,
@@ -211,10 +214,12 @@ class KeySystemsImpl : public KeySystems {
// Implementation of KeySystems interface.
bool IsSupportedKeySystem(const std::string& key_system) const override;
+ bool CanUseAesDecryptor(const std::string& key_system) const override;
+
bool IsSupportedInitDataType(const std::string& key_system,
EmeInitDataType init_data_type) const override;
- bool IsEncryptionSchemeSupported(
+ EmeConfigRule GetEncryptionSchemeConfigRule(
const std::string& key_system,
EncryptionMode encryption_scheme) const override;
@@ -232,7 +237,7 @@ class KeySystemsImpl : public KeySystems {
EmeSessionTypeSupport GetPersistentLicenseSessionSupport(
const std::string& key_system) const override;
- EmeSessionTypeSupport GetPersistentReleaseMessageSessionSupport(
+ EmeSessionTypeSupport GetPersistentUsageRecordSessionSupport(
const std::string& key_system) const override;
EmeFeatureSupport GetPersistentStateSupport(
@@ -389,7 +394,7 @@ void KeySystemsImpl::AddSupportedKeySystems(
DCHECK(!properties->GetKeySystemName().empty());
DCHECK(properties->GetPersistentLicenseSessionSupport() !=
EmeSessionTypeSupport::INVALID);
- DCHECK(properties->GetPersistentReleaseMessageSessionSupport() !=
+ DCHECK(properties->GetPersistentUsageRecordSessionSupport() !=
EmeSessionTypeSupport::INVALID);
DCHECK(properties->GetPersistentStateSupport() !=
EmeFeatureSupport::INVALID);
@@ -409,13 +414,13 @@ void KeySystemsImpl::AddSupportedKeySystems(
EmeFeatureSupport::NOT_SUPPORTED) {
DCHECK(properties->GetPersistentLicenseSessionSupport() ==
EmeSessionTypeSupport::NOT_SUPPORTED);
- DCHECK(properties->GetPersistentReleaseMessageSessionSupport() ==
+ DCHECK(properties->GetPersistentUsageRecordSessionSupport() ==
EmeSessionTypeSupport::NOT_SUPPORTED);
}
- // persistent-release-message sessions are not currently supported.
+ // persistent-usage-record sessions are not currently supported.
// http://crbug.com/448888
- DCHECK(properties->GetPersistentReleaseMessageSessionSupport() ==
+ DCHECK(properties->GetPersistentUsageRecordSessionSupport() ==
EmeSessionTypeSupport::NOT_SUPPORTED);
// If distinctive identifiers are not supported, then no other features can
@@ -424,7 +429,7 @@ void KeySystemsImpl::AddSupportedKeySystems(
EmeFeatureSupport::NOT_SUPPORTED) {
DCHECK(properties->GetPersistentLicenseSessionSupport() !=
EmeSessionTypeSupport::SUPPORTED_WITH_IDENTIFIER);
- DCHECK(properties->GetPersistentReleaseMessageSessionSupport() !=
+ DCHECK(properties->GetPersistentUsageRecordSessionSupport() !=
EmeSessionTypeSupport::SUPPORTED_WITH_IDENTIFIER);
}
@@ -500,7 +505,7 @@ bool KeySystemsImpl::IsSupportedInitDataType(
return key_system_iter->second->IsSupportedInitDataType(init_data_type);
}
-bool KeySystemsImpl::IsEncryptionSchemeSupported(
+EmeConfigRule KeySystemsImpl::GetEncryptionSchemeConfigRule(
const std::string& key_system,
EncryptionMode encryption_scheme) const {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -509,9 +514,9 @@ bool KeySystemsImpl::IsEncryptionSchemeSupported(
key_system_properties_map_.find(key_system);
if (key_system_iter == key_system_properties_map_.end()) {
NOTREACHED();
- return false;
+ return EmeConfigRule::NOT_SUPPORTED;
}
- return key_system_iter->second->IsEncryptionSchemeSupported(
+ return key_system_iter->second->GetEncryptionSchemeConfigRule(
encryption_scheme);
}
@@ -531,18 +536,6 @@ std::string KeySystemsImpl::GetKeySystemNameForUMA(
return kUnknownKeySystemNameForUMA;
}
-bool KeySystemsImpl::UseAesDecryptor(const std::string& key_system) const {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- KeySystemPropertiesMap::const_iterator key_system_iter =
- key_system_properties_map_.find(key_system);
- if (key_system_iter == key_system_properties_map_.end()) {
- DLOG(ERROR) << key_system << " is not a known key system";
- return false;
- }
- return key_system_iter->second->UseAesDecryptor();
-}
-
void KeySystemsImpl::AddCodecMask(EmeMediaType media_type,
const std::string& codec,
uint32_t mask) {
@@ -570,6 +563,18 @@ bool KeySystemsImpl::IsSupportedKeySystem(const std::string& key_system) const {
return true;
}
+bool KeySystemsImpl::CanUseAesDecryptor(const std::string& key_system) const {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ KeySystemPropertiesMap::const_iterator key_system_iter =
+ key_system_properties_map_.find(key_system);
+ if (key_system_iter == key_system_properties_map_.end()) {
+ DLOG(ERROR) << key_system << " is not a known key system";
+ return false;
+ }
+ return key_system_iter->second->UseAesDecryptor();
+}
+
EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
const std::string& key_system,
EmeMediaType media_type,
@@ -591,20 +596,20 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
break;
}
- // Look up the key system's supported codecs.
+ // Double check whether the key system is supported.
KeySystemPropertiesMap::const_iterator key_system_iter =
key_system_properties_map_.find(key_system);
if (key_system_iter == key_system_properties_map_.end()) {
- NOTREACHED();
+ NOTREACHED()
+ << "KeySystemConfigSelector should've checked key system support";
return EmeConfigRule::NOT_SUPPORTED;
}
+ // Look up the key system's supported codecs and secure codecs.
SupportedCodecs key_system_codec_mask =
key_system_iter->second->GetSupportedCodecs();
-#if defined(OS_ANDROID)
- SupportedCodecs key_system_secure_codec_mask =
- key_system_iter->second->GetSupportedSecureCodecs();
-#endif // defined(OS_ANDROID)
+ SupportedCodecs key_system_hw_secure_codec_mask =
+ key_system_iter->second->GetSupportedHwSecureCodecs();
// Check that the container is supported by the key system. (This check is
// necessary because |codecs| may be empty.)
@@ -616,7 +621,12 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
return EmeConfigRule::NOT_SUPPORTED;
}
- // Check that the codecs are supported by the key system and container.
+ // Check that the codecs are supported by the key system and container based
+ // on the following rule:
+ // SupportedCodecs | SupportedSecureCodecs | Result
+ // yes | yes | SUPPORTED
+ // yes | no | HW_SECURE_CODECS_NOT_ALLOWED
+ // no | any | NOT_SUPPORTED
EmeConfigRule support = EmeConfigRule::SUPPORTED;
for (size_t i = 0; i < codecs.size(); i++) {
SupportedCodecs codec = GetCodecForString(codecs[i]);
@@ -625,7 +635,7 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
<< codecs[i] << ") not supported by " << key_system;
return EmeConfigRule::NOT_SUPPORTED;
}
-#if defined(OS_ANDROID)
+
// Check whether the codec supports a hardware-secure mode. The goal is to
// prevent mixing of non-hardware-secure codecs with hardware-secure codecs,
// since the mode is fixed at CDM creation.
@@ -634,9 +644,8 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
// to consider codecs that are only supported in hardware-secure mode. We
// could do so, and make use of HW_SECURE_CODECS_REQUIRED, if it turns out
// that hardware-secure-only codecs actually exist and are useful.
- if ((codec & key_system_secure_codec_mask) == 0)
+ if ((codec & key_system_hw_secure_codec_mask) == 0)
support = EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
-#endif // defined(OS_ANDROID)
}
return support;
@@ -671,7 +680,7 @@ EmeSessionTypeSupport KeySystemsImpl::GetPersistentLicenseSessionSupport(
return key_system_iter->second->GetPersistentLicenseSessionSupport();
}
-EmeSessionTypeSupport KeySystemsImpl::GetPersistentReleaseMessageSessionSupport(
+EmeSessionTypeSupport KeySystemsImpl::GetPersistentUsageRecordSessionSupport(
const std::string& key_system) const {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -681,7 +690,7 @@ EmeSessionTypeSupport KeySystemsImpl::GetPersistentReleaseMessageSessionSupport(
NOTREACHED();
return EmeSessionTypeSupport::INVALID;
}
- return key_system_iter->second->GetPersistentReleaseMessageSessionSupport();
+ return key_system_iter->second->GetPersistentUsageRecordSessionSupport();
}
EmeFeatureSupport KeySystemsImpl::GetPersistentStateSupport(
@@ -727,7 +736,7 @@ std::string GetKeySystemNameForUMA(const std::string& key_system) {
}
bool CanUseAesDecryptor(const std::string& key_system) {
- return KeySystemsImpl::GetInstance()->UseAesDecryptor(key_system);
+ return KeySystemsImpl::GetInstance()->CanUseAesDecryptor(key_system);
}
// These two functions are for testing purpose only. The declaration in the
diff --git a/chromium/media/base/key_systems.h b/chromium/media/base/key_systems.h
index a99e45b2c98..8dd05fbe165 100644
--- a/chromium/media/base/key_systems.h
+++ b/chromium/media/base/key_systems.h
@@ -31,13 +31,16 @@ class MEDIA_EXPORT KeySystems {
// Returns whether |key_system| is a supported key system.
virtual bool IsSupportedKeySystem(const std::string& key_system) const = 0;
+ // Returns whether AesDecryptor can be used for the given |key_system|.
+ virtual bool CanUseAesDecryptor(const std::string& key_system) const = 0;
+
// Returns whether |init_data_type| is supported by |key_system|.
virtual bool IsSupportedInitDataType(
const std::string& key_system,
EmeInitDataType init_data_type) const = 0;
- // Returns whether |encryption_scheme| is supported by |key_system|.
- virtual bool IsEncryptionSchemeSupported(
+ // Returns the configuration rule for supporting |encryption_scheme|.
+ virtual EmeConfigRule GetEncryptionSchemeConfigRule(
const std::string& key_system,
EncryptionMode encryption_scheme) const = 0;
@@ -59,9 +62,9 @@ class MEDIA_EXPORT KeySystems {
virtual EmeSessionTypeSupport GetPersistentLicenseSessionSupport(
const std::string& key_system) const = 0;
- // Returns the support |key_system| provides for persistent-release-message
+ // Returns the support |key_system| provides for persistent-usage-record
// sessions.
- virtual EmeSessionTypeSupport GetPersistentReleaseMessageSessionSupport(
+ virtual EmeSessionTypeSupport GetPersistentUsageRecordSessionSupport(
const std::string& key_system) const = 0;
// Returns the support |key_system| provides for persistent state.
diff --git a/chromium/media/base/key_systems_unittest.cc b/chromium/media/base/key_systems_unittest.cc
index 19aaa277906..dc6f2ff72a4 100644
--- a/chromium/media/base/key_systems_unittest.cc
+++ b/chromium/media/base/key_systems_unittest.cc
@@ -24,11 +24,13 @@
namespace media {
+namespace {
+
// These are the (fake) key systems that are registered for these tests.
// kUsesAes uses the AesDecryptor like Clear Key.
// kExternal uses an external CDM, such as library CDM or Android platform CDM.
-const char kUsesAes[] = "x-org.example.clear";
-const char kExternal[] = "x-com.example.test";
+const char kUsesAes[] = "x-org.example.usesaes";
+const char kExternal[] = "x-com.example.external";
const char kClearKey[] = "org.w3.clearkey";
const char kExternalClearKey[] = "org.chromium.externalclearkey";
@@ -42,13 +44,16 @@ const char kRobustnessSupported[] = "supported";
const char kRobustnessSecureCodecsRequired[] = "secure-codecs-required";
const char kRobustnessNotSupported[] = "not-supported";
-// Pick some arbitrary bit fields as long as they are not in conflict with the
-// real ones.
+// Codecs only supported in FOO container. Pick some arbitrary bit fields as
+// long as they are not in conflict with the real ones (static_asserted below).
+// TODO(crbug.com/724362): Remove container type (FOO) from codec enums.
enum TestCodec : uint32_t {
- TEST_CODEC_FOO_AUDIO = 1 << 29, // An audio codec for foo container.
+ TEST_CODEC_FOO_AUDIO = 1 << 25,
TEST_CODEC_FOO_AUDIO_ALL = TEST_CODEC_FOO_AUDIO,
- TEST_CODEC_FOO_VIDEO = 1 << 30, // A video codec for foo container.
- TEST_CODEC_FOO_VIDEO_ALL = TEST_CODEC_FOO_VIDEO,
+ TEST_CODEC_FOO_VIDEO = 1 << 26,
+ // Only supported by hardware secure codec in kExternal key system.
+ TEST_CODEC_FOO_SECURE_VIDEO = 1 << 27,
+ TEST_CODEC_FOO_VIDEO_ALL = TEST_CODEC_FOO_VIDEO | TEST_CODEC_FOO_SECURE_VIDEO,
TEST_CODEC_FOO_ALL = TEST_CODEC_FOO_AUDIO_ALL | TEST_CODEC_FOO_VIDEO_ALL
};
@@ -62,8 +67,9 @@ class TestKeySystemPropertiesBase : public KeySystemProperties {
return init_data_type == EmeInitDataType::WEBM;
}
+ // Note: TEST_CODEC_FOO_SECURE_VIDEO is not supported by default.
SupportedCodecs GetSupportedCodecs() const override {
- return EME_CODEC_WEBM_ALL | TEST_CODEC_FOO_ALL;
+ return EME_CODEC_WEBM_ALL | TEST_CODEC_FOO_AUDIO | TEST_CODEC_FOO_VIDEO;
}
EmeConfigRule GetRobustnessConfigRule(
@@ -73,7 +79,7 @@ class TestKeySystemPropertiesBase : public KeySystemProperties {
: EmeConfigRule::NOT_SUPPORTED;
}
- EmeSessionTypeSupport GetPersistentReleaseMessageSessionSupport()
+ EmeSessionTypeSupport GetPersistentUsageRecordSessionSupport()
const override {
return EmeSessionTypeSupport::NOT_SUPPORTED;
}
@@ -85,10 +91,12 @@ class AesKeySystemProperties : public TestKeySystemPropertiesBase {
std::string GetKeySystemName() const override { return name_; }
- bool IsEncryptionSchemeSupported(
+ EmeConfigRule GetEncryptionSchemeConfigRule(
EncryptionMode encryption_scheme) const override {
- return encryption_scheme == EncryptionMode::kUnencrypted ||
- encryption_scheme == EncryptionMode::kCenc;
+ return (encryption_scheme == EncryptionMode::kUnencrypted ||
+ encryption_scheme == EncryptionMode::kCenc)
+ ? EmeConfigRule::SUPPORTED
+ : EmeConfigRule::NOT_SUPPORTED;
}
EmeSessionTypeSupport GetPersistentLicenseSessionSupport() const override {
@@ -113,17 +121,25 @@ class ExternalKeySystemProperties : public TestKeySystemPropertiesBase {
public:
std::string GetKeySystemName() const override { return kExternal; }
- bool IsEncryptionSchemeSupported(
+ // Pretend clear (unencrypted) and 'cenc' content are always supported. But
+ // 'cbcs' is not supported by hardware secure codecs.
+ EmeConfigRule GetEncryptionSchemeConfigRule(
EncryptionMode encryption_scheme) const override {
- return encryption_scheme != EncryptionMode::kUnencrypted;
+ switch (encryption_scheme) {
+ case media::EncryptionMode::kUnencrypted:
+ case media::EncryptionMode::kCenc:
+ return media::EmeConfigRule::SUPPORTED;
+ case media::EncryptionMode::kCbcs:
+ return media::EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
+ }
+ NOTREACHED();
+ return media::EmeConfigRule::NOT_SUPPORTED;
}
-#if defined(OS_ANDROID)
- // We have hw-secure FOO_VIDEO codec support.
- SupportedCodecs GetSupportedSecureCodecs() const override {
- return TEST_CODEC_FOO_VIDEO;
+ // We have hardware secure codec support for FOO_VIDEO and FOO_SECURE_VIDEO.
+ SupportedCodecs GetSupportedHwSecureCodecs() const override {
+ return TEST_CODEC_FOO_VIDEO | TEST_CODEC_FOO_SECURE_VIDEO;
}
-#endif
EmeConfigRule GetRobustnessConfigRule(
EmeMediaType media_type,
@@ -152,13 +168,15 @@ class ExternalKeySystemProperties : public TestKeySystemPropertiesBase {
}
};
-static bool IsEncryptionSchemeSupported(const std::string& key_system,
- EncryptionMode encryption_scheme) {
- return KeySystems::GetInstance()->IsEncryptionSchemeSupported(
- key_system, encryption_scheme);
+void ExpectEncryptionSchemeConfigRule(const std::string& key_system,
+ EncryptionMode encryption_scheme,
+ EmeConfigRule expected_rule) {
+ EXPECT_EQ(expected_rule,
+ KeySystems::GetInstance()->GetEncryptionSchemeConfigRule(
+ key_system, encryption_scheme));
}
-static EmeConfigRule GetVideoContentTypeConfigRule(
+EmeConfigRule GetVideoContentTypeConfigRule(
const std::string& mime_type,
const std::vector<std::string>& codecs,
const std::string& key_system) {
@@ -168,7 +186,7 @@ static EmeConfigRule GetVideoContentTypeConfigRule(
// Adapt IsSupportedKeySystemWithMediaMimeType() to the new API,
// IsSupportedCodecCombination().
-static bool IsSupportedKeySystemWithMediaMimeType(
+bool IsSupportedKeySystemWithMediaMimeType(
const std::string& mime_type,
const std::vector<std::string>& codecs,
const std::string& key_system) {
@@ -176,7 +194,7 @@ static bool IsSupportedKeySystemWithMediaMimeType(
EmeConfigRule::NOT_SUPPORTED);
}
-static bool IsSupportedKeySystemWithAudioMimeType(
+bool IsSupportedKeySystemWithAudioMimeType(
const std::string& mime_type,
const std::vector<std::string>& codecs,
const std::string& key_system) {
@@ -185,12 +203,11 @@ static bool IsSupportedKeySystemWithAudioMimeType(
EmeConfigRule::NOT_SUPPORTED);
}
-static bool IsSupportedKeySystem(const std::string& key_system) {
+bool IsSupportedKeySystem(const std::string& key_system) {
return KeySystems::GetInstance()->IsSupportedKeySystem(key_system);
}
-static EmeConfigRule GetRobustnessConfigRule(
- const std::string& requested_robustness) {
+EmeConfigRule GetRobustnessConfigRule(const std::string& requested_robustness) {
return KeySystems::GetInstance()->GetRobustnessConfigRule(
kExternal, EmeMediaType::VIDEO, requested_robustness);
}
@@ -203,7 +220,7 @@ static EmeConfigRule GetRobustnessConfigRule(
// systems. In test code, the MediaClient is set by SetMediaClient().
// Therefore, SetMediaClient() must be called before this function to make sure
// MediaClient in effect when constructing KeySystems.
-static void AddContainerAndCodecMasksForTest() {
+void AddContainerAndCodecMasksForTest() {
// Since KeySystems is a singleton. Make sure we only add test container and
// codec masks once per process.
static bool is_test_masks_added = false;
@@ -213,13 +230,15 @@ static void AddContainerAndCodecMasksForTest() {
AddCodecMask(EmeMediaType::AUDIO, "fooaudio", TEST_CODEC_FOO_AUDIO);
AddCodecMask(EmeMediaType::VIDEO, "foovideo", TEST_CODEC_FOO_VIDEO);
+ AddCodecMask(EmeMediaType::VIDEO, "securefoovideo",
+ TEST_CODEC_FOO_SECURE_VIDEO);
AddMimeTypeCodecMask("audio/foo", TEST_CODEC_FOO_AUDIO_ALL);
AddMimeTypeCodecMask("video/foo", TEST_CODEC_FOO_VIDEO_ALL);
is_test_masks_added = true;
}
-static bool CanRunExternalKeySystemTests() {
+bool CanRunExternalKeySystemTests() {
#if defined(OS_ANDROID)
if (HasPlatformDecoderSupport())
return true;
@@ -258,8 +277,7 @@ class TestMediaClient : public MediaClient {
};
TestMediaClient::TestMediaClient()
- : is_update_needed_(true), supports_external_key_system_(true) {
-}
+ : is_update_needed_(true), supports_external_key_system_(true) {}
TestMediaClient::~TestMediaClient() = default;
@@ -299,6 +317,8 @@ void TestMediaClient::DisableExternalKeySystemSupport() {
supports_external_key_system_ = false;
}
+} // namespace
+
class KeySystemsTest : public testing::Test {
protected:
KeySystemsTest() {
@@ -320,8 +340,12 @@ class KeySystemsTest : public testing::Test {
foovideo_codec_.push_back("foovideo");
- foovideo_extended_codec_.push_back("foovideo.4D400C");
+ securefoovideo_codec_.push_back("securefoovideo");
+ // KeySystems only do strict codec string comparison. Extended codecs are
+ // not supported. Note that in production KeySystemConfigSelector will strip
+ // codec extension before calling into KeySystems.
+ foovideo_extended_codec_.push_back("foovideo.4D400C");
foovideo_dot_codec_.push_back("foovideo.");
fooaudio_codec_.push_back("fooaudio");
@@ -337,9 +361,7 @@ class KeySystemsTest : public testing::Test {
SetMediaClient(&test_media_client_);
}
- void SetUp() override {
- AddContainerAndCodecMasksForTest();
- }
+ void SetUp() override { AddContainerAndCodecMasksForTest(); }
~KeySystemsTest() override {
// Clear the use of |test_media_client_|, which was set in SetUp().
@@ -372,6 +394,9 @@ class KeySystemsTest : public testing::Test {
}
const CodecVector& foovideo_codec() const { return foovideo_codec_; }
+ const CodecVector& securefoovideo_codec() const {
+ return securefoovideo_codec_;
+ }
const CodecVector& foovideo_extended_codec() const {
return foovideo_extended_codec_;
}
@@ -396,6 +421,7 @@ class KeySystemsTest : public testing::Test {
CodecVector vp9_and_vorbis_codecs_;
CodecVector foovideo_codec_;
+ CodecVector securefoovideo_codec_;
CodecVector foovideo_extended_codec_;
CodecVector foovideo_dot_codec_;
CodecVector fooaudio_codec_;
@@ -416,8 +442,8 @@ TEST_F(KeySystemsTest, EmptyKeySystem) {
// Clear Key is the only key system registered in content.
TEST_F(KeySystemsTest, ClearKey) {
EXPECT_TRUE(IsSupportedKeySystem(kClearKey));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, no_codecs(), kClearKey));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, no_codecs(),
+ kClearKey));
EXPECT_EQ("ClearKey", GetKeySystemNameForUMA(kClearKey));
}
@@ -446,8 +472,8 @@ TEST_F(KeySystemsTest, Basic_UnrecognizedKeySystem) {
TEST_F(KeySystemsTest, Basic_UsesAesDecryptor) {
EXPECT_TRUE(IsSupportedKeySystem(kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, no_codecs(), kUsesAes));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoWebM, no_codecs(), kUsesAes));
// No UMA value for this test key system.
EXPECT_EQ("Unknown", GetKeySystemNameForUMA(kUsesAes));
@@ -458,44 +484,44 @@ TEST_F(KeySystemsTest, Basic_UsesAesDecryptor) {
TEST_F(KeySystemsTest,
IsSupportedKeySystemWithMediaMimeType_UsesAesDecryptor_TypesContainer1) {
// Valid video types.
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vp8_codec(), kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vp80_codec(), kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vp9_codec(), kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vp90_codec(), kUsesAes));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vp8_codec(), kUsesAes));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vp80_codec(),
+ kUsesAes));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vp9_codec(), kUsesAes));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vp90_codec(),
+ kUsesAes));
// Audio in a video container.
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoWebM, vp8_and_vorbis_codecs(), kUsesAes));
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoWebM, vp9_and_vorbis_codecs(), kUsesAes));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vorbis_codec(), kUsesAes));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vorbis_codec(),
+ kUsesAes));
// Non-Webm codecs.
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoWebM, foovideo_codec(), kUsesAes));
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoWebM, unknown_codec(), kUsesAes));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, mixed_codecs(), kUsesAes));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, mixed_codecs(),
+ kUsesAes));
// Valid audio types.
- EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(
- kAudioWebM, no_codecs(), kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(
- kAudioWebM, vorbis_codec(), kUsesAes));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithAudioMimeType(kAudioWebM, no_codecs(), kUsesAes));
+ EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(kAudioWebM, vorbis_codec(),
+ kUsesAes));
// Non-audio codecs.
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kAudioWebM, vp8_codec(), kUsesAes));
+ EXPECT_FALSE(
+ IsSupportedKeySystemWithMediaMimeType(kAudioWebM, vp8_codec(), kUsesAes));
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kAudioWebM, vp8_and_vorbis_codecs(), kUsesAes));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kAudioWebM, vp9_codec(), kUsesAes));
+ EXPECT_FALSE(
+ IsSupportedKeySystemWithMediaMimeType(kAudioWebM, vp9_codec(), kUsesAes));
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kAudioWebM, vp9_and_vorbis_codecs(), kUsesAes));
@@ -530,8 +556,8 @@ TEST_F(KeySystemsTest, IsSupportedKeySystem_InvalidVariants) {
}
TEST_F(KeySystemsTest, IsSupportedKeySystemWithMediaMimeType_NoType) {
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- std::string(), no_codecs(), kUsesAes));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(std::string(), no_codecs(),
+ kUsesAes));
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(std::string(), no_codecs(),
"x-org.example.foo"));
@@ -544,18 +570,18 @@ TEST_F(KeySystemsTest, IsSupportedKeySystemWithMediaMimeType_NoType) {
TEST_F(KeySystemsTest,
IsSupportedKeySystemWithMediaMimeType_UsesAesDecryptor_TypesContainer2) {
// Valid video types.
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, no_codecs(), kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, foovideo_codec(), kUsesAes));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoFoo, no_codecs(), kUsesAes));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoFoo, foovideo_codec(),
+ kUsesAes));
// Audio in a video container.
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoFoo, foovideo_and_fooaudio_codecs(), kUsesAes));
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoFoo, fooaudio_codec(), kUsesAes));
- // Extended codecs fail because this is handled by SimpleWebMimeRegistryImpl.
- // They should really pass canPlayType().
+
+ // Extended codecs.
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoFoo, foovideo_extended_codec(), kUsesAes));
@@ -564,18 +590,18 @@ TEST_F(KeySystemsTest,
kVideoFoo, foovideo_dot_codec(), kUsesAes));
// Non-container2 codec.
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, vp8_codec(), kUsesAes));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, unknown_codec(), kUsesAes));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, mixed_codecs(), kUsesAes));
+ EXPECT_FALSE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoFoo, vp8_codec(), kUsesAes));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(kVideoFoo, unknown_codec(),
+ kUsesAes));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(kVideoFoo, mixed_codecs(),
+ kUsesAes));
// Valid audio types.
- EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(
- kAudioFoo, no_codecs(), kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(
- kAudioFoo, fooaudio_codec(), kUsesAes));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithAudioMimeType(kAudioFoo, no_codecs(), kUsesAes));
+ EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(kAudioFoo, fooaudio_codec(),
+ kUsesAes));
// Non-audio codecs.
EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(
@@ -584,16 +610,18 @@ TEST_F(KeySystemsTest,
kAudioFoo, foovideo_and_fooaudio_codecs(), kUsesAes));
// Non-container2 codec.
- EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(
- kAudioFoo, vorbis_codec(), kUsesAes));
+ EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(kAudioFoo, vorbis_codec(),
+ kUsesAes));
}
TEST_F(KeySystemsTest,
IsSupportedKeySystem_UsesAesDecryptor_EncryptionSchemes) {
- EXPECT_TRUE(
- IsEncryptionSchemeSupported(kUsesAes, EncryptionMode::kUnencrypted));
- EXPECT_TRUE(IsEncryptionSchemeSupported(kUsesAes, EncryptionMode::kCenc));
- EXPECT_FALSE(IsEncryptionSchemeSupported(kUsesAes, EncryptionMode::kCbcs));
+ ExpectEncryptionSchemeConfigRule(kUsesAes, EncryptionMode::kUnencrypted,
+ EmeConfigRule::SUPPORTED);
+ ExpectEncryptionSchemeConfigRule(kUsesAes, EncryptionMode::kCenc,
+ EmeConfigRule::SUPPORTED);
+ ExpectEncryptionSchemeConfigRule(kUsesAes, EncryptionMode::kCbcs,
+ EmeConfigRule::NOT_SUPPORTED);
}
//
@@ -605,8 +633,8 @@ TEST_F(KeySystemsTest, Basic_ExternalDecryptor) {
return;
EXPECT_TRUE(IsSupportedKeySystem(kExternal));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, no_codecs(), kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, no_codecs(),
+ kExternal));
EXPECT_FALSE(CanUseAesDecryptor(kExternal));
}
@@ -618,46 +646,46 @@ TEST_F(
return;
// Valid video types.
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, no_codecs(), kExternal));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vp8_codec(), kExternal));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vp80_codec(), kExternal));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vp9_codec(), kExternal));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vp90_codec(), kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, no_codecs(),
+ kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vp8_codec(),
+ kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vp80_codec(),
+ kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vp9_codec(),
+ kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vp90_codec(),
+ kExternal));
// Audio in a video container.
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoWebM, vp8_and_vorbis_codecs(), kExternal));
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoWebM, vp9_and_vorbis_codecs(), kExternal));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, vorbis_codec(), kExternal));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, vorbis_codec(),
+ kExternal));
// Non-Webm codecs.
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoWebM, foovideo_codec(), kExternal));
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoWebM, unknown_codec(), kExternal));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, mixed_codecs(), kExternal));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(kVideoWebM, mixed_codecs(),
+ kExternal));
// Valid audio types.
- EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(
- kAudioWebM, no_codecs(), kExternal));
- EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(
- kAudioWebM, vorbis_codec(), kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(kAudioWebM, no_codecs(),
+ kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(kAudioWebM, vorbis_codec(),
+ kExternal));
// Non-audio codecs.
- EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(
- kAudioWebM, vp8_codec(), kExternal));
+ EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(kAudioWebM, vp8_codec(),
+ kExternal));
EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(
kAudioWebM, vp8_and_vorbis_codecs(), kExternal));
- EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(
- kAudioWebM, vp9_codec(), kExternal));
+ EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(kAudioWebM, vp9_codec(),
+ kExternal));
EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(
kAudioWebM, vp9_and_vorbis_codecs(), kExternal));
@@ -673,10 +701,10 @@ TEST_F(
return;
// Valid video types.
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, no_codecs(), kExternal));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, foovideo_codec(), kExternal));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoFoo, no_codecs(), kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(kVideoFoo, foovideo_codec(),
+ kExternal));
// Audio in a video container.
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
@@ -684,8 +712,7 @@ TEST_F(
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoFoo, fooaudio_codec(), kExternal));
- // Extended codecs fail because this is handled by SimpleWebMimeRegistryImpl.
- // They should really pass canPlayType().
+ // Extended codecs.
EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
kVideoFoo, foovideo_extended_codec(), kExternal));
@@ -694,18 +721,18 @@ TEST_F(
kVideoFoo, foovideo_dot_codec(), kExternal));
// Non-container2 codecs.
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, vp8_codec(), kExternal));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, unknown_codec(), kExternal));
- EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(
- kVideoFoo, mixed_codecs(), kExternal));
+ EXPECT_FALSE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoFoo, vp8_codec(), kExternal));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(kVideoFoo, unknown_codec(),
+ kExternal));
+ EXPECT_FALSE(IsSupportedKeySystemWithMediaMimeType(kVideoFoo, mixed_codecs(),
+ kExternal));
// Valid audio types.
- EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(
- kAudioFoo, no_codecs(), kExternal));
- EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(
- kAudioFoo, fooaudio_codec(), kExternal));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithAudioMimeType(kAudioFoo, no_codecs(), kExternal));
+ EXPECT_TRUE(IsSupportedKeySystemWithAudioMimeType(kAudioFoo, fooaudio_codec(),
+ kExternal));
// Non-audio codecs.
EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(
@@ -714,8 +741,8 @@ TEST_F(
kAudioFoo, foovideo_and_fooaudio_codecs(), kExternal));
// Non-container2 codec.
- EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(
- kAudioFoo, vorbis_codec(), kExternal));
+ EXPECT_FALSE(IsSupportedKeySystemWithAudioMimeType(kAudioFoo, vorbis_codec(),
+ kExternal));
}
TEST_F(KeySystemsTest,
@@ -723,10 +750,12 @@ TEST_F(KeySystemsTest,
if (!CanRunExternalKeySystemTests())
return;
- EXPECT_FALSE(
- IsEncryptionSchemeSupported(kExternal, EncryptionMode::kUnencrypted));
- EXPECT_TRUE(IsEncryptionSchemeSupported(kExternal, EncryptionMode::kCenc));
- EXPECT_TRUE(IsEncryptionSchemeSupported(kExternal, EncryptionMode::kCbcs));
+ ExpectEncryptionSchemeConfigRule(kExternal, EncryptionMode::kUnencrypted,
+ EmeConfigRule::SUPPORTED);
+ ExpectEncryptionSchemeConfigRule(kExternal, EncryptionMode::kCenc,
+ EmeConfigRule::SUPPORTED);
+ ExpectEncryptionSchemeConfigRule(kExternal, EncryptionMode::kCbcs,
+ EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED);
}
TEST_F(KeySystemsTest, KeySystemNameForUMA) {
@@ -741,8 +770,8 @@ TEST_F(KeySystemsTest, KeySystemNameForUMA) {
TEST_F(KeySystemsTest, KeySystemsUpdate) {
EXPECT_TRUE(IsSupportedKeySystem(kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, no_codecs(), kUsesAes));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoWebM, no_codecs(), kUsesAes));
if (CanRunExternalKeySystemTests()) {
EXPECT_TRUE(IsSupportedKeySystem(kExternal));
@@ -753,8 +782,8 @@ TEST_F(KeySystemsTest, KeySystemsUpdate) {
UpdateClientKeySystems();
EXPECT_TRUE(IsSupportedKeySystem(kUsesAes));
- EXPECT_TRUE(IsSupportedKeySystemWithMediaMimeType(
- kVideoWebM, no_codecs(), kUsesAes));
+ EXPECT_TRUE(
+ IsSupportedKeySystemWithMediaMimeType(kVideoWebM, no_codecs(), kUsesAes));
if (CanRunExternalKeySystemTests())
EXPECT_FALSE(IsSupportedKeySystem(kExternal));
}
@@ -771,7 +800,6 @@ TEST_F(KeySystemsTest, GetContentTypeConfigRule) {
GetRobustnessConfigRule(kRobustnessSecureCodecsRequired));
}
-#if defined(OS_ANDROID)
TEST_F(KeySystemsTest, HardwareSecureCodecs) {
if (!CanRunExternalKeySystemTests())
return;
@@ -781,13 +809,22 @@ TEST_F(KeySystemsTest, HardwareSecureCodecs) {
EXPECT_EQ(
EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED,
GetVideoContentTypeConfigRule(kVideoFoo, foovideo_codec(), kUsesAes));
+ EXPECT_EQ(EmeConfigRule::NOT_SUPPORTED,
+ GetVideoContentTypeConfigRule(kVideoFoo, securefoovideo_codec(),
+ kUsesAes));
EXPECT_EQ(EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED,
GetVideoContentTypeConfigRule(kVideoWebM, vp8_codec(), kExternal));
EXPECT_EQ(
EmeConfigRule::SUPPORTED,
GetVideoContentTypeConfigRule(kVideoFoo, foovideo_codec(), kExternal));
+
+ // Codec that is supported by hardware secure codec but not otherwise is
+ // treated as NOT_SUPPORTED instead of HW_SECURE_CODECS_REQUIRED. See
+ // KeySystemsImpl::GetContentTypeConfigRule() for details.
+ EXPECT_EQ(EmeConfigRule::NOT_SUPPORTED,
+ GetVideoContentTypeConfigRule(kVideoFoo, securefoovideo_codec(),
+ kExternal));
}
-#endif
} // namespace media
diff --git a/chromium/media/base/media_controller.h b/chromium/media/base/media_controller.h
new file mode 100644
index 00000000000..afd1f063cbf
--- /dev/null
+++ b/chromium/media/base/media_controller.h
@@ -0,0 +1,44 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_MEDIA_CONTROLLER_H_
+#define MEDIA_BASE_MEDIA_CONTROLLER_H_
+
+#include "base/time/time.h"
+
+namespace media {
+
+// High level interface that allows a controller to issue simple media commands.
+// Modeled after the media_router.mojom.MediaController interface.
+// State changes will be signaled via the MediaStatusObserver interface.
+// TODO(tguilbert): Add MediaStatusObserver interface.
+class MediaController {
+ public:
+ virtual ~MediaController() = default;
+
+ // Starts playing the media if it is paused. Is a no-op if not supported by
+ // the media or the media is already playing.
+ virtual void Play() = 0;
+
+ // Pauses the media if it is playing. Is a no-op if not supported by the media
+ // or the media is already paused.
+ virtual void Pause() = 0;
+
+ // Mutes the media if |mute| is true, and unmutes it if false. Is a no-op if
+ // not supported by the media.
+ virtual void SetMute(bool mute) = 0;
+
+ // Changes the current volume of the media, with 1 being the highest and 0
+ // being the lowest/no sound. Does not change the (un)muted state of the
+ // media. Is a no-op if not supported by the media.
+ virtual void SetVolume(float volume) = 0;
+
+ // Sets the current playback position. |time| must be less than or equal to
+ // the duration of the media. Is a no-op if the media doesn't support seeking.
+ virtual void Seek(base::TimeDelta time) = 0;
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_MEDIA_CONTROLLER_H_
diff --git a/chromium/media/base/media_log.cc b/chromium/media/base/media_log.cc
index 623ba5d0964..548b8c17b7f 100644
--- a/chromium/media/base/media_log.cc
+++ b/chromium/media/base/media_log.cc
@@ -186,7 +186,7 @@ std::unique_ptr<MediaLogEvent> MediaLog::CreateCreatedEvent(
const std::string& origin_url) {
std::unique_ptr<MediaLogEvent> event(
CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_CREATED));
- event->params.SetString("origin_url", origin_url);
+ event->params.SetString("origin_url", TruncateUrlString(origin_url));
return event;
}
@@ -231,7 +231,7 @@ std::unique_ptr<MediaLogEvent> MediaLog::CreateTimeEvent(
std::unique_ptr<MediaLogEvent> MediaLog::CreateLoadEvent(
const std::string& url) {
std::unique_ptr<MediaLogEvent> event(CreateEvent(MediaLogEvent::LOAD));
- event->params.SetString("url", url);
+ event->params.SetString("url", TruncateUrlString(url));
return event;
}
@@ -306,6 +306,19 @@ void MediaLog::SetBooleanProperty(
AddEvent(std::move(event));
}
+// static
+std::string MediaLog::TruncateUrlString(std::string log_string) {
+ if (log_string.length() > kMaxUrlLength) {
+ log_string.resize(kMaxUrlLength);
+
+ // Room for the ellipsis.
+ DCHECK_GE(kMaxUrlLength, std::size_t{3});
+ log_string.replace(log_string.end() - 3, log_string.end(), "...");
+ }
+
+ return log_string;
+}
+
LogHelper::LogHelper(MediaLog::MediaLogLevel level, MediaLog* media_log)
: level_(level), media_log_(media_log) {
DCHECK(media_log_);
diff --git a/chromium/media/base/media_log.h b/chromium/media/base/media_log.h
index 084c113dcdd..d60cd1188b7 100644
--- a/chromium/media/base/media_log.h
+++ b/chromium/media/base/media_log.h
@@ -123,6 +123,18 @@ class MEDIA_EXPORT MediaLog {
int32_t id() const { return id_; }
private:
+ friend class MediaLogTest;
+
+ enum : size_t {
+ // Max length of URLs in Created/Load events. Exceeding triggers truncation.
+ kMaxUrlLength = 1000,
+ };
+
+ // URLs (for Created and Load events) may be of arbitrary length from the
+ // untrusted renderer. This method truncates to |kMaxUrlLength| before storing
+ // the event, and sets the last 3 characters to an ellipsis.
+ static std::string TruncateUrlString(std::string log_string);
+
// A unique (to this process) id for this MediaLog.
int32_t id_;
diff --git a/chromium/media/base/media_log_unittest.cc b/chromium/media/base/media_log_unittest.cc
new file mode 100644
index 00000000000..c67842515a5
--- /dev/null
+++ b/chromium/media/base/media_log_unittest.cc
@@ -0,0 +1,79 @@
+// Copyright (c) 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sstream>
+#include <string>
+
+#include "base/macros.h"
+#include "media/base/media_log.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+// Friend class of MediaLog for access to internal constants.
+class MediaLogTest : public testing::Test {
+ public:
+ static constexpr size_t kMaxUrlLength = MediaLog::kMaxUrlLength;
+};
+
+constexpr size_t MediaLogTest::kMaxUrlLength;
+
+TEST_F(MediaLogTest, DontTruncateShortUrlString) {
+ const std::string short_url("chromium.org");
+ EXPECT_LT(short_url.length(), MediaLogTest::kMaxUrlLength);
+
+ // Verify that CreatedEvent does not truncate the short URL.
+ std::unique_ptr<MediaLogEvent> created_event =
+ MediaLog().CreateCreatedEvent(short_url);
+ std::string stored_url;
+ created_event->params.GetString("origin_url", &stored_url);
+ EXPECT_EQ(stored_url, short_url);
+
+ // Verify that LoadEvent does not truncate the short URL.
+ std::unique_ptr<MediaLogEvent> load_event =
+ MediaLog().CreateLoadEvent(short_url);
+ load_event->params.GetString("url", &stored_url);
+ EXPECT_EQ(stored_url, short_url);
+}
+
+TEST_F(MediaLogTest, TruncateLongUrlStrings) {
+ // Build a long string that exceeds the URL length limit.
+ std::stringstream string_builder;
+ constexpr size_t kLongStringLength = MediaLogTest::kMaxUrlLength + 10;
+ for (size_t i = 0; i < kLongStringLength; i++) {
+ string_builder << "c";
+ }
+ const std::string long_url = string_builder.str();
+ EXPECT_GT(long_url.length(), MediaLogTest::kMaxUrlLength);
+
+ // Verify that long CreatedEvent URL...
+ std::unique_ptr<MediaLogEvent> created_event =
+ MediaLog().CreateCreatedEvent(long_url);
+ std::string stored_url;
+ created_event->params.GetString("origin_url", &stored_url);
+
+ // ... is truncated
+ EXPECT_EQ(stored_url.length(), MediaLogTest::kMaxUrlLength);
+ // ... ends with ellipsis
+ EXPECT_EQ(stored_url.compare(MediaLogTest::kMaxUrlLength - 3, 3, "..."), 0);
+ // ... is otherwise a substring of the longer URL
+ EXPECT_EQ(stored_url.compare(0, MediaLogTest::kMaxUrlLength - 3, long_url, 0,
+ MediaLogTest::kMaxUrlLength - 3),
+ 0);
+
+ // Verify that long LoadEvent URL...
+ std::unique_ptr<MediaLogEvent> load_event =
+ MediaLog().CreateCreatedEvent(long_url);
+ load_event->params.GetString("url", &stored_url);
+ // ... is truncated
+ EXPECT_EQ(stored_url.length(), MediaLogTest::kMaxUrlLength);
+ // ... ends with ellipsis
+ EXPECT_EQ(stored_url.compare(MediaLogTest::kMaxUrlLength - 3, 3, "..."), 0);
+ // ... is otherwise a substring of the longer URL
+ EXPECT_EQ(stored_url.compare(0, MediaLogTest::kMaxUrlLength - 3, long_url, 0,
+ MediaLogTest::kMaxUrlLength - 3),
+ 0);
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/base/media_status.cc b/chromium/media/base/media_status.cc
new file mode 100644
index 00000000000..4640eefe2ad
--- /dev/null
+++ b/chromium/media/base/media_status.cc
@@ -0,0 +1,25 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+#include "media/base/media_status.h"
+
+namespace media {
+
+MediaStatus::MediaStatus() = default;
+
+MediaStatus::MediaStatus(const MediaStatus& other) = default;
+
+MediaStatus::~MediaStatus() = default;
+
+MediaStatus& MediaStatus::operator=(const MediaStatus& other) = default;
+
+bool MediaStatus::operator==(const MediaStatus& other) const {
+ return std::tie(title, can_play_pause, can_mute, can_set_volume, can_seek,
+ play_state, is_muted, volume, duration, current_time) ==
+ std::tie(other.title, other.can_play_pause, other.can_mute,
+ other.can_set_volume, other.can_seek, play_state,
+ other.is_muted, other.volume, other.duration,
+ other.current_time);
+}
+
+} // namespace media
diff --git a/chromium/media/base/media_status.h b/chromium/media/base/media_status.h
new file mode 100644
index 00000000000..5acead588db
--- /dev/null
+++ b/chromium/media/base/media_status.h
@@ -0,0 +1,62 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_MEDIA_STATUS_H_
+#define MEDIA_BASE_MEDIA_STATUS_H_
+
+#include "base/time/time.h"
+
+namespace media {
+
+// Describes the current state of media being controlled via the MediaController
+// interface. This is a copy of the media_router.mojom.MediaStatus interface,
+// without the cast specific portions.
+// TODO(https://crbug.com/820277): Deduplicate media_router::MediaStatus.
+struct MediaStatus {
+ public:
+ enum class PlayState { PLAYING, PAUSED, BUFFERING };
+
+ MediaStatus();
+ MediaStatus(const MediaStatus& other);
+ ~MediaStatus();
+
+ MediaStatus& operator=(const MediaStatus& other);
+ bool operator==(const MediaStatus& other) const;
+
+ // The main title of the media. For example, in a MediaStatus representing
+ // a YouTube Cast session, this could be the title of the video.
+ std::string title;
+
+ // If this is true, the media can be played and paused.
+ bool can_play_pause = false;
+
+ // If this is true, the media can be muted and unmuted.
+ bool can_mute = false;
+
+ // If this is true, the media's volume can be changed.
+ bool can_set_volume = false;
+
+ // If this is true, the media's current playback position can be changed.
+ bool can_seek = false;
+
+ PlayState play_state = PlayState::PLAYING;
+
+ bool is_muted = false;
+
+ // Current volume of the media, with 1 being the highest and 0 being the
+ // lowest/no sound. When |is_muted| is true, there should be no sound
+ // regardless of |volume|.
+ float volume = 0;
+
+ // The length of the media. A value of zero indicates that this is a media
+ // with no set duration (e.g. a live stream).
+ base::TimeDelta duration;
+
+ // Current playback position. Must be less than or equal to |duration|.
+ base::TimeDelta current_time;
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_MEDIA_STATUS_H_
diff --git a/chromium/media/base/media_status_observer.h b/chromium/media/base/media_status_observer.h
new file mode 100644
index 00000000000..8431163c842
--- /dev/null
+++ b/chromium/media/base/media_status_observer.h
@@ -0,0 +1,25 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_MEDIA_STATUS_OBSERVER_H_
+#define MEDIA_BASE_MEDIA_STATUS_OBSERVER_H_
+
+#include "media/base/media_status.h"
+
+namespace media {
+
+// Describes the current state of media being controlled via the MediaController
+// interface. This is a copy of the media_router.mojom.MediaStatus interface,
+// without the cast specific portions.
+// TODO(https://crbug.com/820277): Deduplicate media_router::MediaStatus.
+class MediaStatusObserver {
+ public:
+ virtual ~MediaStatusObserver() = default;
+
+ virtual void OnMediaStatusUpdated(const MediaStatus& status) = 0;
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_MEDIA_STATUS_OBSERVER_H_
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index 8fda8274d5d..5fb8f3c2035 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -117,8 +117,10 @@ const char kUseFileForFakeAudioCapture[] = "use-file-for-fake-audio-capture";
// accelerator hardware to be present.
const char kUseFakeJpegDecodeAccelerator[] = "use-fake-jpeg-decode-accelerator";
-// Enables support for inband text tracks in media content.
-const char kEnableInbandTextTracks[] = "enable-inband-text-tracks";
+// Disable hardware acceleration of mjpeg decode for captured frame, where
+// available.
+const char kDisableAcceleratedMjpegDecode[] =
+ "disable-accelerated-mjpeg-decode";
// When running tests on a system without the required hardware or libraries,
// this flag will cause the tests to fail. Otherwise, they silently succeed.
@@ -142,10 +144,10 @@ const char kDisableRTCSmoothnessAlgorithm[] =
// Force media player using SurfaceView instead of SurfaceTexture on Android.
const char kForceVideoOverlays[] = "force-video-overlays";
-// Allows explicitly specifying MSE audio/video buffer sizes.
+// Allows explicitly specifying MSE audio/video buffer sizes as megabytes.
// Default values are 150M for video and 12M for audio.
-const char kMSEAudioBufferSizeLimit[] = "mse-audio-buffer-size-limit";
-const char kMSEVideoBufferSizeLimit[] = "mse-video-buffer-size-limit";
+const char kMSEAudioBufferSizeLimitMb[] = "mse-audio-buffer-size-limit-mb";
+const char kMSEVideoBufferSizeLimitMb[] = "mse-video-buffer-size-limit-mb";
// Specifies the path to the Clear Key CDM for testing, which is necessary to
// support External Clear Key key system when library CDM is enabled. Note that
@@ -167,6 +169,18 @@ const char kClearKeyCdmPathForTesting[] = "clear-key-cdm-path-for-testing";
const char kOverrideEnabledCdmInterfaceVersion[] =
"override-enabled-cdm-interface-version";
+// Overrides hardware secure codecs support for testing. If specified, real
+// platform hardware secure codecs check will be skipped. Codecs are separated
+// by comma. Valid codecs are "vp8", "vp9" and "avc1". For example:
+// --override-hardware-secure-codecs-for-testing=vp8,vp9
+// --override-hardware-secure-codecs-for-testing=avc1
+// CENC encryption scheme is assumed to be supported for the specified codecs.
+// If no valid codecs specified, no hardware secure codecs are supported. This
+// can be used to disable hardware secure codecs support:
+// --override-hardware-secure-codecs-for-testing
+const char kOverrideHardwareSecureCodecsForTesting[] =
+ "override-hardware-secure-codecs-for-testing";
+
#if !defined(OS_ANDROID)
// Turns on the internal media session backend. This should be used by embedders
// that want to control the media playback with the media session interfaces.
@@ -205,8 +219,14 @@ const base::Feature kOverlayFullscreenVideo{"overlay-fullscreen-video",
base::FEATURE_ENABLED_BY_DEFAULT};
// Enable Picture-in-Picture.
-const base::Feature kPictureInPicture{"PictureInPicture",
- base::FEATURE_DISABLED_BY_DEFAULT};
+const base::Feature kPictureInPicture {
+ "PictureInPicture",
+#if defined(OS_ANDROID)
+ base::FEATURE_DISABLED_BY_DEFAULT
+#else
+ base::FEATURE_ENABLED_BY_DEFAULT
+#endif
+};
const base::Feature kPreloadMetadataSuspend{"PreloadMetadataSuspend",
base::FEATURE_ENABLED_BY_DEFAULT};
@@ -237,7 +257,7 @@ const base::Feature kUseAndroidOverlayAggressively{
// Enables playback of AV1 video files.
const base::Feature kAv1Decoder{"Av1Decoder",
- base::FEATURE_DISABLED_BY_DEFAULT};
+ base::FEATURE_ENABLED_BY_DEFAULT};
// Let video track be unselected when video is playing in the background.
const base::Feature kBackgroundVideoTrackOptimization{
@@ -253,9 +273,16 @@ const base::Feature kBackgroundVideoPauseOptimization{
const base::Feature kMemoryPressureBasedSourceBufferGC{
"MemoryPressureBasedSourceBufferGC", base::FEATURE_DISABLED_BY_DEFAULT};
-// Enable MojoVideoDecoder. Experimental.
-const base::Feature kMojoVideoDecoder{"MojoVideoDecoder",
- base::FEATURE_DISABLED_BY_DEFAULT};
+// Enable MojoVideoDecoder. On Android, we use this by default. Elsewhere,
+// it's experimental.
+const base::Feature kMojoVideoDecoder {
+ "MojoVideoDecoder",
+#if defined(OS_ANDROID)
+ base::FEATURE_ENABLED_BY_DEFAULT
+#else
+ base::FEATURE_DISABLED_BY_DEFAULT
+#endif
+};
// Enable The D3D11 Video decoder. Must also enable MojoVideoDecoder for
// this to have any effect.
@@ -312,6 +339,13 @@ const base::Feature kVideoBlitColorAccuracy{"video-blit-color-accuracy",
const base::Feature kExternalClearKeyForTesting{
"ExternalClearKeyForTesting", base::FEATURE_DISABLED_BY_DEFAULT};
+// Enables hardware secure decryption if supported by hardware and CDM.
+// TODO(xhwang): Currently this is only used for development of new features.
+// Apply this to Android and ChromeOS as well where hardware secure decryption
+// is already available.
+const base::Feature kHardwareSecureDecryption{
+ "HardwareSecureDecryption", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enables low-delay video rendering in media pipeline on "live" stream.
const base::Feature kLowDelayVideoRenderingOnLiveStream{
"low-delay-video-rendering-on-live-stream",
@@ -324,6 +358,10 @@ const base::Feature kAutoplayIgnoreWebAudio{"AutoplayIgnoreWebAudio",
base::FEATURE_ENABLED_BY_DEFAULT};
#if defined(OS_ANDROID)
+// Enable a gesture to make the media controls expaned into the display cutout.
+const base::Feature kMediaControlsExpandGesture{
+ "MediaControlsExpandGesture", base::FEATURE_ENABLED_BY_DEFAULT};
+
// Lock the screen orientation when a video goes fullscreen.
const base::Feature kVideoFullscreenOrientationLock{
"VideoFullscreenOrientationLock", base::FEATURE_ENABLED_BY_DEFAULT};
@@ -423,4 +461,19 @@ const base::Feature kPreloadMediaEngagementData{
"PreloadMediaEngagementData", base::FEATURE_ENABLED_BY_DEFAULT};
#endif
+bool IsVideoCaptureAcceleratedJpegDecodingEnabled() {
+ if (base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kDisableAcceleratedMjpegDecode)) {
+ return false;
+ }
+ if (base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kUseFakeJpegDecodeAccelerator)) {
+ return true;
+ }
+#if defined(OS_CHROMEOS)
+ return true;
+#endif
+ return false;
+}
+
} // namespace media
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index 257c4892c9d..ac585b639af 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -66,8 +66,7 @@ MEDIA_EXPORT extern const char kUseFakeDeviceForMediaStream[];
MEDIA_EXPORT extern const char kUseFileForFakeVideoCapture[];
MEDIA_EXPORT extern const char kUseFileForFakeAudioCapture[];
MEDIA_EXPORT extern const char kUseFakeJpegDecodeAccelerator[];
-
-MEDIA_EXPORT extern const char kEnableInbandTextTracks[];
+MEDIA_EXPORT extern const char kDisableAcceleratedMjpegDecode[];
MEDIA_EXPORT extern const char kRequireAudioHardwareForTesting[];
MEDIA_EXPORT extern const char kMuteAudio[];
@@ -78,11 +77,12 @@ MEDIA_EXPORT extern const char kDisableRTCSmoothnessAlgorithm[];
MEDIA_EXPORT extern const char kForceVideoOverlays[];
-MEDIA_EXPORT extern const char kMSEAudioBufferSizeLimit[];
-MEDIA_EXPORT extern const char kMSEVideoBufferSizeLimit[];
+MEDIA_EXPORT extern const char kMSEAudioBufferSizeLimitMb[];
+MEDIA_EXPORT extern const char kMSEVideoBufferSizeLimitMb[];
MEDIA_EXPORT extern const char kClearKeyCdmPathForTesting[];
MEDIA_EXPORT extern const char kOverrideEnabledCdmInterfaceVersion[];
+MEDIA_EXPORT extern const char kOverrideHardwareSecureCodecsForTesting[];
#if !defined(OS_ANDROID)
MEDIA_EXPORT extern const char kEnableInternalMediaSession[];
@@ -110,6 +110,7 @@ MEDIA_EXPORT extern const base::Feature kBackgroundVideoPauseOptimization;
MEDIA_EXPORT extern const base::Feature kBackgroundVideoTrackOptimization;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoder;
MEDIA_EXPORT extern const base::Feature kExternalClearKeyForTesting;
+MEDIA_EXPORT extern const base::Feature kHardwareSecureDecryption;
MEDIA_EXPORT extern const base::Feature kLowDelayVideoRenderingOnLiveStream;
MEDIA_EXPORT extern const base::Feature kMediaCastOverlayButton;
MEDIA_EXPORT extern const base::Feature kRecordMediaEngagementScores;
@@ -138,6 +139,7 @@ MEDIA_EXPORT extern const base::Feature kUseSurfaceLayerForVideo;
MEDIA_EXPORT extern const base::Feature kUseModernMediaControls;
#if defined(OS_ANDROID)
+MEDIA_EXPORT extern const base::Feature kMediaControlsExpandGesture;
MEDIA_EXPORT extern const base::Feature kVideoFullscreenOrientationLock;
MEDIA_EXPORT extern const base::Feature kVideoRotateToFullscreen;
MEDIA_EXPORT extern const base::Feature kMediaDrmPersistentLicense;
@@ -164,6 +166,8 @@ MEDIA_EXPORT std::string GetEffectiveAutoplayPolicy(
// audio focus duck flash should be enabled.
MEDIA_EXPORT bool IsAudioFocusDuckFlashEnabled();
+MEDIA_EXPORT bool IsVideoCaptureAcceleratedJpegDecodingEnabled();
+
} // namespace media
#endif // MEDIA_BASE_MEDIA_SWITCHES_H_
diff --git a/chromium/media/base/media_url_demuxer.cc b/chromium/media/base/media_url_demuxer.cc
index d83213553cb..953b6047a5d 100644
--- a/chromium/media/base/media_url_demuxer.cc
+++ b/chromium/media/base/media_url_demuxer.cc
@@ -36,8 +36,7 @@ std::string MediaUrlDemuxer::GetDisplayName() const {
}
void MediaUrlDemuxer::Initialize(DemuxerHost* host,
- const PipelineStatusCB& status_cb,
- bool enable_text_tracks) {
+ const PipelineStatusCB& status_cb) {
DVLOG(1) << __func__;
task_runner_->PostTask(FROM_HERE, base::Bind(status_cb, PIPELINE_OK));
}
diff --git a/chromium/media/base/media_url_demuxer.h b/chromium/media/base/media_url_demuxer.h
index 38527f2d5b4..33f1d8608a3 100644
--- a/chromium/media/base/media_url_demuxer.h
+++ b/chromium/media/base/media_url_demuxer.h
@@ -46,8 +46,7 @@ class MEDIA_EXPORT MediaUrlDemuxer : public Demuxer {
// Demuxer interface.
std::string GetDisplayName() const override;
void Initialize(DemuxerHost* host,
- const PipelineStatusCB& status_cb,
- bool enable_text_tracks) override;
+ const PipelineStatusCB& status_cb) override;
void StartWaitingForSeek(base::TimeDelta seek_time) override;
void CancelPendingSeek(base::TimeDelta seek_time) override;
void Seek(base::TimeDelta time, const PipelineStatusCB& status_cb) override;
diff --git a/chromium/media/base/media_url_demuxer_unittest.cc b/chromium/media/base/media_url_demuxer_unittest.cc
index 3ca820793d7..ccbc43a0f0d 100644
--- a/chromium/media/base/media_url_demuxer_unittest.cc
+++ b/chromium/media/base/media_url_demuxer_unittest.cc
@@ -67,8 +67,7 @@ TEST_F(MediaUrlDemuxerTest, InitializeReturnsPipelineOk) {
InitializeTest();
demuxer_->Initialize(nullptr,
base::Bind(&MediaUrlDemuxerTest::VerifyCallbackOk,
- base::Unretained(this)),
- false);
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/base/mime_util_internal.cc b/chromium/media/base/mime_util_internal.cc
index 22a0657b387..92e201b4950 100644
--- a/chromium/media/base/mime_util_internal.cc
+++ b/chromium/media/base/mime_util_internal.cc
@@ -74,11 +74,6 @@ const base::flat_map<std::string, MimeUtil::Codec>& GetStringToCodecMap() {
{"vorbis", MimeUtil::VORBIS}, {"opus", MimeUtil::OPUS},
{"flac", MimeUtil::FLAC}, {"vp8", MimeUtil::VP8},
{"vp8.0", MimeUtil::VP8}, {"theora", MimeUtil::THEORA},
-// TODO(dalecurtis): This is not the correct final string. Fix before enabling
-// by default. http://crbug.com/784607
-#if BUILDFLAG(ENABLE_AV1_DECODER)
- {"av1", MimeUtil::AV1},
-#endif
},
base::KEEP_FIRST_OF_DUPES);
@@ -780,16 +775,6 @@ bool MimeUtil::ParseCodecHelper(const std::string& mime_type_lower_case,
case Codec::THEORA:
out_result->video_profile = THEORAPROFILE_ANY;
break;
- case Codec::AV1: {
-#if BUILDFLAG(ENABLE_AV1_DECODER)
- if (base::FeatureList::IsEnabled(kAv1Decoder)) {
- out_result->video_profile = AV1PROFILE_PROFILE0;
- break;
- }
-#endif
- return false;
- }
-
default:
NOTREACHED();
}
@@ -831,6 +816,14 @@ bool MimeUtil::ParseCodecHelper(const std::string& mime_type_lower_case,
return true;
}
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+ if (base::FeatureList::IsEnabled(kAv1Decoder) &&
+ ParseAv1CodecId(codec_id, out_profile, out_level, out_color_space)) {
+ out_result->codec = MimeUtil::AV1;
+ return true;
+ }
+#endif
+
if (ParseAVCCodecId(codec_id, out_profile, out_level)) {
out_result->codec = MimeUtil::H264;
// Allowed string ambiguity since 2014. DO NOT ADD NEW CASES FOR AMBIGUITY.
diff --git a/chromium/media/base/mock_demuxer_host.h b/chromium/media/base/mock_demuxer_host.h
index 55329b78dc3..8cd4894c1fe 100644
--- a/chromium/media/base/mock_demuxer_host.h
+++ b/chromium/media/base/mock_demuxer_host.h
@@ -21,9 +21,6 @@ class MockDemuxerHost : public DemuxerHost {
void(const Ranges<base::TimeDelta>&));
MOCK_METHOD1(SetDuration, void(base::TimeDelta duration));
MOCK_METHOD1(OnDemuxerError, void(PipelineStatus error));
- MOCK_METHOD2(AddTextStream, void(DemuxerStream*,
- const TextTrackConfig&));
- MOCK_METHOD1(RemoveTextStream, void(DemuxerStream*));
private:
DISALLOW_COPY_AND_ASSIGN(MockDemuxerHost);
diff --git a/chromium/media/base/mock_filters.h b/chromium/media/base/mock_filters.h
index 41f90d69b2e..a21f29f2c56 100644
--- a/chromium/media/base/mock_filters.h
+++ b/chromium/media/base/mock_filters.h
@@ -134,8 +134,7 @@ class MockDemuxer : public Demuxer {
// Demuxer implementation.
std::string GetDisplayName() const override;
- MOCK_METHOD3(Initialize,
- void(DemuxerHost* host, const PipelineStatusCB& cb, bool));
+ MOCK_METHOD2(Initialize, void(DemuxerHost* host, const PipelineStatusCB& cb));
MOCK_METHOD1(StartWaitingForSeek, void(base::TimeDelta));
MOCK_METHOD1(CancelPendingSeek, void(base::TimeDelta));
MOCK_METHOD2(Seek, void(base::TimeDelta time, const PipelineStatusCB& cb));
@@ -579,7 +578,7 @@ class MockStreamParser : public StreamParser {
// StreamParser interface
MOCK_METHOD8(
Init,
- void(const InitCB& init_cb,
+ void(InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool ignore_text_track,
@@ -588,6 +587,7 @@ class MockStreamParser : public StreamParser {
const EndMediaSegmentCB& end_of_segment_cb,
MediaLog* media_log));
MOCK_METHOD0(Flush, void());
+ MOCK_CONST_METHOD0(GetGenerateTimestampsFlag, bool());
MOCK_METHOD2(Parse, bool(const uint8_t*, int));
private:
diff --git a/chromium/media/base/overlay_info.cc b/chromium/media/base/overlay_info.cc
index 8704658814e..2f67aae8e3a 100644
--- a/chromium/media/base/overlay_info.cc
+++ b/chromium/media/base/overlay_info.cc
@@ -3,23 +3,18 @@
// found in the LICENSE file.
#include "media/base/overlay_info.h"
-#include "media/base/surface_manager.h"
namespace media {
OverlayInfo::OverlayInfo() = default;
OverlayInfo::OverlayInfo(const OverlayInfo&) = default;
-bool OverlayInfo::HasValidSurfaceId() const {
- return surface_id != SurfaceManager::kNoSurfaceID;
-}
-
bool OverlayInfo::HasValidRoutingToken() const {
return routing_token.has_value();
}
bool OverlayInfo::RefersToSameOverlayAs(const OverlayInfo& other) {
- return surface_id == other.surface_id && routing_token == other.routing_token;
+ return routing_token == other.routing_token;
}
} // namespace media
diff --git a/chromium/media/base/overlay_info.h b/chromium/media/base/overlay_info.h
index cc788be4b56..e4af30f153b 100644
--- a/chromium/media/base/overlay_info.h
+++ b/chromium/media/base/overlay_info.h
@@ -10,7 +10,6 @@
#include "base/optional.h"
#include "base/unguessable_token.h"
#include "media/base/media_export.h"
-#include "media/base/surface_manager.h"
namespace media {
@@ -25,17 +24,12 @@ struct MEDIA_EXPORT OverlayInfo {
// Convenience functions to return true if and only if this specifies a
// surface ID / routing token that is not kNoSurfaceID / empty. I.e., if we
// provide enough info to create an overlay.
- bool HasValidSurfaceId() const;
bool HasValidRoutingToken() const;
// Whether |other| refers to the same (surface_id, routing_token) pair as
// |this|.
bool RefersToSameOverlayAs(const OverlayInfo& other);
- // This is the SurfaceManager surface id, or SurfaceManager::kNoSurfaceID to
- // indicate that no surface from SurfaceManager should be used.
- int surface_id = SurfaceManager::kNoSurfaceID;
-
// The routing token for AndroidOverlay, if any.
RoutingToken routing_token;
diff --git a/chromium/media/base/pipeline_impl.cc b/chromium/media/base/pipeline_impl.cc
index 5776cc4abe0..3f63c4068fc 100644
--- a/chromium/media/base/pipeline_impl.cc
+++ b/chromium/media/base/pipeline_impl.cc
@@ -55,7 +55,6 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
void Start(StartType start_type,
Demuxer* demuxer,
std::unique_ptr<Renderer> renderer,
- std::unique_ptr<TextRenderer> text_renderer,
base::WeakPtr<PipelineImpl> weak_pipeline);
void Stop(const base::Closure& stop_cb);
void Seek(base::TimeDelta time);
@@ -122,9 +121,6 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
void OnBufferedTimeRangesChanged(const Ranges<base::TimeDelta>& ranges) final;
void SetDuration(base::TimeDelta duration) final;
void OnDemuxerError(PipelineStatus error) final;
- void AddTextStream(DemuxerStream* text_stream,
- const TextTrackConfig& config) final;
- void RemoveTextStream(DemuxerStream* text_stream) final;
// RendererClient implementation.
void OnError(PipelineStatus error) final;
@@ -138,12 +134,6 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
void OnVideoOpacityChange(bool opaque) final;
void OnDurationChange(base::TimeDelta duration) final;
- // TextRenderer tasks and notifications.
- void OnTextRendererEnded();
- void AddTextStreamTask(DemuxerStream* text_stream,
- const TextTrackConfig& config);
- void RemoveTextStreamTask(DemuxerStream* text_stream);
-
// Common handlers for notifications from renderers and demuxer.
void OnPipelineError(PipelineStatus error);
void OnCdmAttached(const CdmAttachedCB& cdm_attached_cb,
@@ -166,7 +156,6 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
base::WeakPtr<PipelineImpl> weak_pipeline_;
Demuxer* demuxer_;
- std::unique_ptr<TextRenderer> text_renderer_;
double playback_rate_;
float volume_;
CdmContext* cdm_context_;
@@ -232,7 +221,6 @@ void PipelineImpl::RendererWrapper::Start(
StartType start_type,
Demuxer* demuxer,
std::unique_ptr<Renderer> renderer,
- std::unique_ptr<TextRenderer> text_renderer,
base::WeakPtr<PipelineImpl> weak_pipeline) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(state_ == kCreated || state_ == kStopped)
@@ -245,7 +233,6 @@ void PipelineImpl::RendererWrapper::Start(
DCHECK(!demuxer_);
DCHECK(!shared_state_.renderer);
- DCHECK(!text_renderer_);
DCHECK(!renderer_ended_);
DCHECK(!text_renderer_ended_);
demuxer_ = demuxer;
@@ -253,11 +240,6 @@ void PipelineImpl::RendererWrapper::Start(
base::AutoLock auto_lock(shared_state_lock_);
shared_state_.renderer = std::move(renderer);
}
- text_renderer_ = std::move(text_renderer);
- if (text_renderer_) {
- text_renderer_->Initialize(
- base::Bind(&RendererWrapper::OnTextRendererEnded, weak_this_));
- }
weak_pipeline_ = weak_pipeline;
// Queue asynchronous actions required to start.
@@ -300,7 +282,6 @@ void PipelineImpl::RendererWrapper::Stop(const base::Closure& stop_cb) {
pending_callbacks_.reset();
DestroyRenderer();
- text_renderer_.reset();
if (demuxer_) {
demuxer_->Stop();
@@ -346,22 +327,12 @@ void PipelineImpl::RendererWrapper::Seek(base::TimeDelta time) {
// Abort any reads the renderer may be blocked on.
demuxer_->AbortPendingReads();
- // Pause.
- if (text_renderer_) {
- bound_fns.Push(base::Bind(&TextRenderer::Pause,
- base::Unretained(text_renderer_.get())));
- }
// Flush.
DCHECK(shared_state_.renderer);
bound_fns.Push(base::Bind(&Renderer::Flush,
base::Unretained(shared_state_.renderer.get())));
- if (text_renderer_) {
- bound_fns.Push(base::Bind(&TextRenderer::Flush,
- base::Unretained(text_renderer_.get())));
- }
-
// Seek demuxer.
bound_fns.Push(
base::Bind(&Demuxer::Seek, base::Unretained(demuxer_), seek_timestamp));
@@ -398,11 +369,6 @@ void PipelineImpl::RendererWrapper::Suspend() {
// Queue the asynchronous actions required to stop playback.
SerialRunner::Queue fns;
- if (text_renderer_) {
- fns.Push(base::Bind(&TextRenderer::Pause,
- base::Unretained(text_renderer_.get())));
- }
-
// No need to flush the renderer since it's going to be destroyed.
pending_callbacks_ = SerialRunner::Run(
fns, base::Bind(&RendererWrapper::CompleteSuspend, weak_this_));
@@ -547,25 +513,6 @@ void PipelineImpl::RendererWrapper::OnDemuxerError(PipelineStatus error) {
base::Bind(&RendererWrapper::OnPipelineError, weak_this_, error));
}
-void PipelineImpl::RendererWrapper::AddTextStream(
- DemuxerStream* text_stream,
- const TextTrackConfig& config) {
- // TODO(alokp): Add thread DCHECK after ensuring that all Demuxer
- // implementations call DemuxerHost on the media thread.
- media_task_runner_->PostTask(
- FROM_HERE, base::Bind(&RendererWrapper::AddTextStreamTask, weak_this_,
- text_stream, config));
-}
-
-void PipelineImpl::RendererWrapper::RemoveTextStream(
- DemuxerStream* text_stream) {
- // TODO(alokp): Add thread DCHECK after ensuring that all Demuxer
- // implementations call DemuxerHost on the media thread.
- media_task_runner_->PostTask(
- FROM_HERE, base::Bind(&RendererWrapper::RemoveTextStreamTask, weak_this_,
- text_stream));
-}
-
void PipelineImpl::RendererWrapper::OnError(PipelineStatus error) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
@@ -810,37 +757,6 @@ void PipelineImpl::RendererWrapper::OnDurationChange(base::TimeDelta duration) {
SetDuration(duration);
}
-void PipelineImpl::RendererWrapper::OnTextRendererEnded() {
- DCHECK(media_task_runner_->BelongsToCurrentThread());
- media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::TEXT_ENDED));
-
- if (state_ != kPlaying)
- return;
-
- DCHECK(!text_renderer_ended_);
- text_renderer_ended_ = true;
- CheckPlaybackEnded();
-}
-
-void PipelineImpl::RendererWrapper::AddTextStreamTask(
- DemuxerStream* text_stream,
- const TextTrackConfig& config) {
- DCHECK(media_task_runner_->BelongsToCurrentThread());
-
- // TODO(matthewjheaney): fix up text_ended_ when text stream
- // is added (http://crbug.com/321446).
- if (text_renderer_)
- text_renderer_->AddTextStream(text_stream, config);
-}
-
-void PipelineImpl::RendererWrapper::RemoveTextStreamTask(
- DemuxerStream* text_stream) {
- DCHECK(media_task_runner_->BelongsToCurrentThread());
-
- if (text_renderer_)
- text_renderer_->RemoveTextStream(text_stream);
-}
-
void PipelineImpl::RendererWrapper::OnPipelineError(PipelineStatus error) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK_NE(PIPELINE_OK, error) << "PIPELINE_OK isn't an error!";
@@ -882,9 +798,6 @@ void PipelineImpl::RendererWrapper::CheckPlaybackEnded() {
if (shared_state_.renderer && !renderer_ended_)
return;
- if (text_renderer_ && text_renderer_->HasTracks() && !text_renderer_ended_)
- return;
-
DCHECK_EQ(status_, PIPELINE_OK);
main_task_runner_->PostTask(
FROM_HERE, base::Bind(&PipelineImpl::OnEnded, weak_pipeline_));
@@ -924,9 +837,6 @@ void PipelineImpl::RendererWrapper::CompleteSeek(base::TimeDelta seek_time,
shared_state_.suspend_timestamp = kNoTimestamp;
}
- if (text_renderer_)
- text_renderer_->StartPlaying();
-
shared_state_.renderer->SetPlaybackRate(playback_rate_);
shared_state_.renderer->SetVolume(volume_);
@@ -967,7 +877,7 @@ void PipelineImpl::RendererWrapper::InitializeDemuxer(
const PipelineStatusCB& done_cb) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
- demuxer_->Initialize(this, done_cb, !!text_renderer_);
+ demuxer_->Initialize(this, done_cb);
}
void PipelineImpl::RendererWrapper::InitializeRenderer(
@@ -1114,21 +1024,11 @@ void PipelineImpl::Start(StartType start_type,
last_media_time_ = base::TimeDelta();
seek_time_ = kNoTimestamp;
- std::unique_ptr<TextRenderer> text_renderer;
- if (base::CommandLine::ForCurrentProcess()->HasSwitch(
- switches::kEnableInbandTextTracks)) {
- text_renderer.reset(new TextRenderer(
- media_task_runner_,
- BindToCurrentLoop(base::Bind(&PipelineImpl::OnAddTextTrack,
- weak_factory_.GetWeakPtr()))));
- }
-
media_task_runner_->PostTask(
FROM_HERE,
base::Bind(&RendererWrapper::Start,
base::Unretained(renderer_wrapper_.get()), start_type, demuxer,
- base::Passed(&renderer), base::Passed(&text_renderer),
- weak_factory_.GetWeakPtr()));
+ base::Passed(&renderer), weak_factory_.GetWeakPtr()));
}
void PipelineImpl::Stop() {
@@ -1427,16 +1327,6 @@ void PipelineImpl::OnDurationChange(base::TimeDelta duration) {
client_->OnDurationChange();
}
-void PipelineImpl::OnAddTextTrack(const TextTrackConfig& config,
- const AddTextTrackDoneCB& done_cb) {
- DVLOG(2) << __func__;
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(IsRunning());
-
- DCHECK(client_);
- client_->OnAddTextTrack(config, done_cb);
-}
-
void PipelineImpl::OnWaitingForDecryptionKey() {
DVLOG(2) << __func__;
DCHECK(thread_checker_.CalledOnValidThread());
diff --git a/chromium/media/base/pipeline_impl.h b/chromium/media/base/pipeline_impl.h
index 543f768b7b9..c89b88aa7f7 100644
--- a/chromium/media/base/pipeline_impl.h
+++ b/chromium/media/base/pipeline_impl.h
@@ -138,8 +138,6 @@ class MEDIA_EXPORT PipelineImpl : public Pipeline {
void OnMetadata(PipelineMetadata metadata);
void OnBufferingStateChange(BufferingState state);
void OnDurationChange(base::TimeDelta duration);
- void OnAddTextTrack(const TextTrackConfig& config,
- const AddTextTrackDoneCB& done_cb);
void OnWaitingForDecryptionKey();
void OnAudioConfigChange(const AudioDecoderConfig& config);
void OnVideoConfigChange(const VideoDecoderConfig& config);
diff --git a/chromium/media/base/pipeline_impl_unittest.cc b/chromium/media/base/pipeline_impl_unittest.cc
index 07229dbfd81..6f96efa7e64 100644
--- a/chromium/media/base/pipeline_impl_unittest.cc
+++ b/chromium/media/base/pipeline_impl_unittest.cc
@@ -127,11 +127,6 @@ class PipelineImplTest : public ::testing::Test {
if (pipeline_->IsRunning()) {
ExpectDemuxerStop();
- // The mock demuxer doesn't stop the fake text track stream,
- // so just stop it manually.
- if (text_stream_)
- text_stream_->Stop();
-
pipeline_->Stop();
}
@@ -147,7 +142,7 @@ class PipelineImplTest : public ::testing::Test {
void SetDemuxerExpectations(MockDemuxerStreamVector* streams,
const base::TimeDelta& duration) {
EXPECT_CALL(callbacks_, OnDurationChange());
- EXPECT_CALL(*demuxer_, Initialize(_, _, _))
+ EXPECT_CALL(*demuxer_, Initialize(_, _))
.WillOnce(DoAll(SaveArg<0>(&demuxer_host_),
SetDemuxerProperties(duration),
PostCallback<1>(PIPELINE_OK)));
@@ -181,14 +176,6 @@ class PipelineImplTest : public ::testing::Test {
DoAll(SaveArg<1>(&renderer_client_), PostCallback<2>(PIPELINE_OK)));
}
- void AddTextStream() {
- EXPECT_CALL(callbacks_, OnAddTextTrack(_, _))
- .WillOnce(Invoke(this, &PipelineImplTest::DoOnAddTextTrack));
- demuxer_host_->AddTextStream(text_stream(),
- TextTrackConfig(kTextSubtitles, "", "", ""));
- base::RunLoop().RunUntilIdle();
- }
-
void StartPipeline(
Pipeline::StartType start_type = Pipeline::StartType::kNormal) {
EXPECT_CALL(callbacks_, OnWaitingForDecryptionKey()).Times(0);
@@ -242,18 +229,10 @@ class PipelineImplTest : public ::testing::Test {
video_stream_->set_video_decoder_config(video_decoder_config_);
}
- void CreateTextStream() {
- std::unique_ptr<FakeTextTrackStream> text_stream(new FakeTextTrackStream());
- EXPECT_CALL(*text_stream, OnRead()).Times(AnyNumber());
- text_stream_ = std::move(text_stream);
- }
-
MockDemuxerStream* audio_stream() { return audio_stream_.get(); }
MockDemuxerStream* video_stream() { return video_stream_.get(); }
- FakeTextTrackStream* text_stream() { return text_stream_.get(); }
-
void ExpectSeek(const base::TimeDelta& seek_time, bool underflowed) {
EXPECT_CALL(*demuxer_, AbortPendingReads());
EXPECT_CALL(*demuxer_, Seek(seek_time, _))
@@ -323,12 +302,6 @@ class PipelineImplTest : public ::testing::Test {
EXPECT_CALL(*demuxer_, Stop());
}
- void DoOnAddTextTrack(const TextTrackConfig& config,
- const AddTextTrackDoneCB& done_cb) {
- std::unique_ptr<TextTrack> text_track(new MockTextTrack);
- done_cb.Run(std::move(text_track));
- }
-
void RunBufferedTimeRangesTest(const base::TimeDelta duration) {
EXPECT_EQ(0u, pipeline_->GetBufferedTimeRanges().size());
EXPECT_FALSE(pipeline_->DidLoadingProgress());
@@ -356,11 +329,8 @@ class PipelineImplTest : public ::testing::Test {
DemuxerHost* demuxer_host_;
std::unique_ptr<StrictMock<MockRenderer>> scoped_renderer_;
StrictMock<MockRenderer>* renderer_;
- StrictMock<CallbackHelper> text_renderer_callbacks_;
- TextRenderer* text_renderer_;
std::unique_ptr<StrictMock<MockDemuxerStream>> audio_stream_;
std::unique_ptr<StrictMock<MockDemuxerStream>> video_stream_;
- std::unique_ptr<FakeTextTrackStream> text_stream_;
RendererClient* renderer_client_;
VideoDecoderConfig video_decoder_config_;
PipelineMetadata metadata_;
@@ -398,7 +368,7 @@ TEST_F(PipelineImplTest, NotStarted) {
TEST_F(PipelineImplTest, NeverInitializes) {
// Don't execute the callback passed into Initialize().
- EXPECT_CALL(*demuxer_, Initialize(_, _, _));
+ EXPECT_CALL(*demuxer_, Initialize(_, _));
// This test hangs during initialization by never calling
// InitializationComplete(). StrictMock<> will ensure that the callback is
@@ -418,7 +388,7 @@ TEST_F(PipelineImplTest, StopWithoutStart) {
}
TEST_F(PipelineImplTest, StartThenStopImmediately) {
- EXPECT_CALL(*demuxer_, Initialize(_, _, _))
+ EXPECT_CALL(*demuxer_, Initialize(_, _))
.WillOnce(PostCallback<1>(PIPELINE_OK));
EXPECT_CALL(*demuxer_, Stop());
EXPECT_CALL(callbacks_, OnMetadata(_));
@@ -499,7 +469,7 @@ TEST_F(PipelineImplTest, DemuxerErrorDuringStop) {
}
TEST_F(PipelineImplTest, NoStreams) {
- EXPECT_CALL(*demuxer_, Initialize(_, _, _))
+ EXPECT_CALL(*demuxer_, Initialize(_, _))
.WillOnce(PostCallback<1>(PIPELINE_OK));
EXPECT_CALL(callbacks_, OnMetadata(_));
@@ -547,44 +517,9 @@ TEST_F(PipelineImplTest, AudioVideoStream) {
EXPECT_TRUE(metadata_.has_video);
}
-TEST_F(PipelineImplTest, VideoTextStream) {
- CreateVideoStream();
- CreateTextStream();
- MockDemuxerStreamVector streams;
- streams.push_back(video_stream());
-
- SetDemuxerExpectations(&streams);
- SetRendererExpectations();
-
- StartPipelineAndExpect(PIPELINE_OK);
- EXPECT_FALSE(metadata_.has_audio);
- EXPECT_TRUE(metadata_.has_video);
-
- AddTextStream();
-}
-
-TEST_F(PipelineImplTest, VideoAudioTextStream) {
- CreateVideoStream();
- CreateAudioStream();
- CreateTextStream();
- MockDemuxerStreamVector streams;
- streams.push_back(video_stream());
- streams.push_back(audio_stream());
-
- SetDemuxerExpectations(&streams);
- SetRendererExpectations();
-
- StartPipelineAndExpect(PIPELINE_OK);
- EXPECT_TRUE(metadata_.has_audio);
- EXPECT_TRUE(metadata_.has_video);
-
- AddTextStream();
-}
-
TEST_F(PipelineImplTest, Seek) {
CreateAudioStream();
CreateVideoStream();
- CreateTextStream();
MockDemuxerStreamVector streams;
streams.push_back(audio_stream());
streams.push_back(video_stream());
@@ -627,7 +562,6 @@ TEST_F(PipelineImplTest, SeekAfterError) {
TEST_F(PipelineImplTest, SuspendResume) {
CreateAudioStream();
CreateVideoStream();
- CreateTextStream();
MockDemuxerStreamVector streams;
streams.push_back(audio_stream());
streams.push_back(video_stream());
@@ -713,7 +647,7 @@ TEST_F(PipelineImplTest, GetBufferedTimeRanges) {
}
TEST_F(PipelineImplTest, BufferedTimeRangesCanChangeAfterStop) {
- EXPECT_CALL(*demuxer_, Initialize(_, _, _))
+ EXPECT_CALL(*demuxer_, Initialize(_, _))
.WillOnce(
DoAll(SaveArg<0>(&demuxer_host_), PostCallback<1>(PIPELINE_OK)));
EXPECT_CALL(*demuxer_, Stop());
@@ -729,7 +663,6 @@ TEST_F(PipelineImplTest, BufferedTimeRangesCanChangeAfterStop) {
TEST_F(PipelineImplTest, EndedCallback) {
CreateAudioStream();
CreateVideoStream();
- CreateTextStream();
MockDemuxerStreamVector streams;
streams.push_back(audio_stream());
streams.push_back(video_stream());
@@ -738,14 +671,10 @@ TEST_F(PipelineImplTest, EndedCallback) {
SetRendererExpectations();
StartPipelineAndExpect(PIPELINE_OK);
- AddTextStream();
// The ended callback shouldn't run until all renderers have ended.
- renderer_client_->OnEnded();
- base::RunLoop().RunUntilIdle();
-
EXPECT_CALL(callbacks_, OnEnded());
- text_stream()->SendEosNotification();
+ renderer_client_->OnEnded();
base::RunLoop().RunUntilIdle();
}
@@ -1043,12 +972,12 @@ class PipelineTeardownTest : public PipelineImplTest {
StopOrError stop_or_error) {
if (state == kInitDemuxer) {
if (stop_or_error == kStop) {
- EXPECT_CALL(*demuxer_, Initialize(_, _, _))
+ EXPECT_CALL(*demuxer_, Initialize(_, _))
.WillOnce(
DoAll(PostStop(pipeline_.get()), PostCallback<1>(PIPELINE_OK)));
// Note: OnStart callback is not called after pipeline is stopped.
} else {
- EXPECT_CALL(*demuxer_, Initialize(_, _, _))
+ EXPECT_CALL(*demuxer_, Initialize(_, _))
.WillOnce(PostCallback<1>(DEMUXER_ERROR_COULD_NOT_OPEN));
EXPECT_CALL(callbacks_, OnStart(DEMUXER_ERROR_COULD_NOT_OPEN))
.WillOnce(Stop(pipeline_.get()));
@@ -1112,6 +1041,7 @@ class PipelineTeardownTest : public PipelineImplTest {
void SetSeekExpectations(TeardownState state, StopOrError stop_or_error) {
if (state == kFlushing) {
+ EXPECT_CALL(*demuxer_, Seek(_, _));
if (stop_or_error == kStop) {
EXPECT_CALL(*renderer_, Flush(_))
.WillOnce(DoAll(
diff --git a/chromium/media/base/pipeline_status.h b/chromium/media/base/pipeline_status.h
index 81ea17ca84a..11cc0e8ca86 100644
--- a/chromium/media/base/pipeline_status.h
+++ b/chromium/media/base/pipeline_status.h
@@ -96,7 +96,7 @@ MEDIA_EXPORT bool operator!=(const PipelineStatistics& first,
// Used for updating pipeline statistics; the passed value should be a delta
// of all attributes since the last update.
-typedef base::Callback<void(const PipelineStatistics&)> StatisticsCB;
+using StatisticsCB = base::RepeatingCallback<void(const PipelineStatistics&)>;
} // namespace media
diff --git a/chromium/media/base/scopedfd_helper.cc b/chromium/media/base/scopedfd_helper.cc
new file mode 100644
index 00000000000..1b310a3c39f
--- /dev/null
+++ b/chromium/media/base/scopedfd_helper.cc
@@ -0,0 +1,30 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <unistd.h>
+
+#include <vector>
+
+#include "base/posix/eintr_wrapper.h"
+#include "media/base/scopedfd_helper.h"
+
+namespace media {
+
+std::vector<base::ScopedFD> DuplicateFDs(
+ const std::vector<base::ScopedFD>& fds) {
+ std::vector<base::ScopedFD> ret;
+
+ for (auto& fd : fds) {
+ base::ScopedFD dup_fd = base::ScopedFD(HANDLE_EINTR(dup(fd.get())));
+ if (!dup_fd.is_valid()) {
+ DPLOG(ERROR) << "Failed to duplicate ScopedFD's file descriptor";
+ return std::vector<base::ScopedFD>();
+ }
+ ret.push_back(std::move(dup_fd));
+ }
+
+ return ret;
+}
+
+} // namespace media
diff --git a/chromium/media/base/scopedfd_helper.h b/chromium/media/base/scopedfd_helper.h
new file mode 100644
index 00000000000..7a784b77d48
--- /dev/null
+++ b/chromium/media/base/scopedfd_helper.h
@@ -0,0 +1,28 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_SCOPEDFD_HELPER_H_
+#define MEDIA_BASE_SCOPEDFD_HELPER_H_
+
+#include "base/files/scoped_file.h"
+#include "media/base/media_export.h"
+
+namespace media {
+
+// Theoretically, we can test on defined(OS_POSIX) || defined(OS_FUCHSIA), but
+// since the only current user is V4L2 we are limiting the scope to OS_LINUX so
+// the binary size does not inflate on non-using systems. Feel free to adapt
+// this and BUILD.gn as our needs evolve.
+#if defined(OS_LINUX)
+
+// Return a new vector containing duplicates of |fds|, or an empty vector in
+// case of error.
+MEDIA_EXPORT std::vector<base::ScopedFD> DuplicateFDs(
+ const std::vector<base::ScopedFD>& fds);
+
+#endif // OS_LINUX
+
+} // namespace media
+
+#endif // MEDIA_BASE_SCOPEDFD_HELPER_H_
diff --git a/chromium/media/base/stream_parser.cc b/chromium/media/base/stream_parser.cc
index 8df75927a82..ccf06a63557 100644
--- a/chromium/media/base/stream_parser.cc
+++ b/chromium/media/base/stream_parser.cc
@@ -10,7 +10,6 @@ namespace media {
StreamParser::InitParameters::InitParameters(base::TimeDelta duration)
: duration(duration),
- auto_update_timestamp_offset(false),
liveness(DemuxerStream::LIVENESS_UNKNOWN),
detected_audio_track_count(0),
detected_video_track_count(0),
diff --git a/chromium/media/base/stream_parser.h b/chromium/media/base/stream_parser.h
index d6c7b0c36f0..3eaf9798975 100644
--- a/chromium/media/base/stream_parser.h
+++ b/chromium/media/base/stream_parser.h
@@ -61,10 +61,6 @@ class MEDIA_EXPORT StreamParser {
// null Time is returned if no mapping to Time exists.
base::Time timeline_offset;
- // Indicates that timestampOffset should be updated based on the earliest
- // end timestamp (audio or video) provided during each NewBuffersCB.
- bool auto_update_timestamp_offset;
-
// Indicates live stream.
DemuxerStream::Liveness liveness;
@@ -77,7 +73,7 @@ class MEDIA_EXPORT StreamParser {
// Indicates completion of parser initialization.
// params - Stream parameters.
- typedef base::Callback<void(const InitParameters& params)> InitCB;
+ typedef base::OnceCallback<void(const InitParameters& params)> InitCB;
// Indicates when new stream configurations have been parsed.
// First parameter - An object containing information about media tracks as
@@ -88,8 +84,8 @@ class MEDIA_EXPORT StreamParser {
// Return value - True if the new configurations are accepted.
// False if the new configurations are not supported
// and indicates that a parsing error should be signalled.
- typedef base::Callback<bool(std::unique_ptr<MediaTracks>,
- const TextTrackConfigMap&)>
+ typedef base::RepeatingCallback<bool(std::unique_ptr<MediaTracks>,
+ const TextTrackConfigMap&)>
NewConfigCB;
// New stream buffers have been parsed.
@@ -97,19 +93,20 @@ class MEDIA_EXPORT StreamParser {
// Return value - True indicates that the buffers are accepted.
// False if something was wrong with the buffers and a parsing
// error should be signalled.
- typedef base::Callback<bool(const BufferQueueMap&)> NewBuffersCB;
+ typedef base::RepeatingCallback<bool(const BufferQueueMap&)> NewBuffersCB;
// Signals the beginning of a new media segment.
- typedef base::Callback<void()> NewMediaSegmentCB;
+ typedef base::RepeatingCallback<void()> NewMediaSegmentCB;
// Signals the end of a media segment.
- typedef base::Callback<void()> EndMediaSegmentCB;
+ typedef base::RepeatingCallback<void()> EndMediaSegmentCB;
// A new potentially encrypted stream has been parsed.
// First parameter - The type of the initialization data associated with the
// stream.
// Second parameter - The initialization data associated with the stream.
- typedef base::Callback<void(EmeInitDataType, const std::vector<uint8_t>&)>
+ typedef base::RepeatingCallback<void(EmeInitDataType,
+ const std::vector<uint8_t>&)>
EncryptedMediaInitDataCB;
StreamParser();
@@ -121,7 +118,7 @@ class MEDIA_EXPORT StreamParser {
// start time, and duration. If |ignore_text_track| is true, then no text
// buffers should be passed later by the parser to |new_buffers_cb|.
virtual void Init(
- const InitCB& init_cb,
+ InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool ignore_text_track,
@@ -136,6 +133,10 @@ class MEDIA_EXPORT StreamParser {
// algorithm already resets the segment parsing state.
virtual void Flush() = 0;
+ // Returns the MSE byte stream format registry's "Generate Timestamps Flag"
+ // for the byte stream corresponding to this parser.
+ virtual bool GetGenerateTimestampsFlag() const = 0;
+
// Called when there is new data to parse.
//
// Returns true if the parse succeeds.
diff --git a/chromium/media/base/stream_parser_buffer.cc b/chromium/media/base/stream_parser_buffer.cc
index ba03928735c..fc359527d42 100644
--- a/chromium/media/base/stream_parser_buffer.cc
+++ b/chromium/media/base/stream_parser_buffer.cc
@@ -64,7 +64,7 @@ StreamParserBuffer::StreamParserBuffer(const uint8_t* data,
config_id_(kInvalidConfigId),
type_(type),
track_id_(track_id),
- duration_type_(DurationType::kKnownDuration) {
+ is_duration_estimated_(false) {
// TODO(scherkus): Should DataBuffer constructor accept a timestamp and
// duration to force clients to set them? Today they end up being zero which
// is both a common and valid value and could lead to bugs.
diff --git a/chromium/media/base/stream_parser_buffer.h b/chromium/media/base/stream_parser_buffer.h
index ff38dffc118..d26d783b147 100644
--- a/chromium/media/base/stream_parser_buffer.h
+++ b/chromium/media/base/stream_parser_buffer.h
@@ -100,14 +100,6 @@ MEDIA_EXPORT extern inline DecodeTimestamp kNoDecodeTimestamp() {
return DecodeTimestamp::FromPresentationTime(kNoTimestamp);
}
-// Sync with StreamParserBufferDurationType in enums.xml.
-enum class DurationType {
- kKnownDuration = 0,
- kConstantEstimate = 1,
- kRoughEstimate = 2,
- kDurationTypeMax = kRoughEstimate // Must point to last.
-};
-
class MEDIA_EXPORT StreamParserBuffer : public DecoderBuffer {
public:
// Value used to signal an invalid decoder config ID.
@@ -164,14 +156,10 @@ class MEDIA_EXPORT StreamParserBuffer : public DecoderBuffer {
void set_timestamp(base::TimeDelta timestamp) override;
- DurationType duration_type() const { return duration_type_; }
-
- void set_duration_type(DurationType duration_type) {
- duration_type_ = duration_type;
- }
+ bool is_duration_estimated() const { return is_duration_estimated_; }
- bool is_duration_estimated() const {
- return duration_type_ != DurationType::kKnownDuration;
+ void set_is_duration_estimated(bool is_estimated) {
+ is_duration_estimated_ = is_estimated;
}
private:
@@ -189,7 +177,7 @@ class MEDIA_EXPORT StreamParserBuffer : public DecoderBuffer {
Type type_;
TrackId track_id_;
scoped_refptr<StreamParserBuffer> preroll_buffer_;
- DurationType duration_type_;
+ bool is_duration_estimated_;
DISALLOW_COPY_AND_ASSIGN(StreamParserBuffer);
};
diff --git a/chromium/media/base/subsample_entry.h b/chromium/media/base/subsample_entry.h
index 48f8ea70b7f..f4117f61c5a 100644
--- a/chromium/media/base/subsample_entry.h
+++ b/chromium/media/base/subsample_entry.h
@@ -5,6 +5,7 @@
#ifndef MEDIA_BASE_SUBSAMPLE_ENTRY_H_
#define MEDIA_BASE_SUBSAMPLE_ENTRY_H_
+#include <stddef.h>
#include <stdint.h>
#include <vector>
diff --git a/chromium/media/base/surface_manager.h b/chromium/media/base/surface_manager.h
deleted file mode 100644
index 02af1c7bbc1..00000000000
--- a/chromium/media/base/surface_manager.h
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BASE_SURFACE_MANAGER_H_
-#define MEDIA_BASE_SURFACE_MANAGER_H_
-
-#include "base/callback.h"
-#include "base/macros.h"
-#include "media/base/media_export.h"
-#include "ui/gfx/geometry/size.h"
-
-namespace media {
-
-using SurfaceCreatedCB = base::Callback<void(int)>;
-using RequestSurfaceCB = base::Callback<void(bool, const SurfaceCreatedCB&)>;
-
-class MEDIA_EXPORT SurfaceManager {
- public:
- enum { kNoSurfaceID = -1 };
-
- SurfaceManager() {}
- virtual ~SurfaceManager() {}
-
- // Create a fullscreen surface. The id will be returned with
- // |surface_created_cb|. If this is called more than once before the first
- // |surface_created_cb| is called, the surface will be delivered to the last
- // caller. If this is called after the fullscreen surface is created, the
- // existing surface will be returned. The client should ensure that the
- // previous consumer is no longer using the surface.
- virtual void CreateFullscreenSurface(
- const gfx::Size& video_natural_size,
- const SurfaceCreatedCB& surface_created_cb) = 0;
-
- // Call this when the natural size of the fullscreen video changes. The
- // surface will be resized to match the aspect ratio.
- virtual void NaturalSizeChanged(const gfx::Size& size) = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(SurfaceManager);
-};
-
-} // namespace media
-
-#endif // MEDIA_BASE_SURFACE_MANAGER_H_
diff --git a/chromium/media/base/test_helpers.cc b/chromium/media/base/test_helpers.cc
index 36e6965ee62..7e07d92413d 100644
--- a/chromium/media/base/test_helpers.cc
+++ b/chromium/media/base/test_helpers.cc
@@ -98,7 +98,7 @@ void WaitableMessageLoopEvent::RunAndWaitForStatus(PipelineStatus expected) {
}
run_loop_.reset(new base::RunLoop());
- base::Timer timer(false, false);
+ base::OneShotTimer timer;
timer.Start(
FROM_HERE, timeout_,
base::Bind(&WaitableMessageLoopEvent::OnTimeout, base::Unretained(this)));
diff --git a/chromium/media/base/unaligned_shared_memory.cc b/chromium/media/base/unaligned_shared_memory.cc
index e84bef4273d..1386d343d07 100644
--- a/chromium/media/base/unaligned_shared_memory.cc
+++ b/chromium/media/base/unaligned_shared_memory.cc
@@ -8,13 +8,15 @@
#include "base/logging.h"
#include "base/sys_info.h"
+#include "mojo/public/cpp/system/platform_handle.h"
namespace media {
UnalignedSharedMemory::UnalignedSharedMemory(
const base::SharedMemoryHandle& handle,
+ size_t size,
bool read_only)
- : shm_(handle, read_only), misalignment_(0) {}
+ : shm_(handle, read_only), size_(size), misalignment_(0) {}
UnalignedSharedMemory::~UnalignedSharedMemory() = default;
@@ -42,6 +44,9 @@ bool UnalignedSharedMemory::MapAt(off_t offset, size_t size) {
DLOG(ERROR) << "Invalid size";
return false;
}
+ // TODO(b/795291): |size| could also be compared against |size_|. However,
+ // this will shortly all be changed for the shared memory refactor and so this
+ // extra check will be deferred.
off_t adjusted_offset = offset - static_cast<off_t>(misalignment);
if (!shm_.MapAt(adjusted_offset, size + misalignment)) {
@@ -57,4 +62,71 @@ void* UnalignedSharedMemory::memory() const {
return static_cast<uint8_t*>(shm_.memory()) + misalignment_;
}
+WritableUnalignedMapping::WritableUnalignedMapping(
+ const base::UnsafeSharedMemoryRegion& region,
+ size_t size,
+ off_t offset)
+ : size_(size), misalignment_(0) {
+ if (!region.IsValid()) {
+ DLOG(ERROR) << "Invalid region";
+ return;
+ }
+
+ if (offset < 0) {
+ DLOG(ERROR) << "Invalid offset";
+ return;
+ }
+
+ /* | | | | | | shm pages
+ * | offset (may exceed max size_t)
+ * |-----------| size
+ * |-| misalignment
+ * | adjusted offset
+ * |-------------| requested mapping
+ */
+ // Note: result of % computation may be off_t or size_t, depending on the
+ // relative ranks of those types. In any case we assume that
+ // VMAllocationGranularity() fits in both types, so the final result does too.
+ misalignment_ = offset % base::SysInfo::VMAllocationGranularity();
+
+ // Above this |size_|, |size_| + |misalignment| overflows.
+ size_t max_size = std::numeric_limits<size_t>::max() - misalignment_;
+ if (size_ > max_size) {
+ DLOG(ERROR) << "Invalid size";
+ return;
+ }
+
+ off_t adjusted_offset = offset - static_cast<off_t>(misalignment_);
+ mapping_ = region.MapAt(adjusted_offset, size_ + misalignment_);
+ if (!mapping_.IsValid()) {
+ DLOG(ERROR) << "Failed to map shared memory " << adjusted_offset << "("
+ << offset << ")"
+ << "@" << size << "/\\" << misalignment_ << " on "
+ << region.GetSize();
+
+ return;
+ }
+}
+
+WritableUnalignedMapping::WritableUnalignedMapping(
+ const base::SharedMemoryHandle& handle,
+ size_t size,
+ off_t offset)
+ : WritableUnalignedMapping(
+ mojo::UnwrapUnsafeSharedMemoryRegion(mojo::WrapSharedMemoryHandle(
+ handle,
+ handle.GetSize(),
+ mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite)),
+ size,
+ offset) {}
+
+WritableUnalignedMapping::~WritableUnalignedMapping() = default;
+
+void* WritableUnalignedMapping::memory() const {
+ if (!IsValid()) {
+ return nullptr;
+ }
+ return static_cast<uint8_t*>(mapping_.memory()) + misalignment_;
+}
+
} // namespace media
diff --git a/chromium/media/base/unaligned_shared_memory.h b/chromium/media/base/unaligned_shared_memory.h
index 0f2731c9f65..519bb063c3e 100644
--- a/chromium/media/base/unaligned_shared_memory.h
+++ b/chromium/media/base/unaligned_shared_memory.h
@@ -9,22 +9,36 @@
#include "base/macros.h"
#include "base/memory/shared_memory.h"
+#include "base/memory/shared_memory_mapping.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "media/base/media_export.h"
namespace media {
// Wrapper over base::SharedMemory that can be mapped at unaligned offsets.
+// DEPRECATED! See https://crbug.com/795291.
class MEDIA_EXPORT UnalignedSharedMemory {
public:
- UnalignedSharedMemory(const base::SharedMemoryHandle& handle, bool read_only);
+ // Creates an |UnalignedSharedMemory| instance from a
+ // |SharedMemoryHandle|. |size| sets the maximum size that may be mapped. This
+ // instance will own the handle.
+ UnalignedSharedMemory(const base::SharedMemoryHandle& handle,
+ size_t size,
+ bool read_only);
~UnalignedSharedMemory();
+ // Map the shared memory region. Note that the passed |size| parameter should
+ // be less than or equal to |size()|.
bool MapAt(off_t offset, size_t size);
+ size_t size() const { return size_; }
void* memory() const;
private:
base::SharedMemory shm_;
+ // The size of the region associated with |shm_|.
+ size_t size_;
+
// Offset withing |shm_| memory that data has been mapped; strictly less than
// base::SysInfo::VMAllocationGranularity().
size_t misalignment_;
@@ -32,6 +46,47 @@ class MEDIA_EXPORT UnalignedSharedMemory {
DISALLOW_COPY_AND_ASSIGN(UnalignedSharedMemory);
};
+// Wrapper over base::WritableSharedMemoryMapping that is mapped at unaligned
+// offsets.
+class MEDIA_EXPORT WritableUnalignedMapping {
+ public:
+ // Creates an |UnalignedSharedMemory| instance from a
+ // |UnsafeSharedMemoryRegion|. |size| sets the maximum size that may be mapped
+ // within |region| and |offset| is the offset that will be mapped. |region| is
+ // not retained and is used only in the constructor.
+ WritableUnalignedMapping(const base::UnsafeSharedMemoryRegion& region,
+ size_t size,
+ off_t offset);
+
+ // As above, but creates from a handle. This region will not own the handle.
+ // DEPRECATED: this should be used only for the legacy shared memory
+ // conversion project, see https://crbug.com/795291.
+ WritableUnalignedMapping(const base::SharedMemoryHandle& handle,
+ size_t size,
+ off_t offset);
+
+ ~WritableUnalignedMapping();
+
+ size_t size() const { return size_; }
+ void* memory() const;
+
+ // True if the mapping backing the memory is valid.
+ bool IsValid() const { return mapping_.IsValid(); }
+
+ private:
+ base::WritableSharedMemoryMapping mapping_;
+
+ // The size of the region associated with |mapping_|.
+ size_t size_;
+
+ // Difference between actual offset within |mapping_| where data has been
+ // mapped and requested offset; strictly less than
+ // base::SysInfo::VMAllocationGranularity().
+ size_t misalignment_;
+
+ DISALLOW_COPY_AND_ASSIGN(WritableUnalignedMapping);
+};
+
} // namespace media
#endif // MEDIA_BASE_UNALIGNED_SHARED_MEMORY_H_
diff --git a/chromium/media/base/unaligned_shared_memory_unittest.cc b/chromium/media/base/unaligned_shared_memory_unittest.cc
index fe36edc8d95..34b287e7273 100644
--- a/chromium/media/base/unaligned_shared_memory_unittest.cc
+++ b/chromium/media/base/unaligned_shared_memory_unittest.cc
@@ -32,55 +32,127 @@ base::SharedMemoryHandle CreateHandle(const uint8_t* data, size_t size) {
return shm.TakeHandle();
}
+base::UnsafeSharedMemoryRegion CreateRegion(const uint8_t* data, size_t size) {
+ auto region = base::UnsafeSharedMemoryRegion::Create(size);
+ auto mapping = region.Map();
+ EXPECT_TRUE(mapping.IsValid());
+ memcpy(mapping.memory(), data, size);
+ return region;
+}
+
} // namespace
TEST(UnalignedSharedMemoryTest, CreateAndDestroy) {
auto handle = CreateHandle(kData, kDataSize);
- UnalignedSharedMemory shm(handle, true);
+ UnalignedSharedMemory shm(handle, kDataSize, true);
}
TEST(UnalignedSharedMemoryTest, CreateAndDestroy_InvalidHandle) {
base::SharedMemoryHandle handle;
- UnalignedSharedMemory shm(handle, true);
+ UnalignedSharedMemory shm(handle, kDataSize, true);
}
TEST(UnalignedSharedMemoryTest, Map) {
auto handle = CreateHandle(kData, kDataSize);
- UnalignedSharedMemory shm(handle, true);
+ UnalignedSharedMemory shm(handle, kDataSize, true);
ASSERT_TRUE(shm.MapAt(0, kDataSize));
EXPECT_EQ(0, memcmp(shm.memory(), kData, kDataSize));
}
TEST(UnalignedSharedMemoryTest, Map_Unaligned) {
auto handle = CreateHandle(kUnalignedData, kUnalignedDataSize);
- UnalignedSharedMemory shm(handle, true);
+ UnalignedSharedMemory shm(handle, kUnalignedDataSize, true);
ASSERT_TRUE(shm.MapAt(kUnalignedOffset, kDataSize));
EXPECT_EQ(0, memcmp(shm.memory(), kData, kDataSize));
}
TEST(UnalignedSharedMemoryTest, Map_InvalidHandle) {
base::SharedMemoryHandle handle;
- UnalignedSharedMemory shm(handle, true);
+ UnalignedSharedMemory shm(handle, kDataSize, true);
ASSERT_FALSE(shm.MapAt(1, kDataSize));
EXPECT_EQ(shm.memory(), nullptr);
}
TEST(UnalignedSharedMemoryTest, Map_NegativeOffset) {
auto handle = CreateHandle(kData, kDataSize);
- UnalignedSharedMemory shm(handle, true);
+ UnalignedSharedMemory shm(handle, kDataSize, true);
ASSERT_FALSE(shm.MapAt(-1, kDataSize));
}
TEST(UnalignedSharedMemoryTest, Map_SizeOverflow) {
auto handle = CreateHandle(kData, kDataSize);
- UnalignedSharedMemory shm(handle, true);
+ UnalignedSharedMemory shm(handle, kDataSize, true);
ASSERT_FALSE(shm.MapAt(1, std::numeric_limits<size_t>::max()));
}
TEST(UnalignedSharedMemoryTest, UnmappedIsNullptr) {
auto handle = CreateHandle(kData, kDataSize);
- UnalignedSharedMemory shm(handle, true);
+ UnalignedSharedMemory shm(handle, kDataSize, true);
ASSERT_EQ(shm.memory(), nullptr);
}
+TEST(WritableUnalignedMappingTest, CreateAndDestroy) {
+ auto region = CreateRegion(kData, kDataSize);
+ WritableUnalignedMapping shm(region, kDataSize, 0);
+ EXPECT_TRUE(shm.IsValid());
+}
+
+TEST(WritableUnalignedMappingTest, CreateAndDestroy_InvalidHandle) {
+ base::SharedMemoryHandle handle;
+ WritableUnalignedMapping shm(handle, kDataSize, 0);
+ EXPECT_FALSE(shm.IsValid());
+}
+
+TEST(WritableUnalignedMappingTest, CreateAndDestroyHandle) {
+ auto handle = CreateHandle(kData, kDataSize);
+ WritableUnalignedMapping shm(handle, kDataSize, 0);
+ EXPECT_TRUE(shm.IsValid());
+}
+
+TEST(WritableUnalignedMappingTest, CreateAndDestroy_InvalidRegion) {
+ base::UnsafeSharedMemoryRegion region;
+ WritableUnalignedMapping shm(region, kDataSize, 0);
+ EXPECT_FALSE(shm.IsValid());
+}
+
+TEST(WritableUnalignedMappingTest, Map) {
+ auto region = CreateRegion(kData, kDataSize);
+ WritableUnalignedMapping shm(region, kDataSize, 0);
+ ASSERT_TRUE(shm.IsValid());
+ EXPECT_EQ(0, memcmp(shm.memory(), kData, kDataSize));
+}
+
+TEST(WritableUnalignedMappingTest, Map_Unaligned) {
+ auto region = CreateRegion(kUnalignedData, kUnalignedDataSize);
+ WritableUnalignedMapping shm(region, kDataSize, kUnalignedOffset);
+ ASSERT_TRUE(shm.IsValid());
+ EXPECT_EQ(0, memcmp(shm.memory(), kData, kDataSize));
+}
+
+TEST(WritableUnalignedMappingTest, Map_UnalignedHandle) {
+ auto region = CreateHandle(kUnalignedData, kUnalignedDataSize);
+ WritableUnalignedMapping shm(region, kDataSize, kUnalignedOffset);
+ ASSERT_TRUE(shm.IsValid());
+ EXPECT_EQ(0, memcmp(shm.memory(), kData, kDataSize));
+}
+
+TEST(WritableUnalignedMappingTest, Map_InvalidRegion) {
+ base::UnsafeSharedMemoryRegion region;
+ WritableUnalignedMapping shm(region, kDataSize, 0);
+ ASSERT_FALSE(shm.IsValid());
+ EXPECT_EQ(shm.memory(), nullptr);
+}
+
+TEST(WritableUnalignedMappingTest, Map_NegativeOffset) {
+ auto region = CreateRegion(kData, kDataSize);
+ WritableUnalignedMapping shm(region, kDataSize, -1);
+ ASSERT_FALSE(shm.IsValid());
+}
+
+TEST(WritableUnalignedMappingTest, Map_SizeOverflow) {
+ auto region = CreateRegion(kData, kDataSize);
+ WritableUnalignedMapping shm(region, std::numeric_limits<size_t>::max(), 1);
+ ASSERT_FALSE(shm.IsValid());
+}
+
} // namespace media
diff --git a/chromium/media/base/video_bitrate_allocation.cc b/chromium/media/base/video_bitrate_allocation.cc
index 629092eedf9..8d270b866cb 100644
--- a/chromium/media/base/video_bitrate_allocation.cc
+++ b/chromium/media/base/video_bitrate_allocation.cc
@@ -8,13 +8,14 @@
#include <numeric>
#include "base/logging.h"
+#include "base/numerics/checked_math.h"
namespace media {
constexpr size_t VideoBitrateAllocation::kMaxSpatialLayers;
constexpr size_t VideoBitrateAllocation::kMaxTemporalLayers;
-VideoBitrateAllocation::VideoBitrateAllocation() : bitrates_{} {}
+VideoBitrateAllocation::VideoBitrateAllocation() : sum_(0), bitrates_{} {}
bool VideoBitrateAllocation::SetBitrate(size_t spatial_index,
size_t temporal_index,
@@ -23,11 +24,14 @@ bool VideoBitrateAllocation::SetBitrate(size_t spatial_index,
CHECK_LT(temporal_index, kMaxTemporalLayers);
CHECK_GE(bitrate_bps, 0);
- if (GetSumBps() - bitrates_[spatial_index][temporal_index] >
- std::numeric_limits<int>::max() - bitrate_bps) {
+ base::CheckedNumeric<int> checked_sum = sum_;
+ checked_sum -= bitrates_[spatial_index][temporal_index];
+ checked_sum += bitrate_bps;
+ if (!checked_sum.IsValid()) {
return false; // Would cause overflow of the sum.
}
+ sum_ = checked_sum.ValueOrDie();
bitrates_[spatial_index][temporal_index] = bitrate_bps;
return true;
}
@@ -40,15 +44,15 @@ int VideoBitrateAllocation::GetBitrateBps(size_t spatial_index,
}
int VideoBitrateAllocation::GetSumBps() const {
- int sum = 0;
- for (size_t spatial_index = 0; spatial_index < kMaxSpatialLayers;
- ++spatial_index) {
- for (size_t temporal_index = 0; temporal_index < kMaxTemporalLayers;
- ++temporal_index) {
- sum += bitrates_[spatial_index][temporal_index];
- }
+ return sum_;
+}
+
+bool VideoBitrateAllocation::operator==(
+ const VideoBitrateAllocation& other) const {
+ if (sum_ != other.sum_) {
+ return false;
}
- return sum;
+ return memcmp(bitrates_, other.bitrates_, sizeof(bitrates_)) == 0;
}
} // namespace media
diff --git a/chromium/media/base/video_bitrate_allocation.h b/chromium/media/base/video_bitrate_allocation.h
index 6341ce3f751..7d5059a9814 100644
--- a/chromium/media/base/video_bitrate_allocation.h
+++ b/chromium/media/base/video_bitrate_allocation.h
@@ -14,7 +14,7 @@
namespace media {
// Class that describes how video bitrate, in bps, is allocated across temporal
-// and spatial layers. Not that bitrates are NOT cumulative. Depending on if
+// and spatial layers. Note that bitrates are NOT cumulative. Depending on if
// layers are dependent or not, it is up to the user to aggregate.
class MEDIA_EXPORT VideoBitrateAllocation {
public:
@@ -33,9 +33,14 @@ class MEDIA_EXPORT VideoBitrateAllocation {
// Sum of all bitrates.
int32_t GetSumBps() const;
+ bool operator==(const VideoBitrateAllocation& other) const;
+ inline bool operator!=(const VideoBitrateAllocation& other) const {
+ return !(*this == other);
+ }
+
private:
+ int sum_; // Cached sum of all elements of |bitrates_|, for perfomance.
int bitrates_[kMaxSpatialLayers][kMaxTemporalLayers];
- DISALLOW_COPY_AND_ASSIGN(VideoBitrateAllocation);
};
} // namespace media
diff --git a/chromium/media/base/video_bitrate_allocation_unittest.cc b/chromium/media/base/video_bitrate_allocation_unittest.cc
index ce9e8acaaf9..df8b3ae43ea 100644
--- a/chromium/media/base/video_bitrate_allocation_unittest.cc
+++ b/chromium/media/base/video_bitrate_allocation_unittest.cc
@@ -67,4 +67,17 @@ TEST(VideoBitrateAllocationTest, ValidatesSumWhenOverwriting) {
EXPECT_EQ(std::numeric_limits<int>::max() - 1, allocation.GetSumBps());
}
+TEST(VideoBitrateAllocationTest, CanCopyAndCompare) {
+ VideoBitrateAllocation allocation;
+ EXPECT_TRUE(allocation.SetBitrate(0, 0, 1000));
+ EXPECT_TRUE(allocation.SetBitrate(
+ VideoBitrateAllocation::kMaxSpatialLayers - 1,
+ VideoBitrateAllocation::kMaxTemporalLayers - 1, 2000));
+
+ VideoBitrateAllocation copy = allocation;
+ EXPECT_EQ(copy, allocation);
+ copy.SetBitrate(0, 0, 0);
+ EXPECT_NE(copy, allocation);
+}
+
} // namespace media
diff --git a/chromium/media/base/video_codecs.cc b/chromium/media/base/video_codecs.cc
index 4257b52b4df..c815641d8da 100644
--- a/chromium/media/base/video_codecs.cc
+++ b/chromium/media/base/video_codecs.cc
@@ -94,8 +94,12 @@ std::string GetProfileName(VideoCodecProfile profile) {
return "dolby vision profile 7";
case THEORAPROFILE_ANY:
return "theora";
- case AV1PROFILE_PROFILE0:
- return "av1 profile0";
+ case AV1PROFILE_PROFILE_MAIN:
+ return "av1 profile main";
+ case AV1PROFILE_PROFILE_HIGH:
+ return "av1 profile high";
+ case AV1PROFILE_PROFILE_PRO:
+ return "av1 profile pro";
}
NOTREACHED();
return "";
@@ -254,6 +258,211 @@ bool ParseLegacyVp9CodecID(const std::string& codec_id,
return false;
}
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+bool ParseAv1CodecId(const std::string& codec_id,
+ VideoCodecProfile* profile,
+ uint8_t* level_idc,
+ VideoColorSpace* color_space) {
+ // The codecs parameter string for the AOM AV1 codec is as follows:
+ //
+ // <sample entry4CC>.<profile>.<level><tier>.<bitDepth>.<monochrome>.
+ // <chromaSubsampling>.<colorPrimaries>.<transferCharacteristics>.
+ // <matrixCoefficients>.<videoFullRangeFlag>
+ //
+
+ std::vector<std::string> fields = base::SplitString(
+ codec_id, ".", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL);
+
+ // The parameters sample entry 4CC, profile, level, tier, and bitDepth are all
+ // mandatory fields. If any of these fields are empty, or not within their
+ // allowed range, the processing device SHOULD treat it as an error.
+ if (fields.size() < 4 || fields.size() > 10) {
+ DVLOG(3) << __func__ << " Invalid number of fields (" << fields.size()
+ << ")";
+ return false;
+ }
+
+ // All the other fields (including their leading '.') are optional, mutually
+ // inclusive (all or none) fields. If not specified then the values listed in
+ // the table below are assumed.
+ //
+ // mono_chrome 0
+ // chromaSubsampling 112 (4:2:0 colocated with luma (0,0))
+ // colorPrimaries 1 (ITU-R BT.709)
+ // transferCharacteristics 1 (ITU-R BT.709)
+ // matrixCoefficients 1 (ITU-R BT.709)
+ // videoFullRangeFlag 0 (studio swing representation)
+ *color_space = VideoColorSpace::REC709();
+
+ if (fields[0] != "av01") {
+ DVLOG(3) << __func__ << " Invalid AV1 4CC (" << fields[0] << ")";
+ return false;
+ }
+
+ // The level parameter value SHALL equal the first level value indicated by
+ // seq_level_idx in the Sequence Header. The tier parameter value SHALL be
+ // equal to M when the first seq_tier value in the Sequence Header is equal to
+ // 0, and H when it is equal to 1.
+ if (fields[2].size() != 3 || (fields[2][2] != 'M' && fields[2][2] != 'H')) {
+ DVLOG(3) << __func__ << " Invalid level+tier (" << fields[2] << ")";
+ return false;
+ }
+
+ // Since tier has been validated, strip the trailing tier indicator to allow
+ // int conversion below.
+ fields[2].resize(2);
+
+ // Fill with dummy values to ensure parallel indices with fields.
+ std::vector<int> values(fields.size(), 0);
+ for (size_t i = 1; i < fields.size(); ++i) {
+ if (fields[i].empty()) {
+ DVLOG(3) << __func__ << " Invalid empty field (position:" << i << ")";
+ return false;
+ }
+
+ if (!base::StringToInt(fields[i], &values[i]) || values[i] < 0) {
+ DVLOG(3) << __func__ << " Invalid field value (" << values[i] << ")";
+ return false;
+ }
+ }
+
+ // The profile parameter value, represented by a single digit decimal, SHALL
+ // equal the value of seq_profile in the Sequence Header.
+ const int profile_idc = fields[1].size() == 1 ? values[1] : -1;
+ switch (profile_idc) {
+ case 0:
+ *profile = AV1PROFILE_PROFILE_MAIN;
+ break;
+ case 1:
+ *profile = AV1PROFILE_PROFILE_HIGH;
+ break;
+ case 2:
+ *profile = AV1PROFILE_PROFILE_PRO;
+ break;
+ default:
+ DVLOG(3) << __func__ << " Invalid profile (" << fields[1] << ")";
+ return false;
+ }
+
+ // The level parameter value SHALL equal the first level value indicated by
+ // seq_level_idx in the Sequence Header. Note: We validate that this field has
+ // the required leading zeros above.
+ *level_idc = values[2];
+ if (*level_idc > 31) {
+ DVLOG(3) << __func__ << " Invalid level (" << *level_idc << ")";
+ return false;
+ }
+
+ // The bitDepth parameter value SHALL equal the value of BitDepth variable as
+ // defined in [AV1] derived from the Sequence Header. Leading zeros required.
+ const int bit_depth = values[3];
+ if (fields[3].size() != 2 ||
+ (bit_depth != 8 && bit_depth != 10 && bit_depth != 12)) {
+ DVLOG(3) << __func__ << " Invalid bit-depth (" << fields[3] << ")";
+ return false;
+ }
+
+ if (values.size() <= 4)
+ return true;
+
+ // The monochrome parameter value, represented by a single digit decimal,
+ // SHALL equal the value of mono_chrome in the Sequence Header.
+ const int monochrome = values[4];
+ if (fields[4].size() != 1 || monochrome > 1) {
+ DVLOG(3) << __func__ << " Invalid monochrome (" << fields[4] << ")";
+ return false;
+ }
+
+ if (values.size() <= 5)
+ return true;
+
+ // The chromaSubsampling parameter value, represented by a three-digit
+ // decimal, SHALL have its first digit equal to subsampling_x and its second
+ // digit equal to subsampling_y. If both subsampling_x and subsampling_y are
+ // set to 1, then the third digit SHALL be equal to chroma_sample_position,
+ // otherwise it SHALL be set to 0.
+ if (fields[5].size() != 3) {
+ DVLOG(3) << __func__ << " Invalid chroma subsampling (" << fields[5] << ")";
+ return false;
+ }
+
+ const char subsampling_x = fields[5][0];
+ const char subsampling_y = fields[5][1];
+ const char chroma_sample_position = fields[5][2];
+ if ((subsampling_x < '0' || subsampling_x > '1') ||
+ (subsampling_y < '0' || subsampling_y > '1') ||
+ (chroma_sample_position < '0' || chroma_sample_position > '3')) {
+ DVLOG(3) << __func__ << " Invalid chroma subsampling (" << fields[5] << ")";
+ return false;
+ }
+
+ if (((subsampling_x == '0' || subsampling_y == '0') &&
+ chroma_sample_position != '0') ||
+ (subsampling_x == '1' && subsampling_y == '1' &&
+ chroma_sample_position == '0')) {
+ DVLOG(3) << __func__ << " Invalid chroma subsampling (" << fields[5] << ")";
+ return false;
+ }
+
+ if (values.size() <= 6)
+ return true;
+
+ // The colorPrimaries, transferCharacteristics, matrixCoefficients and
+ // videoFullRangeFlag parameter values SHALL equal the value of matching
+ // fields in the Sequence Header, if color_description_present_flag is set to
+ // 1, otherwise they SHOULD not be set, defaulting to the values below. The
+ // videoFullRangeFlag is represented by a single digit.
+ color_space->primaries = VideoColorSpace::GetPrimaryID(values[6]);
+ if (fields[6].size() != 2 ||
+ color_space->primaries == VideoColorSpace::PrimaryID::INVALID) {
+ DVLOG(3) << __func__ << " Invalid color primaries (" << fields[6] << ")";
+ return false;
+ }
+
+ if (values.size() <= 7)
+ return true;
+
+ color_space->transfer = VideoColorSpace::GetTransferID(values[7]);
+ if (fields[7].size() != 2 ||
+ color_space->transfer == VideoColorSpace::TransferID::INVALID) {
+ DVLOG(3) << __func__ << " Invalid transfer function (" << fields[7] << ")";
+ return false;
+ }
+
+ if (values.size() <= 8)
+ return true;
+
+ color_space->matrix = VideoColorSpace::GetMatrixID(values[8]);
+ if (fields[8].size() != 2 ||
+ color_space->matrix == VideoColorSpace::MatrixID::INVALID) {
+ // TODO(dalecurtis): AV1 allows a few matrices we don't support yet.
+ // https://crbug.com/854290
+ if (values[8] == 12 || values[8] == 13 || values[8] == 14) {
+ DVLOG(3) << __func__ << " Unsupported matrix coefficients (" << fields[8]
+ << ")";
+ } else {
+ DVLOG(3) << __func__ << " Invalid matrix coefficients (" << fields[8]
+ << ")";
+ }
+ return false;
+ }
+
+ if (values.size() <= 9)
+ return true;
+
+ const int video_full_range_flag = values[9];
+ if (fields[9].size() != 1 || video_full_range_flag > 1) {
+ DVLOG(3) << __func__ << " Invalid full range flag (" << fields[9] << ")";
+ return false;
+ }
+ color_space->range = video_full_range_flag == 1
+ ? gfx::ColorSpace::RangeID::FULL
+ : gfx::ColorSpace::RangeID::LIMITED;
+
+ return true;
+}
+#endif // BUILDFLAG(ENABLE_AV1_DECODER)
+
bool ParseAVCCodecId(const std::string& codec_id,
VideoCodecProfile* profile,
uint8_t* level_idc) {
@@ -612,16 +821,18 @@ VideoCodec StringToVideoCodec(const std::string& codec_id) {
uint8_t level = 0;
VideoColorSpace color_space;
- // TODO(dalecurtis): The actual codec string will be similar (equivalent?) to
- // the vp9 codec string. Fix this before release. http://crbug.com/784607.
- if (codec_id == "av1")
- return kCodecAV1;
if (codec_id == "vp8" || codec_id == "vp8.0")
return kCodecVP8;
if (ParseNewStyleVp9CodecID(codec_id, &profile, &level, &color_space) ||
ParseLegacyVp9CodecID(codec_id, &profile, &level)) {
return kCodecVP9;
}
+
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+ if (ParseAv1CodecId(codec_id, &profile, &level, &color_space))
+ return kCodecAV1;
+#endif
+
if (codec_id == "theora")
return kCodecTheora;
if (ParseAVCCodecId(codec_id, &profile, &level))
diff --git a/chromium/media/base/video_codecs.h b/chromium/media/base/video_codecs.h
index 0788788b365..eafae3115d7 100644
--- a/chromium/media/base/video_codecs.h
+++ b/chromium/media/base/video_codecs.h
@@ -9,6 +9,7 @@
#include <string>
#include "media/base/media_export.h"
#include "media/media_buildflags.h"
+#include "third_party/libaom/av1_buildflags.h"
#include "ui/gfx/color_space.h"
namespace media {
@@ -88,12 +89,12 @@ enum VideoCodecProfile {
THEORAPROFILE_MIN = 23,
THEORAPROFILE_ANY = THEORAPROFILE_MIN,
THEORAPROFILE_MAX = THEORAPROFILE_ANY,
- // TODO(dalecurtis): AV1 profiles are not finalized, this needs updating
- // before enabling for release. http://crbug.com/784993
AV1PROFILE_MIN = 24,
- AV1PROFILE_PROFILE0 = AV1PROFILE_MIN,
- AV1PROFILE_MAX = AV1PROFILE_PROFILE0,
- VIDEO_CODEC_PROFILE_MAX = AV1PROFILE_PROFILE0,
+ AV1PROFILE_PROFILE_MAIN = AV1PROFILE_MIN,
+ AV1PROFILE_PROFILE_HIGH = 25,
+ AV1PROFILE_PROFILE_PRO = 26,
+ AV1PROFILE_MAX = AV1PROFILE_PROFILE_PRO,
+ VIDEO_CODEC_PROFILE_MAX = AV1PROFILE_PROFILE_PRO,
};
struct CodecProfileLevel {
@@ -120,6 +121,13 @@ MEDIA_EXPORT bool ParseLegacyVp9CodecID(const std::string& codec_id,
VideoCodecProfile* profile,
uint8_t* level_idc);
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+MEDIA_EXPORT bool ParseAv1CodecId(const std::string& codec_id,
+ VideoCodecProfile* profile,
+ uint8_t* level_idc,
+ VideoColorSpace* color_space);
+#endif
+
// Handle parsing AVC/H.264 codec ids as outlined in RFC 6381 and ISO-14496-10.
MEDIA_EXPORT bool ParseAVCCodecId(const std::string& codec_id,
VideoCodecProfile* profile,
diff --git a/chromium/media/base/video_codecs_unittest.cc b/chromium/media/base/video_codecs_unittest.cc
index 84d9103ef58..ebec36fd3d0 100644
--- a/chromium/media/base/video_codecs_unittest.cc
+++ b/chromium/media/base/video_codecs_unittest.cc
@@ -5,6 +5,7 @@
#include <set>
#include "base/logging.h"
+#include "base/strings/stringprintf.h"
#include "media/base/video_codecs.h"
#include "media/base/video_color_space.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -156,6 +157,287 @@ TEST(ParseVP9CodecId, NewStyleVP9CodecIDs) {
&level, &color_space));
}
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+TEST(ParseAv1CodecId, VerifyRequiredValues) {
+ VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
+ uint8_t level = 0;
+ VideoColorSpace color_space;
+
+ // Old style is not subset of new style.
+ EXPECT_FALSE(ParseAv1CodecId("av1", &profile, &level, &color_space));
+
+ // Parsing should fail when first 4 required fields are not provided.
+ EXPECT_FALSE(ParseAv1CodecId("av01", &profile, &level, &color_space));
+ EXPECT_FALSE(ParseAv1CodecId("av01.0", &profile, &level, &color_space));
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.04M", &profile, &level, &color_space));
+
+ // Expect success when all required fields supplied (and valid).
+ // TransferID not specified by string, should default to 709.
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.04M.08", &profile, &level, &color_space));
+ EXPECT_EQ(AV1PROFILE_PROFILE_MAIN, profile);
+ EXPECT_EQ(4, level);
+ EXPECT_EQ(VideoColorSpace::TransferID::BT709, color_space.transfer);
+
+ // Verify high and pro profiles parse correctly.
+ EXPECT_TRUE(ParseAv1CodecId("av01.1.04M.08", &profile, &level, &color_space));
+ EXPECT_EQ(AV1PROFILE_PROFILE_HIGH, profile);
+ EXPECT_TRUE(ParseAv1CodecId("av01.2.04M.08", &profile, &level, &color_space));
+ EXPECT_EQ(AV1PROFILE_PROFILE_PRO, profile);
+
+ // Leading zeros or negative values are forbidden.
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.00.04M.08", &profile, &level, &color_space));
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.-0.04M.08", &profile, &level, &color_space));
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.-1.04M.08", &profile, &level, &color_space));
+
+ // There are no profile values > 2
+ for (int i = 3; i <= 9; ++i) {
+ const std::string codec_string = base::StringPrintf("av01.%d.00M.08", i);
+ SCOPED_TRACE(codec_string);
+ EXPECT_FALSE(ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ }
+
+ // Leading zeros are required for the level.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.4M.08", &profile, &level, &color_space));
+
+ // Negative values are not allowed.
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.-4M.08", &profile, &level, &color_space));
+
+ // Verify valid levels parse correctly. Valid profiles are 00 -> 31.
+ for (int i = 0; i < 99; ++i) {
+ const std::string codec_string = base::StringPrintf("av01.0.%02dM.08", i);
+ SCOPED_TRACE(codec_string);
+
+ if (i < 32) {
+ EXPECT_TRUE(
+ ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ EXPECT_EQ(AV1PROFILE_PROFILE_MAIN, profile);
+ EXPECT_EQ(i, level);
+ EXPECT_EQ(VideoColorSpace::TransferID::BT709, color_space.transfer);
+ } else {
+ EXPECT_FALSE(
+ ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ }
+ }
+
+ // Verify tier parses correctly.
+ for (char c = '\0'; c <= '\255'; ++c) {
+ const std::string codec_string = base::StringPrintf("av01.1.00%c.08", c);
+ SCOPED_TRACE(codec_string);
+
+ if (c == 'M' || c == 'H') {
+ EXPECT_TRUE(
+ ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ EXPECT_EQ(AV1PROFILE_PROFILE_HIGH, profile);
+ EXPECT_EQ(0, level);
+ EXPECT_EQ(VideoColorSpace::TransferID::BT709, color_space.transfer);
+ } else {
+ EXPECT_FALSE(
+ ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ }
+ }
+
+ // Leading zeros are required for the bit depth.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.04M.8", &profile, &level, &color_space));
+
+ // Verify bitdepths. Only 8, 10, 12 are valid.
+ for (int i = 0; i < 99; ++i) {
+ const std::string codec_string = base::StringPrintf("av01.0.00M.%02d", i);
+ SCOPED_TRACE(codec_string);
+
+ if (i == 8 || i == 10 || i == 12) {
+ EXPECT_TRUE(
+ ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ EXPECT_EQ(AV1PROFILE_PROFILE_MAIN, profile);
+ EXPECT_EQ(0, level);
+ EXPECT_EQ(VideoColorSpace::TransferID::BT709, color_space.transfer);
+ } else {
+ EXPECT_FALSE(
+ ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ }
+ }
+}
+
+TEST(ParseAv1CodecId, VerifyOptionalMonochrome) {
+ VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
+ uint8_t level = 0;
+ VideoColorSpace color_space;
+
+ // monochrome is either 0, 1 and leading zeros are not allowed.
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.04M.08.00", &profile, &level, &color_space));
+ for (int i = 0; i <= 9; ++i) {
+ const std::string codec_string = base::StringPrintf("av01.0.00M.08.%d", i);
+ SCOPED_TRACE(codec_string);
+ EXPECT_EQ(i < 2,
+ ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ }
+}
+
+TEST(ParseAv1CodecId, VerifyOptionalSubsampling) {
+ VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
+ uint8_t level = 0;
+ VideoColorSpace color_space;
+
+ // chroma subsampling values are {0,1}{0,1}{0,3} with the last value always
+ // zero if either of the first two values are zero.
+ EXPECT_TRUE(
+ ParseAv1CodecId("av01.0.00M.10.0.000", &profile, &level, &color_space));
+ EXPECT_TRUE(
+ ParseAv1CodecId("av01.0.00M.10.0.100", &profile, &level, &color_space));
+ EXPECT_TRUE(
+ ParseAv1CodecId("av01.0.00M.10.0.010", &profile, &level, &color_space));
+ EXPECT_TRUE(
+ ParseAv1CodecId("av01.0.00M.10.0.111", &profile, &level, &color_space));
+ EXPECT_TRUE(
+ ParseAv1CodecId("av01.0.00M.10.0.112", &profile, &level, &color_space));
+ EXPECT_TRUE(
+ ParseAv1CodecId("av01.0.00M.10.0.113", &profile, &level, &color_space));
+
+ // Invalid cases.
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.00M.10.0.101", &profile, &level, &color_space));
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.00M.10.0.102", &profile, &level, &color_space));
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.00M.10.0.103", &profile, &level, &color_space));
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.00M.10.0.011", &profile, &level, &color_space));
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.00M.10.0.012", &profile, &level, &color_space));
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.00M.10.0.013", &profile, &level, &color_space));
+
+ // The last-value must be non-zero if the first two values are non-zero.
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.00M.10.0.110", &profile, &level, &color_space));
+
+ for (int i = 2; i <= 9; ++i) {
+ for (int j = 2; j <= 9; ++j) {
+ for (int k = 4; k <= 9; ++k) {
+ const std::string codec_string =
+ base::StringPrintf("av01.0.00M.08.0.%d%d%d", i, j, k);
+ SCOPED_TRACE(codec_string);
+ EXPECT_FALSE(
+ ParseAv1CodecId(codec_string, &profile, &level, &color_space));
+ }
+ }
+ }
+}
+
+TEST(ParseAv1CodecId, VerifyOptionalColorProperties) {
+ VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
+ uint8_t level = 0;
+ VideoColorSpace color_space;
+
+ // Verify a few color properties. This is non-exhaustive since validation is
+ // handled by common color space function. Below we validate only portions
+ // specific to the AV1 codec string.
+
+ // Leading zeros must be provided.
+ EXPECT_FALSE(
+ ParseAv1CodecId("av01.0.00M.10.0.000.1", &profile, &level, &color_space));
+ // Negative values are not allowed.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.-1", &profile, &level,
+ &color_space));
+
+ // BT709
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::PrimaryID::BT709, color_space.primaries);
+ // BT2020
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.09", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::PrimaryID::BT2020, color_space.primaries);
+ // 0 is invalid.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.00", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::PrimaryID::INVALID, color_space.primaries);
+ // 23 - 255 are reserved.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.23", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::PrimaryID::INVALID, color_space.primaries);
+
+ // Leading zeros must be provided.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.1", &profile, &level,
+ &color_space));
+ // Negative values are not allowed.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.-1", &profile, &level,
+ &color_space));
+
+ // Verify a few common EOTFs parse correctly.
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::TransferID::BT709, color_space.transfer);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.04", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::TransferID::GAMMA22, color_space.transfer);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.06", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::TransferID::SMPTE170M, color_space.transfer);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.14", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::TransferID::BT2020_10, color_space.transfer);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.15", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::TransferID::BT2020_12, color_space.transfer);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.13", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::TransferID::IEC61966_2_1, color_space.transfer);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.16", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::TransferID::SMPTEST2084, color_space.transfer);
+ // Verify 0 and 3 are reserved EOTF values.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.00", &profile, &level,
+ &color_space));
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.03", &profile, &level,
+ &color_space));
+
+ // Leading zeros must be provided.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.1", &profile, &level,
+ &color_space));
+ // Negative values are not allowed.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.-1", &profile, &level,
+ &color_space));
+
+ // Verify a few matrix coefficients.
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.00", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::MatrixID::RGB, color_space.matrix);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.01", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::MatrixID::BT709, color_space.matrix);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.10", &profile, &level,
+ &color_space));
+ EXPECT_EQ(VideoColorSpace::MatrixID::BT2020_CL, color_space.matrix);
+
+ // Values 12 - 255 reserved. Though 12 at least is a valid value we should
+ // support in the future. https://crbug.com/854290
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.12", &profile, &level,
+ &color_space));
+
+ // Leading zeros are not allowed.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.00.00", &profile,
+ &level, &color_space));
+ // Negative values are not allowed.
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.00.-1", &profile,
+ &level, &color_space));
+
+ // Verify full range flag (boolean 0 or 1).
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.00.0", &profile,
+ &level, &color_space));
+ EXPECT_EQ(gfx::ColorSpace::RangeID::LIMITED, color_space.range);
+ EXPECT_TRUE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.00.1", &profile,
+ &level, &color_space));
+ EXPECT_EQ(gfx::ColorSpace::RangeID::FULL, color_space.range);
+ EXPECT_FALSE(ParseAv1CodecId("av01.0.00M.10.0.000.01.01.00.2", &profile,
+ &level, &color_space));
+}
+#endif // BUILDFLAG(ENABLE_AV1_DECODER)
+
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
TEST(ParseHEVCCodecIdTest, InvalidHEVCCodecIds) {
VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
diff --git a/chromium/media/base/video_decoder.cc b/chromium/media/base/video_decoder.cc
index 59929f5e641..8eeb18eb449 100644
--- a/chromium/media/base/video_decoder.cc
+++ b/chromium/media/base/video_decoder.cc
@@ -16,6 +16,10 @@ void VideoDecoder::Destroy() {
VideoDecoder::~VideoDecoder() = default;
+bool VideoDecoder::IsPlatformDecoder() const {
+ return false;
+}
+
bool VideoDecoder::NeedsBitstreamConversion() const {
return false;
}
diff --git a/chromium/media/base/video_decoder.h b/chromium/media/base/video_decoder.h
index 8271978675c..bbbd08945aa 100644
--- a/chromium/media/base/video_decoder.h
+++ b/chromium/media/base/video_decoder.h
@@ -48,6 +48,15 @@ class MEDIA_EXPORT VideoDecoder {
// name does not change across multiple constructions.
virtual std::string GetDisplayName() const = 0;
+ // Returns true if the implementation is expected to be implemented by the
+ // platform. The value should be available immediately after construction and
+ // should not change within the lifetime of a decoder instance. The value is
+ // used for logging and metrics recording.
+ //
+ // TODO(sandersd): Use this to decide when to switch to software decode for
+ // low-resolution videos. https://crbug.com/684792
+ virtual bool IsPlatformDecoder() const;
+
// Initializes a VideoDecoder with the given |config|, executing the
// |init_cb| upon completion. |output_cb| is called for each output frame
// decoded by Decode().
diff --git a/chromium/media/base/video_decoder_config.cc b/chromium/media/base/video_decoder_config.cc
index 8d879d7b549..a9f7d4ab0fc 100644
--- a/chromium/media/base/video_decoder_config.cc
+++ b/chromium/media/base/video_decoder_config.cc
@@ -48,7 +48,9 @@ VideoCodec VideoCodecProfileToVideoCodec(VideoCodecProfile profile) {
return kCodecDolbyVision;
case THEORAPROFILE_ANY:
return kCodecTheora;
- case AV1PROFILE_PROFILE0:
+ case AV1PROFILE_PROFILE_MAIN:
+ case AV1PROFILE_PROFILE_HIGH:
+ case AV1PROFILE_PROFILE_PRO:
return kCodecAV1;
}
NOTREACHED();
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index b68ba405ce2..54a59242e50 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -17,6 +17,7 @@
#include "base/strings/stringprintf.h"
#include "build/build_config.h"
#include "media/base/limits.h"
+#include "media/base/scopedfd_helper.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_util.h"
#include "ui/gfx/geometry/point.h"
@@ -240,7 +241,40 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalData(
base::TimeDelta timestamp) {
return WrapExternalStorage(format, STORAGE_UNOWNED_MEMORY, coded_size,
visible_rect, natural_size, data, data_size,
- timestamp, base::SharedMemoryHandle(), 0);
+ timestamp, nullptr, nullptr,
+ base::SharedMemoryHandle(), 0);
+}
+
+// static
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalReadOnlySharedMemory(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8_t* data,
+ size_t data_size,
+ base::ReadOnlySharedMemoryRegion* region,
+ size_t data_offset,
+ base::TimeDelta timestamp) {
+ return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect,
+ natural_size, data, data_size, timestamp, region,
+ nullptr, base::SharedMemoryHandle(), data_offset);
+}
+
+// static
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalUnsafeSharedMemory(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8_t* data,
+ size_t data_size,
+ base::UnsafeSharedMemoryRegion* region,
+ size_t data_offset,
+ base::TimeDelta timestamp) {
+ return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect,
+ natural_size, data, data_size, timestamp, nullptr,
+ region, base::SharedMemoryHandle(), data_offset);
}
// static
@@ -255,8 +289,8 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
size_t data_offset,
base::TimeDelta timestamp) {
return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect,
- natural_size, data, data_size, timestamp, handle,
- data_offset);
+ natural_size, data, data_size, timestamp, nullptr,
+ nullptr, handle, data_offset);
}
// static
@@ -340,7 +374,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs(
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- const std::vector<int>& dmabuf_fds,
+ std::vector<base::ScopedFD> dmabuf_fds,
base::TimeDelta timestamp) {
const StorageType storage = STORAGE_DMABUFS;
if (!IsValidConfig(format, storage, coded_size, visible_rect, natural_size)) {
@@ -350,14 +384,23 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs(
return nullptr;
}
+ if (dmabuf_fds.empty() || dmabuf_fds.size() > NumPlanes(format)) {
+ LOG(DFATAL) << __func__ << " Incorrect number of dmabuf fds provided, got: "
+ << dmabuf_fds.size() << ", expected 1 to " << NumPlanes(format);
+ return nullptr;
+ }
+
gpu::MailboxHolder mailbox_holders[kMaxPlanes];
scoped_refptr<VideoFrame> frame =
new VideoFrame(format, storage, coded_size, visible_rect, natural_size,
mailbox_holders, ReleaseMailboxCB(), timestamp);
- if (!frame || !frame->DuplicateFileDescriptors(dmabuf_fds)) {
- LOG(DFATAL) << __func__ << " Couldn't duplicate fds.";
+ if (!frame) {
+ LOG(DFATAL) << __func__ << " Couldn't create VideoFrame instance.";
return nullptr;
}
+
+ frame->dmabuf_fds_ = std::move(dmabuf_fds);
+
return frame;
}
#endif
@@ -448,18 +491,28 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
#if defined(OS_LINUX)
// If there are any |dmabuf_fds_| plugged in, we should duplicate them.
if (frame->storage_type() == STORAGE_DMABUFS) {
- std::vector<int> original_fds;
- for (size_t i = 0; i < kMaxPlanes; ++i)
- original_fds.push_back(frame->DmabufFd(i));
- if (!wrapping_frame->DuplicateFileDescriptors(original_fds)) {
+ wrapping_frame->dmabuf_fds_ = DuplicateFDs(frame->dmabuf_fds_);
+ if (wrapping_frame->dmabuf_fds_.empty()) {
LOG(DFATAL) << __func__ << " Couldn't duplicate fds.";
return nullptr;
}
}
#endif
- if (frame->storage_type() == STORAGE_SHMEM)
- wrapping_frame->AddSharedMemoryHandle(frame->shared_memory_handle_);
+ if (frame->storage_type() == STORAGE_SHMEM) {
+ if (frame->read_only_shared_memory_region_) {
+ DCHECK(frame->read_only_shared_memory_region_->IsValid());
+ wrapping_frame->AddReadOnlySharedMemoryRegion(
+ frame->read_only_shared_memory_region_);
+ } else if (frame->unsafe_shared_memory_region_) {
+ DCHECK(frame->unsafe_shared_memory_region_->IsValid());
+ wrapping_frame->AddUnsafeSharedMemoryRegion(
+ frame->unsafe_shared_memory_region_);
+ } else {
+ DCHECK(frame->shared_memory_handle_.IsValid());
+ wrapping_frame->AddSharedMemoryHandle(frame->shared_memory_handle_);
+ }
+ }
return wrapping_frame;
}
@@ -775,6 +828,22 @@ VideoFrame::mailbox_holder(size_t texture_index) const {
return mailbox_holders_[texture_index];
}
+base::ReadOnlySharedMemoryRegion* VideoFrame::read_only_shared_memory_region()
+ const {
+ DCHECK_EQ(storage_type_, STORAGE_SHMEM);
+ DCHECK(read_only_shared_memory_region_ &&
+ read_only_shared_memory_region_->IsValid());
+ return read_only_shared_memory_region_;
+}
+
+base::UnsafeSharedMemoryRegion* VideoFrame::unsafe_shared_memory_region()
+ const {
+ DCHECK_EQ(storage_type_, STORAGE_SHMEM);
+ DCHECK(unsafe_shared_memory_region_ &&
+ unsafe_shared_memory_region_->IsValid());
+ return unsafe_shared_memory_region_;
+}
+
base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
DCHECK_EQ(storage_type_, STORAGE_SHMEM);
DCHECK(shared_memory_handle_.IsValid());
@@ -783,47 +852,45 @@ base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
size_t VideoFrame::shared_memory_offset() const {
DCHECK_EQ(storage_type_, STORAGE_SHMEM);
- DCHECK(shared_memory_handle_.IsValid());
+ DCHECK((read_only_shared_memory_region_ &&
+ read_only_shared_memory_region_->IsValid()) ||
+ (unsafe_shared_memory_region_ &&
+ unsafe_shared_memory_region_->IsValid()) ||
+ shared_memory_handle_.IsValid());
return shared_memory_offset_;
}
#if defined(OS_LINUX)
-int VideoFrame::DmabufFd(size_t plane) const {
+std::vector<int> VideoFrame::DmabufFds() const {
DCHECK_EQ(storage_type_, STORAGE_DMABUFS);
- DCHECK(IsValidPlane(plane, format_));
- return dmabuf_fds_[plane].get();
-}
+ std::vector<int> ret;
-bool VideoFrame::DuplicateFileDescriptors(const std::vector<int>& in_fds) {
- // TODO(mcasas): Support offsets for e.g. multiplanar inside a single |in_fd|.
+ for (auto& fd : dmabuf_fds_)
+ ret.emplace_back(fd.get());
- storage_type_ = STORAGE_DMABUFS;
- // TODO(posciak): This is not exactly correct, it's possible for one
- // buffer to contain more than one plane.
- if (in_fds.size() != NumPlanes(format_)) {
- LOG(FATAL) << "Not enough dmabuf fds provided, got: " << in_fds.size()
- << ", expected: " << NumPlanes(format_);
- return false;
- }
+ return ret;
+}
+#endif
- // Make sure that all fds are closed if any dup() fails,
- base::ScopedFD temp_dmabuf_fds[kMaxPlanes];
- for (size_t i = 0; i < in_fds.size(); ++i) {
- temp_dmabuf_fds[i] = base::ScopedFD(HANDLE_EINTR(dup(in_fds[i])));
- if (!temp_dmabuf_fds[i].is_valid()) {
- DPLOG(ERROR) << "Failed duplicating a dmabuf fd";
- return false;
- }
- }
- for (size_t i = 0; i < kMaxPlanes; ++i)
- dmabuf_fds_[i] = std::move(temp_dmabuf_fds[i]);
+void VideoFrame::AddReadOnlySharedMemoryRegion(
+ base::ReadOnlySharedMemoryRegion* region) {
+ storage_type_ = STORAGE_SHMEM;
+ DCHECK(SharedMemoryUninitialized());
+ DCHECK(region && region->IsValid());
+ read_only_shared_memory_region_ = region;
+}
- return true;
+void VideoFrame::AddUnsafeSharedMemoryRegion(
+ base::UnsafeSharedMemoryRegion* region) {
+ storage_type_ = STORAGE_SHMEM;
+ DCHECK(SharedMemoryUninitialized());
+ DCHECK(region && region->IsValid());
+ unsafe_shared_memory_region_ = region;
}
-#endif
void VideoFrame::AddSharedMemoryHandle(base::SharedMemoryHandle handle) {
storage_type_ = STORAGE_SHMEM;
+ DCHECK(SharedMemoryUninitialized());
shared_memory_handle_ = handle;
}
@@ -921,6 +988,8 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
uint8_t* data,
size_t data_size,
base::TimeDelta timestamp,
+ base::ReadOnlySharedMemoryRegion* read_only_region,
+ base::UnsafeSharedMemoryRegion* unsafe_region,
base::SharedMemoryHandle handle,
size_t data_offset) {
DCHECK(IsStorageTypeMappable(storage_type));
@@ -945,8 +1014,15 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
scoped_refptr<VideoFrame> frame;
if (storage_type == STORAGE_SHMEM) {
- frame = new VideoFrame(format, storage_type, coded_size, visible_rect,
- natural_size, timestamp, handle, data_offset);
+ if (read_only_region || unsafe_region) {
+ DCHECK(!handle.IsValid());
+ frame = new VideoFrame(format, storage_type, coded_size, visible_rect,
+ natural_size, timestamp, read_only_region,
+ unsafe_region, data_offset);
+ } else {
+ frame = new VideoFrame(format, storage_type, coded_size, visible_rect,
+ natural_size, timestamp, handle, data_offset);
+ }
} else {
frame = new VideoFrame(format, storage_type, coded_size, visible_rect,
natural_size, timestamp);
@@ -1066,6 +1142,35 @@ VideoFrame::VideoFrame(VideoPixelFormat format,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
base::TimeDelta timestamp,
+ base::ReadOnlySharedMemoryRegion* read_only_region,
+ base::UnsafeSharedMemoryRegion* unsafe_region,
+ size_t shared_memory_offset)
+ : VideoFrame(format,
+ storage_type,
+ coded_size,
+ visible_rect,
+ natural_size,
+ timestamp) {
+ DCHECK_EQ(storage_type, STORAGE_SHMEM);
+ DCHECK_EQ(bool(read_only_region) ^ bool(unsafe_region), 1)
+ << "Expected exactly one read-only or unsafe region for STORAGE_SHMEM "
+ "VideoFrame";
+ if (read_only_region) {
+ read_only_shared_memory_region_ = read_only_region;
+ DCHECK(read_only_shared_memory_region_->IsValid());
+ } else if (unsafe_region) {
+ unsafe_shared_memory_region_ = unsafe_region;
+ DCHECK(unsafe_shared_memory_region_->IsValid());
+ }
+ shared_memory_offset_ = shared_memory_offset;
+}
+
+VideoFrame::VideoFrame(VideoPixelFormat format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
base::SharedMemoryHandle handle,
size_t shared_memory_offset)
: VideoFrame(format,
@@ -1124,6 +1229,11 @@ scoped_refptr<VideoFrame> VideoFrame::CreateFrameInternal(
return frame;
}
+bool VideoFrame::SharedMemoryUninitialized() {
+ return !read_only_shared_memory_region_ && !unsafe_shared_memory_region_ &&
+ !shared_memory_handle_.IsValid();
+}
+
// static
gfx::Size VideoFrame::SampleSize(VideoPixelFormat format, size_t plane) {
DCHECK(IsValidPlane(plane, format));
diff --git a/chromium/media/base/video_frame.h b/chromium/media/base/video_frame.h
index ea98a28b736..d70e19cc7a0 100644
--- a/chromium/media/base/video_frame.h
+++ b/chromium/media/base/video_frame.h
@@ -16,7 +16,10 @@
#include "base/macros.h"
#include "base/md5.h"
#include "base/memory/aligned_memory.h"
+#include "base/memory/read_only_shared_memory_region.h"
#include "base/memory/shared_memory.h"
+#include "base/memory/shared_memory_handle.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/synchronization/lock.h"
#include "build/build_config.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
@@ -147,7 +150,38 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
size_t data_size,
base::TimeDelta timestamp);
- // Same as WrapExternalData() with SharedMemoryHandle and its offset.
+ // Same as WrapExternalData() with a ReadOnlySharedMemoryRegion and its
+ // offset. Neither |region| nor |data| are owned by this VideoFrame. The
+ // region and mapping which back |data| must outlive this instance; a
+ // destruction observer can be used in this case.
+ static scoped_refptr<VideoFrame> WrapExternalReadOnlySharedMemory(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8_t* data,
+ size_t data_size,
+ base::ReadOnlySharedMemoryRegion* region,
+ size_t shared_memory_offset,
+ base::TimeDelta timestamp);
+
+ // Same as WrapExternalData() with a UnsafeSharedMemoryRegion and its
+ // offset. Neither |region| nor |data| are owned by this VideoFrame. The owner
+ // of the region and mapping which back |data| must outlive this instance; a
+ // destruction observer can be used in this case.
+ static scoped_refptr<VideoFrame> WrapExternalUnsafeSharedMemory(
+ VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8_t* data,
+ size_t data_size,
+ base::UnsafeSharedMemoryRegion* region,
+ size_t shared_memory_offset,
+ base::TimeDelta timestamp);
+
+ // Legacy wrapping of old SharedMemoryHandle objects. Deprecated, use one of
+ // the shared memory region wrappers above instead.
static scoped_refptr<VideoFrame> WrapExternalSharedMemory(
VideoPixelFormat format,
const gfx::Size& coded_size,
@@ -194,10 +228,10 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
#if defined(OS_LINUX)
// Wraps provided dmabufs
// (https://www.kernel.org/doc/html/latest/driver-api/dma-buf.html) with a
- // VideoFrame. The dmabuf fds are dup()ed on creation, so that the VideoFrame
- // retains a reference to them, and are automatically close()d on destruction,
- // dropping the reference. The caller may safely close() its reference after
- // calling WrapExternalDmabufs().
+ // VideoFrame. The frame will take ownership of |dmabuf_fds|, and will
+ // automatically close() them on destruction. Callers can call
+ // media::DuplicateFDs() if they need to retain a copy of the FDs for
+ // themselves. Note that the FDs are consumed even in case of failure.
// The image data is only accessible via dmabuf fds, which are usually passed
// directly to a hardware device and/or to another process, or can also be
// mapped via mmap() for CPU access.
@@ -207,7 +241,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- const std::vector<int>& dmabuf_fds,
+ std::vector<base::ScopedFD> dmabuf_fds,
base::TimeDelta timestamp);
#endif
@@ -341,22 +375,33 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// mailbox, the caller must wait for the included sync point.
const gpu::MailboxHolder& mailbox_holder(size_t texture_index) const;
- // Returns the shared-memory handle, if present
+ // Returns a pointer to the read-only shared-memory region, if present.
+ base::ReadOnlySharedMemoryRegion* read_only_shared_memory_region() const;
+
+ // Returns a pointer to the unsafe shared memory handle, if present.
+ base::UnsafeSharedMemoryRegion* unsafe_shared_memory_region() const;
+
+ // Retuns the legacy SharedMemoryHandle, if present.
base::SharedMemoryHandle shared_memory_handle() const;
// Returns the offset into the shared memory where the frame data begins.
size_t shared_memory_offset() const;
#if defined(OS_LINUX)
- // Returns backing DmaBuf file descriptor for given |plane|, if present, or
- // -1 if not.
- int DmabufFd(size_t plane) const;
-
- // Duplicates internally the |fds_in|, overwriting the current ones. Returns
- // false if something goes wrong, and leaves all internal fds closed.
- bool DuplicateFileDescriptors(const std::vector<int>& fds_in);
+ // Return a vector containing the backing DmaBufs for this frame. The number
+ // of returned DmaBufs will be equal or less than the number of planes of
+ // the frame. If there are less, this means that the last FD contains the
+ // remaining planes.
+ // Note that the returned FDs are still owned by the VideoFrame. This means
+ // that the caller shall not close them, or use them after the VideoFrame is
+ // destroyed.
+ std::vector<int> DmabufFds() const;
#endif
+ void AddReadOnlySharedMemoryRegion(base::ReadOnlySharedMemoryRegion* region);
+ void AddUnsafeSharedMemoryRegion(base::UnsafeSharedMemoryRegion* region);
+
+ // Legacy, use one of the Add*SharedMemoryRegion methods above instead.
void AddSharedMemoryHandle(base::SharedMemoryHandle handle);
#if defined(OS_MACOSX)
@@ -457,6 +502,15 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
base::TimeDelta timestamp,
+ base::ReadOnlySharedMemoryRegion* read_only_region,
+ base::UnsafeSharedMemoryRegion* unsafe_region,
+ size_t shared_memory_offset);
+ VideoFrame(VideoPixelFormat format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
base::SharedMemoryHandle handle,
size_t shared_memory_offset);
VideoFrame(VideoPixelFormat format,
@@ -477,6 +531,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
uint8_t* data,
size_t data_size,
base::TimeDelta timestamp,
+ base::ReadOnlySharedMemoryRegion* read_only_region,
+ base::UnsafeSharedMemoryRegion* unsafe_region,
base::SharedMemoryHandle handle,
size_t data_offset);
@@ -488,6 +544,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::TimeDelta timestamp,
bool zero_initialize_memory);
+ bool SharedMemoryUninitialized();
+
// Returns the pixel size of each subsample for a given |plane| and |format|.
// E.g. 2x2 for the U-plane in PIXEL_FORMAT_I420.
static gfx::Size SampleSize(VideoPixelFormat format, size_t plane);
@@ -535,14 +593,24 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
gpu::MailboxHolder mailbox_holders_[kMaxPlanes];
ReleaseMailboxCB mailbox_holders_release_cb_;
- // Shared memory handle and associated offset inside it, if this frame is
- // a STORAGE_SHMEM one.
+ // Shared memory handle and associated offset inside it, if this frame is a
+ // STORAGE_SHMEM one. Pointers to unowned shared memory regions. At most one
+ // of the memory regions will be set.
+ base::ReadOnlySharedMemoryRegion* read_only_shared_memory_region_ = nullptr;
+ base::UnsafeSharedMemoryRegion* unsafe_shared_memory_region_ = nullptr;
+
+ // Legacy handle.
base::SharedMemoryHandle shared_memory_handle_;
+
+ // If this is a STORAGE_SHMEM frame, the offset of the data within the shared
+ // memory.
size_t shared_memory_offset_;
#if defined(OS_LINUX)
- // Dmabufs for each plane. If set, this frame has DmaBuf backing in some way.
- base::ScopedFD dmabuf_fds_[kMaxPlanes];
+ // Dmabufs for the frame, used when storage is STORAGE_DMABUFS. Size is either
+ // equal or less than the number of planes of the frame. If it is less, then
+ // the memory area represented by the last FD contains the remaining planes.
+ std::vector<base::ScopedFD> dmabuf_fds_;
#endif
#if defined(OS_MACOSX)
diff --git a/chromium/media/base/video_frame_layout.cc b/chromium/media/base/video_frame_layout.cc
new file mode 100644
index 00000000000..43ac1a0964b
--- /dev/null
+++ b/chromium/media/base/video_frame_layout.cc
@@ -0,0 +1,57 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/video_frame_layout.h"
+
+#include <numeric>
+#include <sstream>
+
+namespace {
+
+template <class T>
+std::string VectorToString(const std::vector<T>& vec) {
+ std::ostringstream result;
+ std::string delim;
+ result << "[";
+ for (auto v : vec) {
+ result << delim << v;
+ if (delim.size() == 0)
+ delim = ", ";
+ }
+ result << "]";
+ return result.str();
+}
+
+} // namespace
+
+namespace media {
+
+VideoFrameLayout::VideoFrameLayout(VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ std::vector<int32_t> strides,
+ std::vector<size_t> buffer_sizes)
+ : format_(format),
+ coded_size_(coded_size),
+ strides_(std::move(strides)),
+ buffer_sizes_(std::move(buffer_sizes)) {}
+
+VideoFrameLayout::VideoFrameLayout(const VideoFrameLayout& layout) = default;
+VideoFrameLayout::~VideoFrameLayout() = default;
+
+size_t VideoFrameLayout::GetTotalBufferSize() const {
+ return std::accumulate(buffer_sizes_.begin(), buffer_sizes_.end(), 0u);
+}
+
+std::string VideoFrameLayout::ToString() const {
+ std::ostringstream s;
+ s << "VideoFrameLayout format:" << VideoPixelFormatToString(format_)
+ << " coded_size:" << coded_size_.ToString()
+ << " num_buffers:" << num_buffers()
+ << " buffer_sizes:" << VectorToString(buffer_sizes_)
+ << " num_strides:" << num_strides()
+ << " strides:" << VectorToString(strides_);
+ return s.str();
+}
+
+} // namespace media
diff --git a/chromium/media/base/video_frame_layout.h b/chromium/media/base/video_frame_layout.h
new file mode 100644
index 00000000000..4eafad51b01
--- /dev/null
+++ b/chromium/media/base/video_frame_layout.h
@@ -0,0 +1,88 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_VIDEO_FRAME_LAYOUT_H_
+#define MEDIA_BASE_VIDEO_FRAME_LAYOUT_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "media/base/media_export.h"
+#include "media/base/video_types.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+// A class to describes how physical buffer is allocated for video frame.
+// In stores format, coded size of the frame and size of physical buffers
+// which can be used to allocate buffer(s) hardware expected.
+// Also, it stores stride (bytes per line) per color plane to calculate each
+// color plane's size (note that a buffer may contains multiple color planes.)
+class MEDIA_EXPORT VideoFrameLayout {
+ public:
+ // Constructor with strides and buffers' size.
+ VideoFrameLayout(VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ std::vector<int32_t> strides = std::vector<int32_t>(),
+ std::vector<size_t> buffer_sizes = std::vector<size_t>());
+
+ VideoFrameLayout(const VideoFrameLayout& layout);
+
+ ~VideoFrameLayout();
+
+ VideoPixelFormat format() const { return format_; }
+ const gfx::Size& coded_size() const { return coded_size_; }
+
+ // Return number of buffers. Note that num_strides >= num_buffers.
+ size_t num_buffers() const { return buffer_sizes_.size(); }
+
+ // Returns number of strides. Note that num_strides >= num_buffers.
+ size_t num_strides() const { return strides_.size(); }
+
+ const std::vector<int32_t>& strides() const { return strides_; }
+ const std::vector<size_t>& buffer_sizes() const { return buffer_sizes_; }
+
+ // Sets strides.
+ void set_strides(std::vector<int32_t> strides) {
+ strides_ = std::move(strides);
+ }
+
+ // Sets buffer_sizes.
+ void set_buffer_sizes(std::vector<size_t> buffer_sizes) {
+ buffer_sizes_ = std::move(buffer_sizes);
+ }
+
+ // Returns sum of bytes of all buffers.
+ size_t GetTotalBufferSize() const;
+
+ // Composes VideoFrameLayout as human readable string.
+ std::string ToString() const;
+
+ private:
+ const VideoPixelFormat format_;
+
+ // Width and height of the video frame in pixels. This must include pixel
+ // data for the whole image; i.e. for YUV formats with subsampled chroma
+ // planes, in the case that the visible portion of the image does not line up
+ // on a sample boundary, |coded_size_| must be rounded up appropriately and
+ // the pixel data provided for the odd pixels.
+ const gfx::Size coded_size_;
+
+ // Vector of strides for each buffer, typically greater or equal to the
+ // width of the surface divided by the horizontal sampling period. Note that
+ // strides can be negative if the image layout is bottom-up.
+ std::vector<int32_t> strides_;
+
+ // Vector of sizes for each buffer, typically greater or equal to the area of
+ // |coded_size_|.
+ std::vector<size_t> buffer_sizes_;
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_VIDEO_FRAME_LAYOUT_H_
diff --git a/chromium/media/base/video_frame_layout_unittest.cc b/chromium/media/base/video_frame_layout_unittest.cc
new file mode 100644
index 00000000000..b15707f0e6c
--- /dev/null
+++ b/chromium/media/base/video_frame_layout_unittest.cc
@@ -0,0 +1,113 @@
+// Copyright (c) 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/video_frame_layout.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+#include <utility>
+
+#include "media/base/video_types.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+TEST(VideoFrameLayout, Constructor) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ std::vector<int32_t> strides = {384, 192, 192};
+ std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
+ VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size, strides, buffer_sizes);
+
+ EXPECT_EQ(layout.format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout.coded_size(), coded_size);
+ EXPECT_EQ(layout.num_strides(), 3u);
+ EXPECT_EQ(layout.num_buffers(), 3u);
+ EXPECT_EQ(layout.GetTotalBufferSize(), 110592u);
+ for (size_t i = 0; i < 3; ++i) {
+ EXPECT_EQ(layout.strides()[i], strides[i]);
+ EXPECT_EQ(layout.buffer_sizes()[i], buffer_sizes[i]);
+ }
+}
+
+TEST(VideoFrameLayout, ConstructorNoStrideBufferSize) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size);
+
+ EXPECT_EQ(layout.format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout.coded_size(), coded_size);
+ EXPECT_EQ(layout.GetTotalBufferSize(), 0u);
+ EXPECT_EQ(layout.num_strides(), 0u);
+ EXPECT_EQ(layout.num_buffers(), 0u);
+}
+
+TEST(VideoFrameLayout, CopyConstructor) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ std::vector<int32_t> strides = {384, 192, 192};
+ std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
+ VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size, strides, buffer_sizes);
+
+ VideoFrameLayout layout_copy(layout);
+
+ EXPECT_EQ(layout_copy.format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(layout_copy.coded_size(), coded_size);
+ EXPECT_EQ(layout_copy.num_strides(), 3u);
+ EXPECT_EQ(layout_copy.num_buffers(), 3u);
+ EXPECT_EQ(layout_copy.GetTotalBufferSize(), 110592u);
+ for (size_t i = 0; i < 3; ++i) {
+ EXPECT_EQ(layout_copy.strides()[i], strides[i]);
+ EXPECT_EQ(layout_copy.buffer_sizes()[i], buffer_sizes[i]);
+ }
+}
+
+TEST(VideoFrameLayout, ToString) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ std::vector<int32_t> strides = {384, 192, 192};
+ std::vector<size_t> buffer_sizes = {73728, 18432, 18432};
+ VideoFrameLayout layout(PIXEL_FORMAT_I420, coded_size, strides, buffer_sizes);
+
+ EXPECT_EQ(layout.ToString(),
+ "VideoFrameLayout format:PIXEL_FORMAT_I420 coded_size:320x180 "
+ "num_buffers:3 buffer_sizes:[73728, 18432, 18432] num_strides:3 "
+ "strides:[384, 192, 192]");
+}
+
+TEST(VideoFrameLayout, ToStringOneBuffer) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ std::vector<int32_t> strides = {384};
+ std::vector<size_t> buffer_sizes = {122880};
+ VideoFrameLayout layout(PIXEL_FORMAT_NV12, coded_size, strides, buffer_sizes);
+
+ EXPECT_EQ(layout.ToString(),
+ "VideoFrameLayout format:PIXEL_FORMAT_NV12 coded_size:320x180 "
+ "num_buffers:1 buffer_sizes:[122880] num_strides:1 strides:[384]");
+}
+
+TEST(VideoFrameLayout, ToStringNoBufferInfo) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ VideoFrameLayout layout(PIXEL_FORMAT_NV12, coded_size);
+
+ EXPECT_EQ(layout.ToString(),
+ "VideoFrameLayout format:PIXEL_FORMAT_NV12 coded_size:320x180 "
+ "num_buffers:0 buffer_sizes:[] num_strides:0 strides:[]");
+}
+
+TEST(VideoFrameLayout, SetStrideBufferSize) {
+ gfx::Size coded_size = gfx::Size(320, 180);
+ VideoFrameLayout layout(PIXEL_FORMAT_NV12, coded_size);
+
+ std::vector<int32_t> strides = {384, 192, 192};
+ layout.set_strides(std::move(strides));
+ std::vector<size_t> buffer_sizes = {122880};
+ layout.set_buffer_sizes(std::move(buffer_sizes));
+
+ EXPECT_EQ(layout.ToString(),
+ "VideoFrameLayout format:PIXEL_FORMAT_NV12 coded_size:320x180 "
+ "num_buffers:1 buffer_sizes:[122880] num_strides:3 "
+ "strides:[384, 192, 192]");
+}
+
+} // namespace media
diff --git a/chromium/media/base/video_frame_metadata.h b/chromium/media/base/video_frame_metadata.h
index cabe173609a..7b4d2cb4973 100644
--- a/chromium/media/base/video_frame_metadata.h
+++ b/chromium/media/base/video_frame_metadata.h
@@ -134,6 +134,8 @@ class MEDIA_EXPORT VideoFrameMetadata {
PAGE_SCALE_FACTOR,
ROOT_SCROLL_OFFSET_X,
ROOT_SCROLL_OFFSET_Y,
+ TOP_CONTROLS_HEIGHT,
+ TOP_CONTROLS_SHOWN_RATIO,
NUM_KEYS
};
diff --git a/chromium/media/base/video_frame_unittest.cc b/chromium/media/base/video_frame_unittest.cc
index 8f11bbc76ad..7f1401fb908 100644
--- a/chromium/media/base/video_frame_unittest.cc
+++ b/chromium/media/base/video_frame_unittest.cc
@@ -13,11 +13,39 @@
#include "base/format_macros.h"
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
+#include "base/memory/read_only_shared_memory_region.h"
+#include "base/memory/shared_memory.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/strings/stringprintf.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/libyuv/include/libyuv.h"
+namespace {
+// Creates the backing storage for a frame suitable for WrapExternalData. Note
+// that this is currently used only to test frame creation and tear-down, and so
+// may not have alignment or other properties correct further video processing.
+// |memory| must be at least 2 * coded_size.width() * coded_size.height() in
+// bytes.
+void CreateTestY16Frame(const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ void* memory) {
+ const int offset_x = visible_rect.x();
+ const int offset_y = visible_rect.y();
+ const int stride = coded_size.width();
+
+ // In the visible rect, fill upper byte with [0-255] and lower with [255-0].
+ uint16_t* data = static_cast<uint16_t*>(memory);
+ for (int j = 0; j < visible_rect.height(); j++) {
+ for (int i = 0; i < visible_rect.width(); i++) {
+ const int value = i + j * visible_rect.width();
+ data[(stride * (j + offset_y)) + i + offset_x] =
+ ((value & 0xFF) << 8) | (~value & 0xFF);
+ }
+ }
+}
+} // namespace
+
namespace media {
using base::MD5DigestToBase16;
@@ -283,6 +311,82 @@ TEST(VideoFrame, WrapVideoFrame) {
EXPECT_TRUE(done_callback_was_run);
}
+// Create a frame that wraps unowned memory.
+TEST(VideoFrame, WrapExternalData) {
+ uint8_t memory[2 * 256 * 256];
+ gfx::Size coded_size(256, 256);
+ gfx::Rect visible_rect(coded_size);
+ CreateTestY16Frame(coded_size, visible_rect, memory);
+ auto timestamp = base::TimeDelta::FromMilliseconds(1);
+ auto frame = VideoFrame::WrapExternalData(media::PIXEL_FORMAT_Y16, coded_size,
+ visible_rect, visible_rect.size(),
+ memory, sizeof(memory), timestamp);
+
+ EXPECT_EQ(frame->coded_size(), coded_size);
+ EXPECT_EQ(frame->visible_rect(), visible_rect);
+ EXPECT_EQ(frame->timestamp(), timestamp);
+ EXPECT_EQ(frame->data(media::VideoFrame::kYPlane)[0], 0xff);
+}
+
+// Create a frame that wraps read-only shared memory.
+TEST(VideoFrame, WrapExternalReadOnlySharedMemory) {
+ const size_t kDataSize = 2 * 256 * 256;
+ auto mapped_region = base::ReadOnlySharedMemoryRegion::Create(kDataSize);
+ gfx::Size coded_size(256, 256);
+ gfx::Rect visible_rect(coded_size);
+ CreateTestY16Frame(coded_size, visible_rect, mapped_region.mapping.memory());
+ auto timestamp = base::TimeDelta::FromMilliseconds(1);
+ auto frame = VideoFrame::WrapExternalReadOnlySharedMemory(
+ media::PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
+ static_cast<uint8_t*>(mapped_region.mapping.memory()), kDataSize,
+ &mapped_region.region, 0, timestamp);
+
+ EXPECT_EQ(frame->coded_size(), coded_size);
+ EXPECT_EQ(frame->visible_rect(), visible_rect);
+ EXPECT_EQ(frame->timestamp(), timestamp);
+ EXPECT_EQ(frame->data(media::VideoFrame::kYPlane)[0], 0xff);
+}
+
+// Create a frame that wraps unsafe shared memory.
+TEST(VideoFrame, WrapExternalUnsafeSharedMemory) {
+ const size_t kDataSize = 2 * 256 * 256;
+ auto region = base::UnsafeSharedMemoryRegion::Create(kDataSize);
+ auto mapping = region.Map();
+ gfx::Size coded_size(256, 256);
+ gfx::Rect visible_rect(coded_size);
+ CreateTestY16Frame(coded_size, visible_rect, mapping.memory());
+ auto timestamp = base::TimeDelta::FromMilliseconds(1);
+ auto frame = VideoFrame::WrapExternalUnsafeSharedMemory(
+ media::PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
+ static_cast<uint8_t*>(mapping.memory()), kDataSize, &region, 0,
+ timestamp);
+
+ EXPECT_EQ(frame->coded_size(), coded_size);
+ EXPECT_EQ(frame->visible_rect(), visible_rect);
+ EXPECT_EQ(frame->timestamp(), timestamp);
+ EXPECT_EQ(frame->data(media::VideoFrame::kYPlane)[0], 0xff);
+}
+
+// Create a frame that wraps a legacy shared memory handle.
+TEST(VideoFrame, WrapExternalSharedMemory) {
+ const size_t kDataSize = 2 * 256 * 256;
+ base::SharedMemory shm;
+ ASSERT_TRUE(shm.CreateAndMapAnonymous(kDataSize));
+ gfx::Size coded_size(256, 256);
+ gfx::Rect visible_rect(coded_size);
+ CreateTestY16Frame(coded_size, visible_rect, shm.memory());
+ auto timestamp = base::TimeDelta::FromMilliseconds(1);
+ auto frame = VideoFrame::WrapExternalSharedMemory(
+ media::PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
+ static_cast<uint8_t*>(shm.memory()), kDataSize, shm.handle(), 0,
+ timestamp);
+
+ EXPECT_EQ(frame->coded_size(), coded_size);
+ EXPECT_EQ(frame->visible_rect(), visible_rect);
+ EXPECT_EQ(frame->timestamp(), timestamp);
+ EXPECT_EQ(frame->data(media::VideoFrame::kYPlane)[0], 0xff);
+}
+
// Ensure each frame is properly sized and allocated. Will trigger OOB reads
// and writes as well as incorrect frame hashes otherwise.
TEST(VideoFrame, CheckFrameExtents) {
diff --git a/chromium/media/blink/BUILD.gn b/chromium/media/blink/BUILD.gn
index ce6a5393312..42535b55689 100644
--- a/chromium/media/blink/BUILD.gn
+++ b/chromium/media/blink/BUILD.gn
@@ -45,6 +45,8 @@ component("blink") {
"video_decode_stats_reporter.h",
"video_frame_compositor.cc",
"video_frame_compositor.h",
+ "watch_time_component.cc",
+ "watch_time_component.h",
"watch_time_reporter.cc",
"watch_time_reporter.h",
"webaudiosourceprovider_impl.cc",
@@ -115,7 +117,7 @@ test("media_blink_unittests") {
"//media:test_support",
"//media/mojo/interfaces",
"//media/mojo/services",
- "//mojo/edk",
+ "//mojo/core/embedder",
"//net",
"//testing/gmock",
"//testing/gtest",
@@ -156,8 +158,10 @@ test("media_blink_unittests") {
"url_index_unittest.cc",
"video_decode_stats_reporter_unittest.cc",
"video_frame_compositor_unittest.cc",
+ "watch_time_component_unittest.cc",
"watch_time_reporter_unittest.cc",
"webaudiosourceprovider_impl_unittest.cc",
+ "webmediacapabilitiesclient_impl_unittest.cc",
"webmediaplayer_impl_unittest.cc",
]
@@ -168,8 +172,5 @@ test("media_blink_unittests") {
} else {
deps += [ "//v8:v8_external_startup_data_assets" ]
}
-
- # KeySystemConfigSelectorTest fails on Android (crbug.com/608541).
- sources -= [ "key_system_config_selector_unittest.cc" ]
}
}
diff --git a/chromium/media/blink/DEPS b/chromium/media/blink/DEPS
index ac48ebad4be..efda6347518 100644
--- a/chromium/media/blink/DEPS
+++ b/chromium/media/blink/DEPS
@@ -1,14 +1,14 @@
include_rules = [
"+cc/layers/layer.h",
+ "+cc/layers/surface_layer.h",
"+cc/layers/video_frame_provider.h",
"+cc/layers/video_layer.h",
"+components/scheduler", # Only allowed in tests.
"+components/viz/common/gpu/context_provider.h",
- "+components/viz/common/resources/shared_bitmap_manager.h",
"+components/viz/common/surfaces/frame_sink_id.h",
"+gin",
"+media",
- "+mojo/edk/embedder",
+ "+mojo/core/embedder",
"+mojo/public/cpp/bindings",
"+net/base",
"+net/http",
diff --git a/chromium/media/blink/key_system_config_selector.cc b/chromium/media/blink/key_system_config_selector.cc
index 127c1bb714f..63e13bf1c92 100644
--- a/chromium/media/blink/key_system_config_selector.cc
+++ b/chromium/media/blink/key_system_config_selector.cc
@@ -29,10 +29,12 @@ namespace media {
using EmeFeatureRequirement =
blink::WebMediaKeySystemConfiguration::Requirement;
+using EmeEncryptionScheme =
+ blink::WebMediaKeySystemMediaCapability::EncryptionScheme;
namespace {
-static EmeConfigRule GetSessionTypeConfigRule(EmeSessionTypeSupport support) {
+EmeConfigRule GetSessionTypeConfigRule(EmeSessionTypeSupport support) {
switch (support) {
case EmeSessionTypeSupport::INVALID:
NOTREACHED();
@@ -48,7 +50,7 @@ static EmeConfigRule GetSessionTypeConfigRule(EmeSessionTypeSupport support) {
return EmeConfigRule::NOT_SUPPORTED;
}
-static EmeConfigRule GetDistinctiveIdentifierConfigRule(
+EmeConfigRule GetDistinctiveIdentifierConfigRule(
EmeFeatureSupport support,
EmeFeatureRequirement requirement) {
if (support == EmeFeatureSupport::INVALID) {
@@ -88,9 +90,8 @@ static EmeConfigRule GetDistinctiveIdentifierConfigRule(
return EmeConfigRule::IDENTIFIER_REQUIRED;
}
-static EmeConfigRule GetPersistentStateConfigRule(
- EmeFeatureSupport support,
- EmeFeatureRequirement requirement) {
+EmeConfigRule GetPersistentStateConfigRule(EmeFeatureSupport support,
+ EmeFeatureRequirement requirement) {
if (support == EmeFeatureSupport::INVALID) {
NOTREACHED();
return EmeConfigRule::NOT_SUPPORTED;
@@ -131,14 +132,13 @@ static EmeConfigRule GetPersistentStateConfigRule(
return EmeConfigRule::PERSISTENCE_REQUIRED;
}
-static bool IsPersistentSessionType(
- blink::WebEncryptedMediaSessionType sessionType) {
+bool IsPersistentSessionType(blink::WebEncryptedMediaSessionType sessionType) {
switch (sessionType) {
case blink::WebEncryptedMediaSessionType::kTemporary:
return false;
case blink::WebEncryptedMediaSessionType::kPersistentLicense:
return true;
- case blink::WebEncryptedMediaSessionType::kPersistentReleaseMessage:
+ case blink::WebEncryptedMediaSessionType::kPersistentUsageRecord:
return true;
case blink::WebEncryptedMediaSessionType::kUnknown:
break;
@@ -148,6 +148,25 @@ static bool IsPersistentSessionType(
return false;
}
+bool IsSupportedMediaType(const std::string& container_mime_type,
+ const std::string& codecs,
+ bool use_aes_decryptor) {
+ DVLOG(3) << __func__ << ": container_mime_type=" << container_mime_type
+ << ", codecs=" << codecs
+ << ", use_aes_decryptor=" << use_aes_decryptor;
+
+ std::vector<std::string> codec_vector;
+ SplitCodecsToVector(codecs, &codec_vector, false);
+
+ // AesDecryptor decrypts the stream in the demuxer before it reaches the
+ // decoder so check whether the media format is supported when clear.
+ SupportsType support_result =
+ use_aes_decryptor
+ ? IsSupportedMediaFormat(container_mime_type, codec_vector)
+ : IsSupportedEncryptedMediaFormat(container_mime_type, codec_vector);
+ return (support_result == IsSupported);
+}
+
} // namespace
struct KeySystemConfigSelector::SelectionRequest {
@@ -286,6 +305,7 @@ KeySystemConfigSelector::KeySystemConfigSelector(
MediaPermission* media_permission)
: key_systems_(key_systems),
media_permission_(media_permission),
+ is_supported_media_type_cb_(base::BindRepeating(&IsSupportedMediaType)),
weak_factory_(this) {
DCHECK(key_systems_);
DCHECK(media_permission_);
@@ -293,24 +313,6 @@ KeySystemConfigSelector::KeySystemConfigSelector(
KeySystemConfigSelector::~KeySystemConfigSelector() = default;
-bool IsSupportedMediaFormat(const std::string& container_mime_type,
- const std::string& codecs,
- bool use_aes_decryptor) {
- DVLOG(3) << __func__ << ": container_mime_type=" << container_mime_type
- << ", codecs=" << codecs
- << ", use_aes_decryptor=" << use_aes_decryptor;
-
- std::vector<std::string> codec_vector;
- SplitCodecsToVector(codecs, &codec_vector, false);
- // AesDecryptor decrypts the stream in the demuxer before it reaches the
- // decoder so check whether the media format is supported when clear.
- SupportsType support_result =
- use_aes_decryptor
- ? IsSupportedMediaFormat(container_mime_type, codec_vector)
- : IsSupportedEncryptedMediaFormat(container_mime_type, codec_vector);
- return (support_result == IsSupported);
-}
-
// TODO(sandersd): Move contentType parsing from Blink to here so that invalid
// parameters can be rejected. http://crbug.com/449690, http://crbug.com/690131
bool KeySystemConfigSelector::IsSupportedContentType(
@@ -337,8 +339,9 @@ bool KeySystemConfigSelector::IsSupportedContentType(
// is done primarily to validate extended codecs, but it also ensures that the
// CDM cannot support codecs that Chrome does not (which could complicate the
// robustness algorithm).
- if (!IsSupportedMediaFormat(container_lower, codecs,
- CanUseAesDecryptor(key_system))) {
+ if (!is_supported_media_type_cb_.Run(
+ container_lower, codecs,
+ key_systems_->CanUseAesDecryptor(key_system))) {
DVLOG(3) << "Container mime type and codecs are not supported";
return false;
}
@@ -359,6 +362,31 @@ bool KeySystemConfigSelector::IsSupportedContentType(
return true;
}
+EmeConfigRule KeySystemConfigSelector::GetEncryptionSchemeConfigRule(
+ const std::string& key_system,
+ const EmeEncryptionScheme encryption_scheme) {
+ switch (encryption_scheme) {
+ // https://github.com/WICG/encrypted-media-encryption-scheme/blob/master/explainer.md
+ // "A missing or null value indicates that any encryption scheme is
+ // acceptable."
+ // To fully implement this, we need to get the config rules for both kCenc
+ // and kCbcs, which could be conflicting, and choose a final config rule
+ // somehow. If we end up choosing the rule for kCbcs, we could actually
+ // break legacy players which serves kCenc streams. Therefore, for backward
+ // compatibility and simplicity, we treat kNotSpecified the same as kCenc.
+ case EmeEncryptionScheme::kNotSpecified:
+ case EmeEncryptionScheme::kCenc:
+ return key_systems_->GetEncryptionSchemeConfigRule(key_system,
+ EncryptionMode::kCenc);
+ case EmeEncryptionScheme::kCbcs:
+ return key_systems_->GetEncryptionSchemeConfigRule(key_system,
+ EncryptionMode::kCbcs);
+ }
+
+ NOTREACHED();
+ return EmeConfigRule::NOT_SUPPORTED;
+}
+
bool KeySystemConfigSelector::GetSupportedCapabilities(
const std::string& key_system,
EmeMediaType media_type,
@@ -425,13 +453,23 @@ bool KeySystemConfigSelector::GetSupportedCapabilities(
DVLOG(3) << "The current robustness rule is not supported.";
continue;
}
+ proposed_config_state.AddRule(robustness_rule);
+
+ // Check for encryption scheme support.
+ // https://github.com/WICG/encrypted-media-encryption-scheme/blob/master/explainer.md.
+ EmeConfigRule encryption_scheme_rule =
+ GetEncryptionSchemeConfigRule(key_system, capability.encryption_scheme);
+ if (!proposed_config_state.IsRuleSupported(encryption_scheme_rule)) {
+ DVLOG(3) << "The current encryption scheme rule is not supported.";
+ continue;
+ }
// 3.13.1. Add requested media capability to supported media capabilities.
supported_media_capabilities->push_back(capability);
// 3.13.2. Add requested media capability to the {audio|video}Capabilities
// member of local accumulated configuration.
- proposed_config_state.AddRule(robustness_rule);
+ proposed_config_state.AddRule(encryption_scheme_rule);
// This is used as an intermediate variable so that |proposed_config_state|
// is updated in the next iteration of the for loop.
@@ -633,10 +671,9 @@ KeySystemConfigSelector::GetSupportedConfiguration(
session_type_rule = GetSessionTypeConfigRule(
key_systems_->GetPersistentLicenseSessionSupport(key_system));
break;
- case blink::WebEncryptedMediaSessionType::kPersistentReleaseMessage:
+ case blink::WebEncryptedMediaSessionType::kPersistentUsageRecord:
session_type_rule = GetSessionTypeConfigRule(
- key_systems_->GetPersistentReleaseMessageSessionSupport(
- key_system));
+ key_systems_->GetPersistentUsageRecordSessionSupport(key_system));
break;
}
diff --git a/chromium/media/blink/key_system_config_selector.h b/chromium/media/blink/key_system_config_selector.h
index 83bc6680635..f0dcd5df9f4 100644
--- a/chromium/media/blink/key_system_config_selector.h
+++ b/chromium/media/blink/key_system_config_selector.h
@@ -15,12 +15,12 @@
#include "base/memory/weak_ptr.h"
#include "media/base/eme_constants.h"
#include "media/blink/media_blink_export.h"
+#include "third_party/blink/public/platform/web_media_key_system_media_capability.h"
#include "third_party/blink/public/platform/web_vector.h"
namespace blink {
struct WebMediaKeySystemConfiguration;
-struct WebMediaKeySystemMediaCapability;
class WebString;
} // namespace blink
@@ -46,6 +46,15 @@ class MEDIA_BLINK_EXPORT KeySystemConfigSelector {
const CdmConfig&)> succeeded_cb,
base::Closure not_supported_cb);
+ using IsSupportedMediaTypeCB =
+ base::RepeatingCallback<bool(const std::string& container_mime_type,
+ const std::string& codecs,
+ bool use_aes_decryptor)>;
+
+ void SetIsSupportedMediaTypeCBForTesting(IsSupportedMediaTypeCB cb) {
+ is_supported_media_type_cb_ = std::move(cb);
+ }
+
private:
struct SelectionRequest;
class ConfigState;
@@ -82,8 +91,18 @@ class MEDIA_BLINK_EXPORT KeySystemConfigSelector {
const std::string& codecs,
ConfigState* config_state);
+ EmeConfigRule GetEncryptionSchemeConfigRule(
+ const std::string& key_system,
+ const blink::WebMediaKeySystemMediaCapability::EncryptionScheme
+ encryption_scheme);
+
const KeySystems* key_systems_;
MediaPermission* media_permission_;
+
+ // A callback used to check whether a media type is supported. Only set in
+ // tests. If null the implementation will check the support using MimeUtil.
+ IsSupportedMediaTypeCB is_supported_media_type_cb_;
+
base::WeakPtrFactory<KeySystemConfigSelector> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(KeySystemConfigSelector);
diff --git a/chromium/media/blink/key_system_config_selector_unittest.cc b/chromium/media/blink/key_system_config_selector_unittest.cc
index 25d2be09bf9..5de42a084bb 100644
--- a/chromium/media/blink/key_system_config_selector_unittest.cc
+++ b/chromium/media/blink/key_system_config_selector_unittest.cc
@@ -7,54 +7,162 @@
#include "base/bind.h"
#include "base/macros.h"
-#include "base/run_loop.h"
+#include "base/strings/pattern.h"
+#include "media/base/cdm_config.h"
#include "media/base/eme_constants.h"
#include "media/base/key_systems.h"
#include "media/base/media_permission.h"
+#include "media/base/mime_util.h"
#include "media/blink/key_system_config_selector.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/web_encrypted_media_types.h"
#include "third_party/blink/public/platform/web_media_key_system_configuration.h"
#include "third_party/blink/public/platform/web_string.h"
-#include "url/gurl.h"
namespace media {
namespace {
-const char kSupported[] = "supported";
-const char kRecommendIdentifier[] = "recommend_identifier";
-const char kRequireIdentifier[] = "require_identifier";
-const char kUnsupported[] = "unsupported";
-
-const char kSupportedVideoContainer[] = "video/webm";
-const char kSupportedAudioContainer[] = "audio/webm";
-const char kUnsupportedContainer[] = "video/foo";
-
+using blink::WebEncryptedMediaInitDataType;
+using blink::WebEncryptedMediaSessionType;
+using blink::WebMediaKeySystemConfiguration;
+using blink::WebMediaKeySystemMediaCapability;
+using blink::WebString;
+using MediaKeysRequirement = WebMediaKeySystemConfiguration::Requirement;
+using EncryptionScheme = WebMediaKeySystemMediaCapability::EncryptionScheme;
+
+// Key system strings. Clear Key support is hardcoded in KeySystemConfigSelector
+// so kClearKeyKeySystem is the real key system string. The rest key system
+// strings are for testing purpose only.
+const char kClearKeyKeySystem[] = "org.w3.clearkey";
+const char kSupportedKeySystem[] = "keysystem.test.supported";
+const char kUnsupportedKeySystem[] = "keysystem.test.unsupported";
+
+// Robustness strings for kSupportedKeySystem.
+const char kSupportedRobustness[] = "supported";
+const char kRecommendIdentifierRobustness[] = "recommend_identifier";
+const char kRequireIdentifierRobustness[] = "require_identifier";
+const char kDisallowHwSecureCodecRobustness[] = "disallow_hw_secure_codec";
+const char kRequireHwSecureCodecRobustness[] = "require_hw_secure_codec";
+const char kUnsupportedRobustness[] = "unsupported";
+
+// Test container mime types. Supported types are prefixed with audio/video so
+// that the test can perform EmeMediaType check.
+const char kSupportedVideoContainer[] = "video/supported";
+const char kSupportedAudioContainer[] = "audio/supported";
+const char kUnsupportedContainer[] = "video/unsupported";
+const char kInvalidContainer[] = "video/invalid";
+
+// The codec strings. Supported types are prefixed with audio/video so
+// that the test can perform EmeMediaType check.
// TODO(sandersd): Extended codec variants (requires proprietary codec support).
// TODO(xhwang): Platform Opus is not available on all Android versions, where
// some encrypted Opus related tests may fail. See PlatformHasOpusSupport()
// for more details.
-const char kSupportedAudioCodec[] = "opus";
-const char kSupportedVideoCodec[] = "vp8";
-const char kUnsupportedCodec[] = "foo";
-const char kUnsupportedCodecs[] = "vp8,foo";
-const char kSupportedVideoCodecs[] = "vp8,vp8";
+const char kSupportedAudioCodec[] = "audio_codec";
+const char kSupportedVideoCodec[] = "video_codec";
+const char kUnsupportedCodec[] = "unsupported_codec";
+const char kInvalidCodec[] = "foo";
+const char kRequireHwSecureCodec[] = "require_hw_secure_codec";
+const char kDisallowHwSecureCodec[] = "disallow_hw_secure_codec";
+const char kExtendedVideoCodec[] = "video_extended_codec.extended";
+const char kExtendedVideoCodecStripped[] = "video_extended_codec";
+// A special codec that is supported by the key systems, but is not supported
+// in IsSupportedMediaType() when |use_aes_decryptor| is true.
+const char kUnsupportedByAesDecryptorCodec[] = "unsupported_by_aes_decryptor";
+
+// Encryption schemes. For testing 'cenc' is supported, while 'cbcs' is not.
+// Note that WebMediaKeySystemMediaCapability defaults to kNotSpecified,
+// which is treated as 'cenc' by KeySystemConfigSelector.
+constexpr EncryptionScheme kSupportedEncryptionScheme = EncryptionScheme::kCenc;
+constexpr EncryptionScheme kDisallowHwSecureCodecEncryptionScheme =
+ EncryptionScheme::kCbcs;
+
+EncryptionMode ConvertEncryptionScheme(EncryptionScheme encryption_scheme) {
+ switch (encryption_scheme) {
+ case EncryptionScheme::kNotSpecified:
+ case EncryptionScheme::kCenc:
+ return EncryptionMode::kCenc;
+ case EncryptionScheme::kCbcs:
+ return EncryptionMode::kCbcs;
+ }
+
+ NOTREACHED();
+ return EncryptionMode::kUnencrypted;
+}
+
+WebString MakeCodecs(const std::string& a, const std::string& b) {
+ return WebString::FromUTF8(a + "," + b);
+}
+
+WebString GetSupportedVideoCodecs() {
+ return MakeCodecs(kSupportedVideoCodec, kSupportedVideoCodec);
+}
-const char kClearKey[] = "org.w3.clearkey";
+WebString GetSubsetSupportedVideoCodecs() {
+ return MakeCodecs(kSupportedVideoCodec, kUnsupportedCodec);
+}
+
+WebString GetSubsetInvalidVideoCodecs() {
+ return MakeCodecs(kSupportedVideoCodec, kInvalidCodec);
+}
+
+bool IsValidContainerMimeType(const std::string& container_mime_type) {
+ return container_mime_type != kInvalidContainer;
+}
+
+bool IsValidCodec(const std::string& codec) {
+ return codec != kInvalidCodec;
+}
+
+// Returns whether |type| is compatible with |media_type|.
+bool IsCompatibleWithEmeMediaType(EmeMediaType media_type,
+ const std::string& type) {
+ if (media_type == EmeMediaType::AUDIO && base::MatchPattern(type, "video*"))
+ return false;
+
+ if (media_type == EmeMediaType::VIDEO && base::MatchPattern(type, "audio*"))
+ return false;
+
+ return true;
+}
+
+// Pretend that we support all |container_mime_type| and |codecs| except for
+// those explicitly marked as invalid.
+bool IsSupportedMediaType(const std::string& container_mime_type,
+ const std::string& codecs,
+ bool use_aes_decryptor) {
+ if (container_mime_type == kInvalidContainer)
+ return false;
+
+ std::vector<std::string> codec_vector;
+ SplitCodecsToVector(codecs, &codec_vector, false);
+ for (const std::string& codec : codec_vector) {
+ DCHECK_NE(codec, kExtendedVideoCodecStripped)
+ << "codecs passed into this function should not be stripped";
+
+ if (codec == kInvalidCodec)
+ return false;
+
+ if (use_aes_decryptor && codec == kUnsupportedByAesDecryptorCodec)
+ return false;
+ }
+
+ return true;
+}
// The IDL for MediaKeySystemConfiguration specifies some defaults, so
// create a config object that mimics what would be created if an empty
// dictionary was passed in.
-blink::WebMediaKeySystemConfiguration EmptyConfiguration() {
+WebMediaKeySystemConfiguration EmptyConfiguration() {
// http://w3c.github.io/encrypted-media/#mediakeysystemconfiguration-dictionary
// If this member (sessionTypes) is not present when the dictionary
// is passed to requestMediaKeySystemAccess(), the dictionary will
// be treated as if this member is set to [ "temporary" ].
- std::vector<blink::WebEncryptedMediaSessionType> session_types;
- session_types.push_back(blink::WebEncryptedMediaSessionType::kTemporary);
+ std::vector<WebEncryptedMediaSessionType> session_types;
+ session_types.push_back(WebEncryptedMediaSessionType::kTemporary);
- blink::WebMediaKeySystemConfiguration config;
+ WebMediaKeySystemConfiguration config;
config.label = "";
config.session_types = session_types;
return config;
@@ -63,14 +171,14 @@ blink::WebMediaKeySystemConfiguration EmptyConfiguration() {
// EME spec requires that at least one of |video_capabilities| and
// |audio_capabilities| be specified. Add a single valid audio capability
// to the EmptyConfiguration().
-blink::WebMediaKeySystemConfiguration UsableConfiguration() {
+WebMediaKeySystemConfiguration UsableConfiguration() {
// Blink code parses the contentType into mimeType and codecs, so mimic
// that here.
- std::vector<blink::WebMediaKeySystemMediaCapability> audio_capabilities(1);
+ std::vector<WebMediaKeySystemMediaCapability> audio_capabilities(1);
audio_capabilities[0].mime_type = kSupportedAudioContainer;
audio_capabilities[0].codecs = kSupportedAudioCodec;
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.audio_capabilities = audio_capabilities;
return config;
}
@@ -81,9 +189,12 @@ class FakeKeySystems : public KeySystems {
bool IsSupportedKeySystem(const std::string& key_system) const override {
// Based on EME spec, Clear Key key system is always supported.
- if (key_system == kSupported || key_system == kClearKey)
- return true;
- return false;
+ return key_system == kSupportedKeySystem ||
+ key_system == kClearKeyKeySystem;
+ }
+
+ bool CanUseAesDecryptor(const std::string& key_system) const override {
+ return key_system == kClearKeyKeySystem;
}
// TODO(sandersd): Move implementation into KeySystemConfigSelector?
@@ -103,42 +214,62 @@ class FakeKeySystems : public KeySystems {
return false;
}
- bool IsEncryptionSchemeSupported(
+ EmeConfigRule GetEncryptionSchemeConfigRule(
const std::string& key_system,
EncryptionMode encryption_scheme) const override {
- // TODO(crbug.com/658026): Implement this once value passed from blink.
- NOTREACHED();
- return false;
+ if (encryption_scheme ==
+ ConvertEncryptionScheme(kSupportedEncryptionScheme)) {
+ return EmeConfigRule::SUPPORTED;
+ }
+
+ if (encryption_scheme ==
+ ConvertEncryptionScheme(kDisallowHwSecureCodecEncryptionScheme)) {
+ return EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
+ }
+
+ return EmeConfigRule::NOT_SUPPORTED;
}
- // TODO(sandersd): Secure codec simulation.
EmeConfigRule GetContentTypeConfigRule(
const std::string& key_system,
EmeMediaType media_type,
const std::string& container_mime_type,
const std::vector<std::string>& codecs) const override {
- if (container_mime_type == kUnsupportedContainer)
+ DCHECK(IsValidContainerMimeType(container_mime_type))
+ << "Invalid container mime type should not be passed in";
+ if (container_mime_type == kUnsupportedContainer ||
+ !IsCompatibleWithEmeMediaType(media_type, container_mime_type)) {
return EmeConfigRule::NOT_SUPPORTED;
- switch (media_type) {
- case EmeMediaType::AUDIO:
- DCHECK_EQ(kSupportedAudioContainer, container_mime_type);
- break;
- case EmeMediaType::VIDEO:
- DCHECK_EQ(kSupportedVideoContainer, container_mime_type);
- break;
}
+
+ bool hw_secure_codec_required_ = false;
+ bool hw_secure_codec_not_allowed_ = false;
+
for (const std::string& codec : codecs) {
- if (codec == kUnsupportedCodec)
+ DCHECK(IsValidCodec(codec)) << "Invalid codec should not be passed in";
+ DCHECK_NE(codec, kExtendedVideoCodec)
+ << "Extended codec should already been stripped";
+
+ if (codec == kUnsupportedCodec ||
+ !IsCompatibleWithEmeMediaType(media_type, codec)) {
return EmeConfigRule::NOT_SUPPORTED;
- switch (media_type) {
- case EmeMediaType::AUDIO:
- DCHECK_EQ(kSupportedAudioCodec, codec);
- break;
- case EmeMediaType::VIDEO:
- DCHECK_EQ(kSupportedVideoCodec, codec);
- break;
+ } else if (codec == kRequireHwSecureCodec) {
+ hw_secure_codec_required_ = true;
+ } else if (codec == kDisallowHwSecureCodec) {
+ hw_secure_codec_not_allowed_ = true;
}
}
+
+ if (hw_secure_codec_required_) {
+ if (hw_secure_codec_not_allowed_)
+ return EmeConfigRule::NOT_SUPPORTED;
+ else
+ return EmeConfigRule::HW_SECURE_CODECS_REQUIRED;
+ }
+
+ if (hw_secure_codec_not_allowed_)
+ return EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
+
return EmeConfigRule::SUPPORTED;
}
@@ -148,14 +279,19 @@ class FakeKeySystems : public KeySystems {
const std::string& requested_robustness) const override {
if (requested_robustness.empty())
return EmeConfigRule::SUPPORTED;
- if (requested_robustness == kUnsupported)
- return EmeConfigRule::NOT_SUPPORTED;
- if (requested_robustness == kRequireIdentifier)
+ if (requested_robustness == kSupportedRobustness)
+ return EmeConfigRule::SUPPORTED;
+ if (requested_robustness == kRequireIdentifierRobustness)
return EmeConfigRule::IDENTIFIER_REQUIRED;
- if (requested_robustness == kRecommendIdentifier)
+ if (requested_robustness == kRecommendIdentifierRobustness)
return EmeConfigRule::IDENTIFIER_RECOMMENDED;
- if (requested_robustness == kSupported)
- return EmeConfigRule::SUPPORTED;
+ if (requested_robustness == kDisallowHwSecureCodecRobustness)
+ return EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
+ if (requested_robustness == kRequireHwSecureCodecRobustness)
+ return EmeConfigRule::HW_SECURE_CODECS_REQUIRED;
+ if (requested_robustness == kUnsupportedRobustness)
+ return EmeConfigRule::NOT_SUPPORTED;
+
NOTREACHED();
return EmeConfigRule::NOT_SUPPORTED;
}
@@ -165,9 +301,9 @@ class FakeKeySystems : public KeySystems {
return persistent_license;
}
- EmeSessionTypeSupport GetPersistentReleaseMessageSessionSupport(
+ EmeSessionTypeSupport GetPersistentUsageRecordSessionSupport(
const std::string& key_system) const override {
- return persistent_release_message;
+ return persistent_usage_record;
}
EmeFeatureSupport GetPersistentStateSupport(
@@ -186,7 +322,7 @@ class FakeKeySystems : public KeySystems {
// INVALID so that they must be set in any test that needs them.
EmeSessionTypeSupport persistent_license = EmeSessionTypeSupport::INVALID;
- EmeSessionTypeSupport persistent_release_message =
+ EmeSessionTypeSupport persistent_usage_record =
EmeSessionTypeSupport::INVALID;
// Every test implicitly requires these, so they must be set. They are set to
@@ -231,50 +367,57 @@ class KeySystemConfigSelectorTest : public testing::Test {
media_permission_->requests = 0;
succeeded_count_ = 0;
not_supported_count_ = 0;
- KeySystemConfigSelector(key_systems_.get(), media_permission_.get())
- .SelectConfig(key_system_, configs_,
- base::Bind(&KeySystemConfigSelectorTest::OnSucceeded,
- base::Unretained(this)),
- base::Bind(&KeySystemConfigSelectorTest::OnNotSupported,
- base::Unretained(this)));
+ KeySystemConfigSelector key_system_config_selector(key_systems_.get(),
+ media_permission_.get());
+
+ key_system_config_selector.SetIsSupportedMediaTypeCBForTesting(
+ base::BindRepeating(&IsSupportedMediaType));
+
+ key_system_config_selector.SelectConfig(
+ key_system_, configs_,
+ base::BindRepeating(&KeySystemConfigSelectorTest::OnSucceeded,
+ base::Unretained(this)),
+ base::BindRepeating(&KeySystemConfigSelectorTest::OnNotSupported,
+ base::Unretained(this)));
}
- bool SelectConfigReturnsConfig() {
+ void SelectConfigReturnsConfig() {
SelectConfig();
EXPECT_EQ(0, media_permission_->requests);
EXPECT_EQ(1, succeeded_count_);
EXPECT_EQ(0, not_supported_count_);
- return (succeeded_count_ != 0);
+ ASSERT_TRUE(succeeded_count_ != 0);
}
- bool SelectConfigReturnsError() {
+ void SelectConfigReturnsError() {
SelectConfig();
EXPECT_EQ(0, media_permission_->requests);
EXPECT_EQ(0, succeeded_count_);
EXPECT_EQ(1, not_supported_count_);
- return (not_supported_count_ != 0);
+ ASSERT_TRUE(not_supported_count_ != 0);
}
- bool SelectConfigRequestsPermissionAndReturnsConfig() {
+ void SelectConfigRequestsPermissionAndReturnsConfig() {
SelectConfig();
EXPECT_EQ(1, media_permission_->requests);
EXPECT_EQ(1, succeeded_count_);
EXPECT_EQ(0, not_supported_count_);
- return (media_permission_->requests != 0 && succeeded_count_ != 0);
+ ASSERT_TRUE(media_permission_->requests != 0 && succeeded_count_ != 0);
}
- bool SelectConfigRequestsPermissionAndReturnsError() {
+ void SelectConfigRequestsPermissionAndReturnsError() {
SelectConfig();
EXPECT_EQ(1, media_permission_->requests);
EXPECT_EQ(0, succeeded_count_);
EXPECT_EQ(1, not_supported_count_);
- return (media_permission_->requests != 0 && not_supported_count_ != 0);
+ ASSERT_TRUE(media_permission_->requests != 0 && not_supported_count_ != 0);
}
- void OnSucceeded(const blink::WebMediaKeySystemConfiguration& result,
+ void OnSucceeded(const WebMediaKeySystemConfiguration& config,
const CdmConfig& cdm_config) {
succeeded_count_++;
- config_ = result;
+ config_ = config;
+ cdm_config_ = cdm_config;
}
void OnNotSupported() { not_supported_count_++; }
@@ -283,11 +426,12 @@ class KeySystemConfigSelectorTest : public testing::Test {
std::unique_ptr<FakeMediaPermission> media_permission_;
// Held values for the call to SelectConfig().
- blink::WebString key_system_ = blink::WebString::FromUTF8(kSupported);
- std::vector<blink::WebMediaKeySystemConfiguration> configs_;
+ WebString key_system_ = WebString::FromUTF8(kSupportedKeySystem);
+ std::vector<WebMediaKeySystemConfiguration> configs_;
- // Holds the last successful accumulated configuration.
- blink::WebMediaKeySystemConfiguration config_;
+ // Holds the selected configuration and CdmConfig.
+ WebMediaKeySystemConfiguration config_;
+ CdmConfig cdm_config_;
int succeeded_count_;
int not_supported_count_;
@@ -298,11 +442,11 @@ class KeySystemConfigSelectorTest : public testing::Test {
// --- Basics ---
TEST_F(KeySystemConfigSelectorTest, NoConfigs) {
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
TEST_F(KeySystemConfigSelectorTest, DefaultConfig) {
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
// label = "";
ASSERT_EQ("", config.label);
@@ -317,26 +461,23 @@ TEST_F(KeySystemConfigSelectorTest, DefaultConfig) {
ASSERT_EQ(0u, config.video_capabilities.size());
// distinctiveIdentifier = "optional";
- ASSERT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kOptional,
- config.distinctive_identifier);
+ ASSERT_EQ(MediaKeysRequirement::kOptional, config.distinctive_identifier);
// persistentState = "optional";
- ASSERT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kOptional,
- config.persistent_state);
+ ASSERT_EQ(MediaKeysRequirement::kOptional, config.persistent_state);
// If this member is not present when the dictionary is passed to
// requestMediaKeySystemAccess(), the dictionary will be treated as
// if this member is set to [ "temporary" ].
ASSERT_EQ(1u, config.session_types.size());
- ASSERT_EQ(blink::WebEncryptedMediaSessionType::kTemporary,
- config.session_types[0]);
+ ASSERT_EQ(WebEncryptedMediaSessionType::kTemporary, config.session_types[0]);
}
TEST_F(KeySystemConfigSelectorTest, EmptyConfig) {
// EME spec requires that at least one of |video_capabilities| and
// |audio_capabilities| be specified.
configs_.push_back(EmptyConfiguration());
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
// Most of the tests below assume that the the usable config is valid.
@@ -346,26 +487,28 @@ TEST_F(KeySystemConfigSelectorTest, EmptyConfig) {
TEST_F(KeySystemConfigSelectorTest, UsableConfig) {
configs_.push_back(UsableConfiguration());
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
+
EXPECT_EQ("", config_.label);
EXPECT_TRUE(config_.init_data_types.IsEmpty());
EXPECT_EQ(1u, config_.audio_capabilities.size());
EXPECT_TRUE(config_.video_capabilities.IsEmpty());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kNotAllowed,
- config_.distinctive_identifier);
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kNotAllowed,
- config_.persistent_state);
+ EXPECT_EQ(MediaKeysRequirement::kNotAllowed, config_.distinctive_identifier);
+ EXPECT_EQ(MediaKeysRequirement::kNotAllowed, config_.persistent_state);
ASSERT_EQ(1u, config_.session_types.size());
- EXPECT_EQ(blink::WebEncryptedMediaSessionType::kTemporary,
- config_.session_types[0]);
+ EXPECT_EQ(WebEncryptedMediaSessionType::kTemporary, config_.session_types[0]);
+
+ EXPECT_FALSE(cdm_config_.allow_distinctive_identifier);
+ EXPECT_FALSE(cdm_config_.allow_persistent_state);
+ EXPECT_FALSE(cdm_config_.use_hw_secure_codecs);
}
TEST_F(KeySystemConfigSelectorTest, Label) {
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
+ auto config = UsableConfiguration();
config.label = "foo";
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
EXPECT_EQ("foo", config_.label);
}
@@ -375,19 +518,19 @@ TEST_F(KeySystemConfigSelectorTest, Label) {
TEST_F(KeySystemConfigSelectorTest, KeySystem_NonAscii) {
key_system_ = "\xde\xad\xbe\xef";
configs_.push_back(UsableConfiguration());
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
TEST_F(KeySystemConfigSelectorTest, KeySystem_Unsupported) {
- key_system_ = kUnsupported;
+ key_system_ = kUnsupportedKeySystem;
configs_.push_back(UsableConfiguration());
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
TEST_F(KeySystemConfigSelectorTest, KeySystem_ClearKey) {
- key_system_ = kClearKey;
+ key_system_ = kClearKeyKeySystem;
configs_.push_back(UsableConfiguration());
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
}
// --- Disable EncryptedMedia ---
@@ -396,59 +539,58 @@ TEST_F(KeySystemConfigSelectorTest, EncryptedMediaDisabled_ClearKey) {
media_permission_->is_encrypted_media_enabled = false;
// Clear Key key system is always supported.
- key_system_ = kClearKey;
+ key_system_ = kClearKeyKeySystem;
configs_.push_back(UsableConfiguration());
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
}
TEST_F(KeySystemConfigSelectorTest, EncryptedMediaDisabled_Supported) {
media_permission_->is_encrypted_media_enabled = false;
// Other key systems are not supported.
- key_system_ = kSupported;
+ key_system_ = kSupportedKeySystem;
configs_.push_back(UsableConfiguration());
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
// --- initDataTypes ---
TEST_F(KeySystemConfigSelectorTest, InitDataTypes_Empty) {
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
+ auto config = UsableConfiguration();
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
}
TEST_F(KeySystemConfigSelectorTest, InitDataTypes_NoneSupported) {
key_systems_->init_data_type_webm_supported_ = true;
- std::vector<blink::WebEncryptedMediaInitDataType> init_data_types;
- init_data_types.push_back(blink::WebEncryptedMediaInitDataType::kUnknown);
- init_data_types.push_back(blink::WebEncryptedMediaInitDataType::kCenc);
+ std::vector<WebEncryptedMediaInitDataType> init_data_types;
+ init_data_types.push_back(WebEncryptedMediaInitDataType::kUnknown);
+ init_data_types.push_back(WebEncryptedMediaInitDataType::kCenc);
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
+ auto config = UsableConfiguration();
config.init_data_types = init_data_types;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
TEST_F(KeySystemConfigSelectorTest, InitDataTypes_SubsetSupported) {
key_systems_->init_data_type_webm_supported_ = true;
- std::vector<blink::WebEncryptedMediaInitDataType> init_data_types;
- init_data_types.push_back(blink::WebEncryptedMediaInitDataType::kUnknown);
- init_data_types.push_back(blink::WebEncryptedMediaInitDataType::kCenc);
- init_data_types.push_back(blink::WebEncryptedMediaInitDataType::kWebm);
+ std::vector<WebEncryptedMediaInitDataType> init_data_types;
+ init_data_types.push_back(WebEncryptedMediaInitDataType::kUnknown);
+ init_data_types.push_back(WebEncryptedMediaInitDataType::kCenc);
+ init_data_types.push_back(WebEncryptedMediaInitDataType::kWebm);
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
+ auto config = UsableConfiguration();
config.init_data_types = init_data_types;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ(1u, config_.init_data_types.size());
- EXPECT_EQ(blink::WebEncryptedMediaInitDataType::kWebm,
- config_.init_data_types[0]);
+ EXPECT_EQ(WebEncryptedMediaInitDataType::kWebm, config_.init_data_types[0]);
}
// --- distinctiveIdentifier ---
@@ -456,65 +598,60 @@ TEST_F(KeySystemConfigSelectorTest, InitDataTypes_SubsetSupported) {
TEST_F(KeySystemConfigSelectorTest, DistinctiveIdentifier_Default) {
key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.distinctive_identifier =
- blink::WebMediaKeySystemConfiguration::Requirement::kOptional;
+ auto config = UsableConfiguration();
+ config.distinctive_identifier = MediaKeysRequirement::kOptional;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kNotAllowed,
- config_.distinctive_identifier);
+ SelectConfigReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kNotAllowed, config_.distinctive_identifier);
+ EXPECT_FALSE(cdm_config_.allow_distinctive_identifier);
}
TEST_F(KeySystemConfigSelectorTest, DistinctiveIdentifier_Forced) {
media_permission_->is_granted = true;
key_systems_->distinctive_identifier = EmeFeatureSupport::ALWAYS_ENABLED;
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.distinctive_identifier =
- blink::WebMediaKeySystemConfiguration::Requirement::kOptional;
+ auto config = UsableConfiguration();
+ config.distinctive_identifier = MediaKeysRequirement::kOptional;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigRequestsPermissionAndReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kRequired,
- config_.distinctive_identifier);
+ SelectConfigRequestsPermissionAndReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kRequired, config_.distinctive_identifier);
+ EXPECT_TRUE(cdm_config_.allow_distinctive_identifier);
}
TEST_F(KeySystemConfigSelectorTest, DistinctiveIdentifier_Blocked) {
key_systems_->distinctive_identifier = EmeFeatureSupport::NOT_SUPPORTED;
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.distinctive_identifier =
- blink::WebMediaKeySystemConfiguration::Requirement::kRequired;
+ auto config = UsableConfiguration();
+ config.distinctive_identifier = MediaKeysRequirement::kRequired;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
TEST_F(KeySystemConfigSelectorTest, DistinctiveIdentifier_RequestsPermission) {
media_permission_->is_granted = true;
key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.distinctive_identifier =
- blink::WebMediaKeySystemConfiguration::Requirement::kRequired;
+ auto config = UsableConfiguration();
+ config.distinctive_identifier = MediaKeysRequirement::kRequired;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigRequestsPermissionAndReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kRequired,
- config_.distinctive_identifier);
+ SelectConfigRequestsPermissionAndReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kRequired, config_.distinctive_identifier);
+ EXPECT_TRUE(cdm_config_.allow_distinctive_identifier);
}
TEST_F(KeySystemConfigSelectorTest, DistinctiveIdentifier_RespectsPermission) {
media_permission_->is_granted = false;
key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.distinctive_identifier =
- blink::WebMediaKeySystemConfiguration::Requirement::kRequired;
+ auto config = UsableConfiguration();
+ config.distinctive_identifier = MediaKeysRequirement::kRequired;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigRequestsPermissionAndReturnsError());
+ SelectConfigRequestsPermissionAndReturnsError();
}
// --- persistentState ---
@@ -522,52 +659,49 @@ TEST_F(KeySystemConfigSelectorTest, DistinctiveIdentifier_RespectsPermission) {
TEST_F(KeySystemConfigSelectorTest, PersistentState_Default) {
key_systems_->persistent_state = EmeFeatureSupport::REQUESTABLE;
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.persistent_state =
- blink::WebMediaKeySystemConfiguration::Requirement::kOptional;
+ auto config = UsableConfiguration();
+ config.persistent_state = MediaKeysRequirement::kOptional;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kNotAllowed,
- config_.persistent_state);
+ SelectConfigReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kNotAllowed, config_.persistent_state);
+ EXPECT_FALSE(cdm_config_.allow_persistent_state);
}
TEST_F(KeySystemConfigSelectorTest, PersistentState_Forced) {
key_systems_->persistent_state = EmeFeatureSupport::ALWAYS_ENABLED;
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.persistent_state =
- blink::WebMediaKeySystemConfiguration::Requirement::kOptional;
+ auto config = UsableConfiguration();
+ config.persistent_state = MediaKeysRequirement::kOptional;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kRequired,
- config_.persistent_state);
+ SelectConfigReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kRequired, config_.persistent_state);
+ EXPECT_TRUE(cdm_config_.allow_persistent_state);
}
TEST_F(KeySystemConfigSelectorTest, PersistentState_Blocked) {
key_systems_->persistent_state = EmeFeatureSupport::ALWAYS_ENABLED;
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.persistent_state =
- blink::WebMediaKeySystemConfiguration::Requirement::kNotAllowed;
+ auto config = UsableConfiguration();
+ config.persistent_state = MediaKeysRequirement::kNotAllowed;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
// --- sessionTypes ---
TEST_F(KeySystemConfigSelectorTest, SessionTypes_Empty) {
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
+ auto config = UsableConfiguration();
// Usable configuration has [ "temporary" ].
- std::vector<blink::WebEncryptedMediaSessionType> session_types;
+ std::vector<WebEncryptedMediaSessionType> session_types;
config.session_types = session_types;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
EXPECT_TRUE(config_.session_types.IsEmpty());
}
@@ -576,16 +710,15 @@ TEST_F(KeySystemConfigSelectorTest, SessionTypes_SubsetSupported) {
key_systems_->persistent_state = EmeFeatureSupport::REQUESTABLE;
key_systems_->persistent_license = EmeSessionTypeSupport::NOT_SUPPORTED;
- std::vector<blink::WebEncryptedMediaSessionType> session_types;
- session_types.push_back(blink::WebEncryptedMediaSessionType::kTemporary);
- session_types.push_back(
- blink::WebEncryptedMediaSessionType::kPersistentLicense);
+ std::vector<WebEncryptedMediaSessionType> session_types;
+ session_types.push_back(WebEncryptedMediaSessionType::kTemporary);
+ session_types.push_back(WebEncryptedMediaSessionType::kPersistentLicense);
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
+ auto config = UsableConfiguration();
config.session_types = session_types;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
TEST_F(KeySystemConfigSelectorTest, SessionTypes_AllSupported) {
@@ -593,24 +726,20 @@ TEST_F(KeySystemConfigSelectorTest, SessionTypes_AllSupported) {
key_systems_->persistent_state = EmeFeatureSupport::REQUESTABLE;
key_systems_->persistent_license = EmeSessionTypeSupport::SUPPORTED;
- std::vector<blink::WebEncryptedMediaSessionType> session_types;
- session_types.push_back(blink::WebEncryptedMediaSessionType::kTemporary);
- session_types.push_back(
- blink::WebEncryptedMediaSessionType::kPersistentLicense);
+ std::vector<WebEncryptedMediaSessionType> session_types;
+ session_types.push_back(WebEncryptedMediaSessionType::kTemporary);
+ session_types.push_back(WebEncryptedMediaSessionType::kPersistentLicense);
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.persistent_state =
- blink::WebMediaKeySystemConfiguration::Requirement::kOptional;
+ auto config = UsableConfiguration();
+ config.persistent_state = MediaKeysRequirement::kOptional;
config.session_types = session_types;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kRequired,
- config_.persistent_state);
+ SelectConfigReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kRequired, config_.persistent_state);
ASSERT_EQ(2u, config_.session_types.size());
- EXPECT_EQ(blink::WebEncryptedMediaSessionType::kTemporary,
- config_.session_types[0]);
- EXPECT_EQ(blink::WebEncryptedMediaSessionType::kPersistentLicense,
+ EXPECT_EQ(WebEncryptedMediaSessionType::kTemporary, config_.session_types[0]);
+ EXPECT_EQ(WebEncryptedMediaSessionType::kPersistentLicense,
config_.session_types[1]);
}
@@ -621,169 +750,288 @@ TEST_F(KeySystemConfigSelectorTest, SessionTypes_PermissionCanBeRequired) {
key_systems_->persistent_license =
EmeSessionTypeSupport::SUPPORTED_WITH_IDENTIFIER;
- std::vector<blink::WebEncryptedMediaSessionType> session_types;
- session_types.push_back(
- blink::WebEncryptedMediaSessionType::kPersistentLicense);
+ std::vector<WebEncryptedMediaSessionType> session_types;
+ session_types.push_back(WebEncryptedMediaSessionType::kPersistentLicense);
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
- config.distinctive_identifier =
- blink::WebMediaKeySystemConfiguration::Requirement::kOptional;
- config.persistent_state =
- blink::WebMediaKeySystemConfiguration::Requirement::kOptional;
+ auto config = UsableConfiguration();
+ config.distinctive_identifier = MediaKeysRequirement::kOptional;
+ config.persistent_state = MediaKeysRequirement::kOptional;
config.session_types = session_types;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigRequestsPermissionAndReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kRequired,
- config_.distinctive_identifier);
+ SelectConfigRequestsPermissionAndReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kRequired, config_.distinctive_identifier);
}
// --- videoCapabilities ---
TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_Empty) {
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
+ auto config = UsableConfiguration();
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+}
+
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_ExtendedCodec) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kExtendedVideoCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+}
+
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_InvalidContainer) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kInvalidContainer;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsError();
}
-TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_NoneSupported) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(2);
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_UnsupportedContainer) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kUnsupportedContainer;
- video_capabilities[1].content_type = "b";
- video_capabilities[1].mime_type = kSupportedVideoContainer;
- video_capabilities[1].codecs = kUnsupportedCodec;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_IncompatibleContainer) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedAudioContainer;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_InvalidCodec) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kInvalidCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_UnsupportedCodec) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kInvalidContainer;
+ video_capabilities[0].codecs = kUnsupportedCodec;
+
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_IncompatibleCodec) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kSupportedAudioCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ VideoCapabilities_UnsupportedByAesDecryptorCodec_ClearKey) {
+ key_system_ = kClearKeyKeySystem;
+
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kUnsupportedByAesDecryptorCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ VideoCapabilities_UnsupportedByAesDecryptorCodec) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kUnsupportedByAesDecryptorCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.video_capabilities.size());
}
TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_SubsetSupported) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(2);
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
video_capabilities[0].content_type = "a";
- video_capabilities[0].mime_type = kUnsupportedContainer;
+ video_capabilities[0].mime_type = kInvalidContainer;
video_capabilities[1].content_type = "b";
video_capabilities[1].mime_type = kSupportedVideoContainer;
video_capabilities[1].codecs = kSupportedVideoCodec;
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ(1u, config_.video_capabilities.size());
EXPECT_EQ("b", config_.video_capabilities[0].content_type);
EXPECT_EQ(kSupportedVideoContainer, config_.video_capabilities[0].mime_type);
}
TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_AllSupported) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(2);
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
- video_capabilities[0].codecs = kSupportedVideoCodecs;
+ video_capabilities[0].codecs = GetSupportedVideoCodecs();
video_capabilities[1].content_type = "b";
video_capabilities[1].mime_type = kSupportedVideoContainer;
- video_capabilities[1].codecs = kSupportedVideoCodecs;
+ video_capabilities[1].codecs = GetSupportedVideoCodecs();
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ(2u, config_.video_capabilities.size());
EXPECT_EQ("a", config_.video_capabilities[0].content_type);
EXPECT_EQ("b", config_.video_capabilities[1].content_type);
}
-TEST_F(KeySystemConfigSelectorTest,
- VideoCapabilities_Codecs_SubsetSupported) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+// --- videoCapabilities Codecs ---
+
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_Codecs_SubsetInvalid) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
- video_capabilities[0].codecs = kUnsupportedCodecs;
+ video_capabilities[0].codecs = GetSubsetInvalidVideoCodecs();
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_Codecs_SubsetSupported) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = GetSubsetSupportedVideoCodecs();
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
}
TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_Codecs_AllSupported) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
- video_capabilities[0].codecs = kSupportedVideoCodecs;
+ video_capabilities[0].codecs = GetSupportedVideoCodecs();
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ(1u, config_.video_capabilities.size());
- EXPECT_EQ(kSupportedVideoCodecs, config_.video_capabilities[0].codecs);
+ EXPECT_EQ(GetSupportedVideoCodecs(), config_.video_capabilities[0].codecs);
}
TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_Missing_Codecs) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
+// --- videoCapabilities Robustness ---
+
TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_Robustness_Empty) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
video_capabilities[0].codecs = kSupportedVideoCodec;
ASSERT_TRUE(video_capabilities[0].robustness.IsEmpty());
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ(1u, config_.video_capabilities.size());
EXPECT_TRUE(config_.video_capabilities[0].robustness.IsEmpty());
}
TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_Robustness_Supported) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
video_capabilities[0].codecs = kSupportedVideoCodec;
- video_capabilities[0].robustness = kSupported;
+ video_capabilities[0].robustness = kSupportedRobustness;
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ(1u, config_.video_capabilities.size());
- EXPECT_EQ(kSupported, config_.video_capabilities[0].robustness);
+ EXPECT_EQ(kSupportedRobustness, config_.video_capabilities[0].robustness);
}
TEST_F(KeySystemConfigSelectorTest, VideoCapabilities_Robustness_Unsupported) {
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
video_capabilities[0].codecs = kSupportedVideoCodec;
- video_capabilities[0].robustness = kUnsupported;
+ video_capabilities[0].robustness = kUnsupportedRobustness;
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsError());
+ SelectConfigReturnsError();
}
TEST_F(KeySystemConfigSelectorTest,
@@ -791,19 +1039,18 @@ TEST_F(KeySystemConfigSelectorTest,
media_permission_->is_granted = true;
key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
- std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
video_capabilities[0].codecs = kSupportedVideoCodec;
- video_capabilities[0].robustness = kRequireIdentifier;
+ video_capabilities[0].robustness = kRequireIdentifierRobustness;
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigRequestsPermissionAndReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kRequired,
- config_.distinctive_identifier);
+ SelectConfigRequestsPermissionAndReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kRequired, config_.distinctive_identifier);
}
TEST_F(KeySystemConfigSelectorTest,
@@ -811,19 +1058,266 @@ TEST_F(KeySystemConfigSelectorTest,
media_permission_->is_granted = false;
key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
+ video_capabilities[0].robustness = kRecommendIdentifierRobustness;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigRequestsPermissionAndReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kNotAllowed, config_.distinctive_identifier);
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ VideoCapabilities_EncryptionScheme_Supported) {
std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
video_capabilities[0].content_type = "a";
video_capabilities[0].mime_type = kSupportedVideoContainer;
video_capabilities[0].codecs = kSupportedVideoCodec;
- video_capabilities[0].robustness = kRecommendIdentifier;
+ video_capabilities[0].encryption_scheme = kSupportedEncryptionScheme;
blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
config.video_capabilities = video_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigRequestsPermissionAndReturnsConfig());
- EXPECT_EQ(blink::WebMediaKeySystemConfiguration::Requirement::kNotAllowed,
- config_.distinctive_identifier);
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ(kSupportedEncryptionScheme,
+ config_.video_capabilities[0].encryption_scheme);
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ VideoCapabilities_EncryptionScheme_DisallowHwSecureCodec) {
+ std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
+ video_capabilities[0].encryption_scheme =
+ kDisallowHwSecureCodecEncryptionScheme;
+
+ blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ(kDisallowHwSecureCodecEncryptionScheme,
+ config_.video_capabilities[0].encryption_scheme);
+}
+
+// --- HW Secure Codecs and Robustness ---
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_RequireHwSecureCodec) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kRequireHwSecureCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_DisallowHwSecureCodec) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kDisallowHwSecureCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ EXPECT_FALSE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ HwSecureCodec_IncompatibleCodecAndRobustness) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kDisallowHwSecureCodec;
+ video_capabilities[0].robustness = kRequireHwSecureCodecRobustness;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_CompatibleCodecs) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs =
+ MakeCodecs(kRequireHwSecureCodec, kSupportedVideoCodec);
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_IncompatibleCodecs) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs =
+ MakeCodecs(kRequireHwSecureCodec, kDisallowHwSecureCodec);
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_CompatibleCapabilityCodec) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
+ video_capabilities[0].content_type = "require_hw_secure_codec";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kRequireHwSecureCodec;
+ video_capabilities[1].content_type = "supported_video_codec";
+ video_capabilities[1].mime_type = kSupportedVideoContainer;
+ video_capabilities[1].codecs = kSupportedVideoCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(2u, config_.video_capabilities.size());
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_RequireAndDisallow) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
+ video_capabilities[0].content_type = "require_hw_secure_codec";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kRequireHwSecureCodec;
+ video_capabilities[1].content_type = "disallow_hw_secure_codec";
+ video_capabilities[1].mime_type = kSupportedVideoContainer;
+ video_capabilities[1].codecs = kDisallowHwSecureCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ("require_hw_secure_codec",
+ config_.video_capabilities[0].content_type);
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_DisallowAndRequire) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
+ video_capabilities[0].content_type = "disallow_hw_secure_codec";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kDisallowHwSecureCodec;
+ video_capabilities[1].content_type = "require_hw_secure_codec";
+ video_capabilities[1].mime_type = kSupportedVideoContainer;
+ video_capabilities[1].codecs = kRequireHwSecureCodec;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ("disallow_hw_secure_codec",
+ config_.video_capabilities[0].content_type);
+ EXPECT_FALSE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_IncompatibleCapabilities) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
+ video_capabilities[0].content_type = "require_hw_secure_codec";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
+ video_capabilities[0].robustness = kRequireHwSecureCodecRobustness;
+ video_capabilities[1].content_type = "disallow_hw_secure_codec";
+ video_capabilities[1].mime_type = kSupportedVideoContainer;
+ video_capabilities[1].codecs = kDisallowHwSecureCodec;
+ video_capabilities[1].robustness = kUnsupportedRobustness;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ("require_hw_secure_codec",
+ config_.video_capabilities[0].content_type);
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ HwSecureCodec_UnsupportedCapabilityNotAffectingRules) {
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
+ video_capabilities[0].content_type = "unsupported_robustness";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kDisallowHwSecureCodec;
+ video_capabilities[0].robustness = kUnsupportedRobustness;
+ video_capabilities[1].content_type = "require_hw_secure_codec";
+ video_capabilities[1].mime_type = kSupportedVideoContainer;
+ video_capabilities[1].codecs = kRequireHwSecureCodec;
+ video_capabilities[1].robustness = kRequireHwSecureCodecRobustness;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ("require_hw_secure_codec",
+ config_.video_capabilities[0].content_type);
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest, HwSecureCodec_EncryptionScheme_Supported) {
+ std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kRequireHwSecureCodec;
+ video_capabilities[0].encryption_scheme = kSupportedEncryptionScheme;
+
+ blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ(kSupportedEncryptionScheme,
+ config_.video_capabilities[0].encryption_scheme);
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ HwSecureCodec_EncryptionScheme_DisallowHwSecureCodec) {
+ std::vector<blink::WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kRequireHwSecureCodec;
+ video_capabilities[0].encryption_scheme =
+ kDisallowHwSecureCodecEncryptionScheme;
+
+ blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
}
// --- audioCapabilities ---
@@ -831,49 +1325,145 @@ TEST_F(KeySystemConfigSelectorTest,
// additional testing is done.
TEST_F(KeySystemConfigSelectorTest, AudioCapabilities_SubsetSupported) {
- std::vector<blink::WebMediaKeySystemMediaCapability> audio_capabilities(2);
+ std::vector<WebMediaKeySystemMediaCapability> audio_capabilities(2);
audio_capabilities[0].content_type = "a";
- audio_capabilities[0].mime_type = kUnsupportedContainer;
+ audio_capabilities[0].mime_type = kInvalidContainer;
audio_capabilities[1].content_type = "b";
audio_capabilities[1].mime_type = kSupportedAudioContainer;
audio_capabilities[1].codecs = kSupportedAudioCodec;
- blink::WebMediaKeySystemConfiguration config = EmptyConfiguration();
+ auto config = EmptyConfiguration();
config.audio_capabilities = audio_capabilities;
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ(1u, config_.audio_capabilities.size());
EXPECT_EQ("b", config_.audio_capabilities[0].content_type);
EXPECT_EQ(kSupportedAudioContainer, config_.audio_capabilities[0].mime_type);
}
+// --- audioCapabilities and videoCapabilities ---
+
+TEST_F(KeySystemConfigSelectorTest, AudioAndVideoCapabilities_AllSupported) {
+ std::vector<WebMediaKeySystemMediaCapability> audio_capabilities(1);
+ audio_capabilities[0].content_type = "a";
+ audio_capabilities[0].mime_type = kSupportedAudioContainer;
+ audio_capabilities[0].codecs = kSupportedAudioCodec;
+
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "b";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
+
+ auto config = EmptyConfiguration();
+ config.audio_capabilities = audio_capabilities;
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.audio_capabilities.size());
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ AudioAndVideoCapabilities_AudioUnsupported) {
+ std::vector<WebMediaKeySystemMediaCapability> audio_capabilities(1);
+ audio_capabilities[0].content_type = "a";
+ audio_capabilities[0].mime_type = kUnsupportedContainer;
+ audio_capabilities[0].codecs = kSupportedAudioCodec;
+
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "b";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
+
+ auto config = EmptyConfiguration();
+ config.audio_capabilities = audio_capabilities;
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ AudioAndVideoCapabilities_VideoUnsupported) {
+ std::vector<WebMediaKeySystemMediaCapability> audio_capabilities(1);
+ audio_capabilities[0].content_type = "a";
+ audio_capabilities[0].mime_type = kSupportedAudioContainer;
+ audio_capabilities[0].codecs = kSupportedAudioCodec;
+
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "b";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kUnsupportedCodec;
+
+ auto config = EmptyConfiguration();
+ config.audio_capabilities = audio_capabilities;
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+// Only "a2" and "v2" are supported types.
+TEST_F(KeySystemConfigSelectorTest, AudioAndVideoCapabilities_SubsetSupported) {
+ std::vector<WebMediaKeySystemMediaCapability> audio_capabilities(3);
+ audio_capabilities[0].content_type = "a1";
+ audio_capabilities[0].mime_type = kUnsupportedContainer;
+ audio_capabilities[0].codecs = kSupportedAudioCodec;
+ audio_capabilities[1].content_type = "a2";
+ audio_capabilities[1].mime_type = kSupportedAudioContainer;
+ audio_capabilities[1].codecs = kSupportedAudioCodec;
+ audio_capabilities[2].content_type = "a3";
+ audio_capabilities[2].mime_type = kSupportedAudioContainer;
+ audio_capabilities[2].codecs = kUnsupportedCodec;
+
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
+ video_capabilities[0].content_type = "v1";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kUnsupportedCodec;
+ video_capabilities[1].content_type = "v2";
+ video_capabilities[1].mime_type = kSupportedVideoContainer;
+ video_capabilities[1].codecs = kSupportedVideoCodec;
+
+ auto config = EmptyConfiguration();
+ config.audio_capabilities = audio_capabilities;
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsConfig();
+ ASSERT_EQ(1u, config_.audio_capabilities.size());
+ EXPECT_EQ("a2", config_.audio_capabilities[0].content_type);
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ("v2", config_.video_capabilities[0].content_type);
+}
+
// --- Multiple configurations ---
TEST_F(KeySystemConfigSelectorTest, Configurations_AllSupported) {
- blink::WebMediaKeySystemConfiguration config = UsableConfiguration();
+ auto config = UsableConfiguration();
config.label = "a";
configs_.push_back(config);
config.label = "b";
configs_.push_back(config);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ("a", config_.label);
}
TEST_F(KeySystemConfigSelectorTest, Configurations_SubsetSupported) {
- blink::WebMediaKeySystemConfiguration config1 = UsableConfiguration();
+ auto config1 = UsableConfiguration();
config1.label = "a";
- std::vector<blink::WebEncryptedMediaInitDataType> init_data_types;
- init_data_types.push_back(blink::WebEncryptedMediaInitDataType::kUnknown);
+ std::vector<WebEncryptedMediaInitDataType> init_data_types;
+ init_data_types.push_back(WebEncryptedMediaInitDataType::kUnknown);
config1.init_data_types = init_data_types;
configs_.push_back(config1);
- blink::WebMediaKeySystemConfiguration config2 = UsableConfiguration();
+ auto config2 = UsableConfiguration();
config2.label = "b";
configs_.push_back(config2);
- ASSERT_TRUE(SelectConfigReturnsConfig());
+ SelectConfigReturnsConfig();
ASSERT_EQ("b", config_.label);
}
@@ -882,17 +1472,16 @@ TEST_F(KeySystemConfigSelectorTest,
media_permission_->is_granted = true;
key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
- blink::WebMediaKeySystemConfiguration config1 = UsableConfiguration();
+ auto config1 = UsableConfiguration();
config1.label = "a";
- config1.distinctive_identifier =
- blink::WebMediaKeySystemConfiguration::Requirement::kRequired;
+ config1.distinctive_identifier = MediaKeysRequirement::kRequired;
configs_.push_back(config1);
- blink::WebMediaKeySystemConfiguration config2 = UsableConfiguration();
+ auto config2 = UsableConfiguration();
config2.label = "b";
configs_.push_back(config2);
- ASSERT_TRUE(SelectConfigRequestsPermissionAndReturnsConfig());
+ SelectConfigRequestsPermissionAndReturnsConfig();
ASSERT_EQ("a", config_.label);
}
@@ -901,17 +1490,16 @@ TEST_F(KeySystemConfigSelectorTest,
media_permission_->is_granted = false;
key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
- blink::WebMediaKeySystemConfiguration config1 = UsableConfiguration();
+ auto config1 = UsableConfiguration();
config1.label = "a";
- config1.distinctive_identifier =
- blink::WebMediaKeySystemConfiguration::Requirement::kRequired;
+ config1.distinctive_identifier = MediaKeysRequirement::kRequired;
configs_.push_back(config1);
- blink::WebMediaKeySystemConfiguration config2 = UsableConfiguration();
+ auto config2 = UsableConfiguration();
config2.label = "b";
configs_.push_back(config2);
- ASSERT_TRUE(SelectConfigRequestsPermissionAndReturnsConfig());
+ SelectConfigRequestsPermissionAndReturnsConfig();
ASSERT_EQ("b", config_.label);
}
diff --git a/chromium/media/blink/run_all_unittests.cc b/chromium/media/blink/run_all_unittests.cc
index fbdf9140204..72eea59607a 100644
--- a/chromium/media/blink/run_all_unittests.cc
+++ b/chromium/media/blink/run_all_unittests.cc
@@ -11,7 +11,7 @@
#include "media/base/media.h"
#include "services/service_manager/public/cpp/binder_registry.h"
#include "third_party/blink/public/platform/scheduler/test/renderer_scheduler_test_support.h"
-#include "third_party/blink/public/platform/scheduler/web_main_thread_scheduler.h"
+#include "third_party/blink/public/platform/scheduler/web_thread_scheduler.h"
#include "third_party/blink/public/platform/web_thread.h"
#include "third_party/blink/public/web/blink.h"
@@ -20,7 +20,7 @@
#endif
#if !defined(OS_IOS)
-#include "mojo/edk/embedder/embedder.h"
+#include "mojo/core/embedder/embedder.h"
#endif
#if defined(V8_USE_EXTERNAL_STARTUP_DATA)
@@ -55,8 +55,7 @@ class BlinkPlatformWithTaskEnvironment : public blink::Platform {
private:
base::test::ScopedTaskEnvironment scoped_task_environment_;
- std::unique_ptr<blink::scheduler::WebMainThreadScheduler>
- main_thread_scheduler_;
+ std::unique_ptr<blink::scheduler::WebThreadScheduler> main_thread_scheduler_;
std::unique_ptr<blink::WebThread> main_thread_;
DISALLOW_COPY_AND_ASSIGN(BlinkPlatformWithTaskEnvironment);
@@ -79,7 +78,7 @@ static int RunTests(base::TestSuite* test_suite) {
#if !defined(OS_IOS)
// Initialize mojo firstly to enable Blink initialization to use it.
- mojo::edk::Init();
+ mojo::core::Init();
#endif
BlinkPlatformWithTaskEnvironment platform_;
diff --git a/chromium/media/blink/video_frame_compositor.cc b/chromium/media/blink/video_frame_compositor.cc
index b71fbc5ec67..dae188a5d4b 100644
--- a/chromium/media/blink/video_frame_compositor.cc
+++ b/chromium/media/blink/video_frame_compositor.cc
@@ -9,6 +9,7 @@
#include "base/time/default_tick_clock.h"
#include "base/trace_event/auto_open_close_event.h"
#include "base/trace_event/trace_event.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
#include "media/base/video_frame.h"
#include "media/blink/webmediaplayer_params.h"
@@ -30,9 +31,7 @@ VideoFrameCompositor::VideoFrameCompositor(
FROM_HERE,
base::TimeDelta::FromMilliseconds(kBackgroundRenderingTimeoutMs),
base::Bind(&VideoFrameCompositor::BackgroundRender,
- base::Unretained(this)),
- // Task is not repeating, CallRender() will reset the task as needed.
- false),
+ base::Unretained(this))),
client_(nullptr),
rendering_(false),
rendered_last_frame_(false),
@@ -48,9 +47,23 @@ VideoFrameCompositor::VideoFrameCompositor(
task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoFrameCompositor::InitializeSubmitter,
weak_ptr_factory_.GetWeakPtr()));
+ update_submission_state_callback_ = media::BindToLoop(
+ task_runner_,
+ base::BindRepeating(&VideoFrameCompositor::UpdateSubmissionState,
+ weak_ptr_factory_.GetWeakPtr()));
}
}
+cc::UpdateSubmissionStateCB
+VideoFrameCompositor::GetUpdateSubmissionStateCallback() {
+ return update_submission_state_callback_;
+}
+
+void VideoFrameCompositor::UpdateSubmissionState(bool is_visible) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ submitter_->UpdateSubmissionState(is_visible);
+}
+
void VideoFrameCompositor::InitializeSubmitter() {
DCHECK(task_runner_->BelongsToCurrentThread());
submitter_->Initialize(this);
@@ -65,11 +78,15 @@ VideoFrameCompositor::~VideoFrameCompositor() {
}
void VideoFrameCompositor::EnableSubmission(
- const viz::FrameSinkId& id,
+ const viz::SurfaceId& id,
media::VideoRotation rotation,
+ bool force_submit,
+ bool is_opaque,
blink::WebFrameSinkDestroyedCallback frame_sink_destroyed_callback) {
DCHECK(task_runner_->BelongsToCurrentThread());
submitter_->SetRotation(rotation);
+ submitter_->SetForceSubmit(force_submit);
+ submitter_->SetIsOpaque(is_opaque);
submitter_->EnableSubmission(id, std::move(frame_sink_destroyed_callback));
client_ = submitter_.get();
}
@@ -203,9 +220,18 @@ void VideoFrameCompositor::PaintSingleFrame(
void VideoFrameCompositor::UpdateCurrentFrameIfStale() {
DCHECK(task_runner_->BelongsToCurrentThread());
- if (IsClientSinkAvailable() || !rendering_ || !is_background_rendering_)
+ // If we're not rendering, then the frame can't be stale.
+ if (!rendering_ || !is_background_rendering_)
return;
+ // If we have a client, and it is currently rendering, then it's not stale
+ // since the client is driving the frame updates at the proper rate.
+ if (IsClientSinkAvailable() && client_->IsDrivingFrameUpdates())
+ return;
+
+ // We're rendering, but the client isn't driving the updates. See if the
+ // frame is stale, and update it.
+
DCHECK(!last_background_render_.is_null());
const base::TimeTicks now = tick_clock_->NowTicks();
@@ -300,7 +326,21 @@ bool VideoFrameCompositor::CallRender(base::TimeTicks deadline_min,
}
void VideoFrameCompositor::UpdateRotation(media::VideoRotation rotation) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
submitter_->SetRotation(rotation);
}
+void VideoFrameCompositor::SetForceSubmit(bool force_submit) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ submitter_->SetForceSubmit(force_submit);
+}
+
+void VideoFrameCompositor::UpdateIsOpaque(bool is_opaque) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ submitter_->SetIsOpaque(is_opaque);
+}
+
} // namespace media
diff --git a/chromium/media/blink/video_frame_compositor.h b/chromium/media/blink/video_frame_compositor.h
index bb2e58c352b..bf61a04afe3 100644
--- a/chromium/media/blink/video_frame_compositor.h
+++ b/chromium/media/blink/video_frame_compositor.h
@@ -15,6 +15,7 @@
#include "base/time/tick_clock.h"
#include "base/time/time.h"
#include "base/timer/timer.h"
+#include "cc/layers/surface_layer.h"
#include "cc/layers/video_frame_provider.h"
#include "media/base/video_renderer_sink.h"
#include "media/blink/media_blink_export.h"
@@ -29,7 +30,7 @@ class AutoOpenCloseEvent;
}
namespace viz {
-class FrameSinkId;
+class SurfaceId;
}
namespace media {
@@ -68,8 +69,6 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// |task_runner| is the task runner on which this class will live,
// though it may be constructed on any thread.
- // |media_context_provider_callback| requires being called on the media
- // thread.
VideoFrameCompositor(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
std::unique_ptr<blink::WebVideoFrameSubmitter> submitter);
@@ -78,11 +77,16 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// called before destruction starts.
~VideoFrameCompositor() override;
+ // Can be called from any thread.
+ cc::UpdateSubmissionStateCB GetUpdateSubmissionStateCallback();
+
// Signals the VideoFrameSubmitter to prepare to receive BeginFrames and
// submit video frames given by VideoFrameCompositor.
virtual void EnableSubmission(
- const viz::FrameSinkId& id,
+ const viz::SurfaceId& id,
media::VideoRotation rotation,
+ bool force_submit,
+ bool is_opaque,
blink::WebFrameSinkDestroyedCallback frame_sink_destroyed_callback);
// cc::VideoFrameProvider implementation. These methods must be called on the
@@ -129,6 +133,12 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// Updates the rotation information for frames given to |submitter_|.
void UpdateRotation(media::VideoRotation rotation);
+ // Notifies the |submitter_| that the frames must be submitted.
+ void SetForceSubmit(bool);
+
+ // Updates the opacity inforamtion for frames given to |submitter_|.
+ void UpdateIsOpaque(bool);
+
void set_tick_clock_for_testing(const base::TickClock* tick_clock) {
tick_clock_ = tick_clock;
}
@@ -152,9 +162,11 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// Ran on the |task_runner_| to initalize |submitter_|;
void InitializeSubmitter();
+ // Signals the VideoFrameSubmitter to stop submitting frames. |is_visible|
+ // indicates whether or not the consumer of the frames is (probably) visible.
+ void UpdateSubmissionState(bool is_visible);
+
// Indicates whether the endpoint for the VideoFrame exists.
- // TODO(lethalantidote): Update this function to read creation/destruction
- // signals of the SurfaceLayerImpl.
bool IsClientSinkAvailable();
// Called on the compositor thread in response to Start() or Stop() calls;
@@ -192,7 +204,7 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// Manages UpdateCurrentFrame() callbacks if |client_| has stopped sending
// them for various reasons. Runs on |task_runner_| and is reset
// after each successful UpdateCurrentFrame() call.
- base::Timer background_rendering_timer_;
+ base::RetainingOneShotTimer background_rendering_timer_;
// These values are only set and read on the compositor thread.
cc::VideoFrameProvider::Client* client_;
@@ -203,6 +215,7 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
base::TimeDelta last_interval_;
base::TimeTicks last_background_render_;
OnNewProcessedFrameCB new_processed_frame_cb_;
+ cc::UpdateSubmissionStateCB update_submission_state_callback_;
// Set on the compositor thread, but also read on the media thread.
base::Lock current_frame_lock_;
diff --git a/chromium/media/blink/video_frame_compositor_unittest.cc b/chromium/media/blink/video_frame_compositor_unittest.cc
index 8585ce97874..7098e1aafb5 100644
--- a/chromium/media/blink/video_frame_compositor_unittest.cc
+++ b/chromium/media/blink/video_frame_compositor_unittest.cc
@@ -16,6 +16,7 @@
#include "third_party/blink/public/platform/web_video_frame_submitter.h"
using testing::_;
+using testing::AnyNumber;
using testing::DoAll;
using testing::Eq;
using testing::Return;
@@ -28,11 +29,15 @@ class MockWebVideoFrameSubmitter : public blink::WebVideoFrameSubmitter {
// blink::WebVideoFrameSubmitter implementation.
void StopUsingProvider() override {}
MOCK_METHOD2(EnableSubmission,
- void(viz::FrameSinkId, blink::WebFrameSinkDestroyedCallback));
+ void(viz::SurfaceId, blink::WebFrameSinkDestroyedCallback));
MOCK_METHOD0(StartRendering, void());
MOCK_METHOD0(StopRendering, void());
+ MOCK_CONST_METHOD0(IsDrivingFrameUpdates, bool(void));
MOCK_METHOD1(Initialize, void(cc::VideoFrameProvider*));
MOCK_METHOD1(SetRotation, void(media::VideoRotation));
+ MOCK_METHOD1(SetIsOpaque, void(bool));
+ MOCK_METHOD1(UpdateSubmissionState, void(bool));
+ MOCK_METHOD1(SetForceSubmit, void(bool));
void DidReceiveFrame() override { ++did_receive_frame_count_; }
int did_receive_frame_count() { return did_receive_frame_count_; }
@@ -69,10 +74,12 @@ class VideoFrameCompositorTest : public VideoRendererSink::RenderCallback,
base::RunLoop().RunUntilIdle();
EXPECT_CALL(*submitter_,
SetRotation(Eq(media::VideoRotation::VIDEO_ROTATION_90)));
- EXPECT_CALL(*submitter_, EnableSubmission(Eq(viz::FrameSinkId(1, 1)), _));
- compositor_->EnableSubmission(viz::FrameSinkId(1, 1),
+ EXPECT_CALL(*submitter_, SetForceSubmit(false));
+ EXPECT_CALL(*submitter_, EnableSubmission(Eq(viz::SurfaceId()), _));
+ EXPECT_CALL(*submitter_, SetIsOpaque(true));
+ compositor_->EnableSubmission(viz::SurfaceId(),
media::VideoRotation::VIDEO_ROTATION_90,
- base::BindRepeating([] {}));
+ false, true, base::BindRepeating([] {}));
}
compositor_->set_tick_clock_for_testing(&tick_clock_);
@@ -256,6 +263,10 @@ TEST_P(VideoFrameCompositorTest, UpdateCurrentFrameIfStale) {
scoped_refptr<VideoFrame> opaque_frame_2 = CreateOpaqueFrame();
compositor_->set_background_rendering_for_testing(true);
+ EXPECT_CALL(*submitter_, IsDrivingFrameUpdates)
+ .Times(AnyNumber())
+ .WillRepeatedly(Return(true));
+
// Starting the video renderer should return a single frame.
EXPECT_CALL(*this, Render(_, _, true)).WillOnce(Return(opaque_frame_1));
StartVideoRendererSink();
@@ -267,8 +278,11 @@ TEST_P(VideoFrameCompositorTest, UpdateCurrentFrameIfStale) {
EXPECT_CALL(*this, Render(_, _, _)).Times(0);
compositor()->UpdateCurrentFrameIfStale();
- // Clear our client, which means no mock function calls for Client.
- compositor()->SetVideoFrameProviderClient(nullptr);
+ // Have the client signal that it will not drive the frame clock, so that
+ // calling UpdateCurrentFrameIfStale may update the frame.
+ EXPECT_CALL(*submitter_, IsDrivingFrameUpdates)
+ .Times(AnyNumber())
+ .WillRepeatedly(Return(false));
// Wait for background rendering to tick.
base::RunLoop run_loop;
@@ -289,6 +303,16 @@ TEST_P(VideoFrameCompositorTest, UpdateCurrentFrameIfStale) {
compositor()->UpdateCurrentFrameIfStale();
EXPECT_EQ(opaque_frame_1, compositor()->GetCurrentFrame());
+ // Clear our client, which means no mock function calls for Client. It will
+ // also permit UpdateCurrentFrameIfStale to update the frame.
+ compositor()->SetVideoFrameProviderClient(nullptr);
+
+ // Advancing the tick clock should allow a new frame to be requested.
+ tick_clock_.Advance(base::TimeDelta::FromMilliseconds(10));
+ EXPECT_CALL(*this, Render(_, _, true)).WillOnce(Return(opaque_frame_2));
+ compositor()->UpdateCurrentFrameIfStale();
+ EXPECT_EQ(opaque_frame_2, compositor()->GetCurrentFrame());
+
// Background rendering should tick another render callback.
StopVideoRendererSink(false);
}
diff --git a/chromium/media/blink/watch_time_component.cc b/chromium/media/blink/watch_time_component.cc
new file mode 100644
index 00000000000..7e6a61e4014
--- /dev/null
+++ b/chromium/media/blink/watch_time_component.cc
@@ -0,0 +1,133 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/blink/watch_time_component.h"
+
+#include "media/blink/media_blink_export.h"
+#include "third_party/blink/public/platform/web_media_player.h"
+
+namespace media {
+
+template <typename T>
+WatchTimeComponent<T>::WatchTimeComponent(
+ T initial_value,
+ std::vector<WatchTimeKey> keys_to_finalize,
+ ValueToKeyCB value_to_key_cb,
+ GetMediaTimeCB get_media_time_cb,
+ mojom::WatchTimeRecorder* recorder)
+ : keys_to_finalize_(std::move(keys_to_finalize)),
+ value_to_key_cb_(std::move(value_to_key_cb)),
+ get_media_time_cb_(std::move(get_media_time_cb)),
+ recorder_(recorder),
+ current_value_(initial_value),
+ pending_value_(initial_value) {}
+
+template <typename T>
+WatchTimeComponent<T>::~WatchTimeComponent() = default;
+
+template <typename T>
+void WatchTimeComponent<T>::OnReportingStarted(
+ base::TimeDelta start_timestamp) {
+ start_timestamp_ = start_timestamp;
+ end_timestamp_ = last_timestamp_ = kNoTimestamp;
+}
+
+template <typename T>
+void WatchTimeComponent<T>::SetPendingValue(T new_value) {
+ pending_value_ = new_value;
+ if (current_value_ != new_value) {
+ // Don't trample an existing finalize; the first takes precedence.
+ //
+ // Note: For components with trinary or higher state, which experience
+ // multiple state changes during an existing finalize, this will drop all
+ // watch time between the current and final state. E.g., state=0 {0ms} ->
+ // state=1 {1ms} -> state=2 {2ms} will result in loss of state=1 watch time.
+ if (end_timestamp_ != kNoTimestamp)
+ return;
+
+ end_timestamp_ = get_media_time_cb_.Run();
+ return;
+ }
+
+ // Clear any pending finalize since we returned to the previous value before
+ // the finalize could completed. I.e., assume this is a continuation.
+ end_timestamp_ = kNoTimestamp;
+}
+
+template <typename T>
+void WatchTimeComponent<T>::SetCurrentValue(T new_value) {
+ current_value_ = new_value;
+}
+
+template <typename T>
+void WatchTimeComponent<T>::RecordWatchTime(base::TimeDelta current_timestamp) {
+ DCHECK_NE(current_timestamp, kNoTimestamp);
+ DCHECK_NE(current_timestamp, kInfiniteDuration);
+ DCHECK_GE(current_timestamp, base::TimeDelta());
+
+ // If we're finalizing, use the media time at time of finalization. We only
+ // use the |end_timestamp_| if it's less than the current timestamp, otherwise
+ // we may report more watch time than expected.
+ if (NeedsFinalize() && end_timestamp_ < current_timestamp)
+ current_timestamp = end_timestamp_;
+
+ // Don't update watch time if media time hasn't changed since the last run;
+ // this may occur if a seek is taking some time to complete or the playback
+ // is stalled for some reason.
+ if (last_timestamp_ == current_timestamp)
+ return;
+
+ last_timestamp_ = current_timestamp;
+ const base::TimeDelta elapsed = last_timestamp_ - start_timestamp_;
+ if (elapsed <= base::TimeDelta())
+ return;
+
+ // If no value to key callback has been provided, record |elapsed| to every
+ // key in the |keys_to_finalize_| list.
+ if (value_to_key_cb_.is_null()) {
+ for (auto k : keys_to_finalize_)
+ recorder_->RecordWatchTime(k, elapsed);
+ return;
+ }
+
+ // A conversion callback has been specified, so only report elapsed to the
+ // key provided by the callback.
+ //
+ // Record watch time using |current_value_| and not |pending_value_| since
+ // that transition should not happen until Finalize().
+ recorder_->RecordWatchTime(value_to_key_cb_.Run(current_value_), elapsed);
+}
+
+template <typename T>
+void WatchTimeComponent<T>::Finalize(
+ std::vector<WatchTimeKey>* keys_to_finalize) {
+ DCHECK(NeedsFinalize());
+ // Update |current_value_| and |start_timestamp_| to |end_timestamp_| since
+ // that's when the |pending_value_| was set.
+ current_value_ = pending_value_;
+ start_timestamp_ = end_timestamp_;
+
+ // Complete the finalize and indicate which keys need to be finalized.
+ end_timestamp_ = kNoTimestamp;
+ keys_to_finalize->insert(keys_to_finalize->end(), keys_to_finalize_.begin(),
+ keys_to_finalize_.end());
+ DCHECK(!NeedsFinalize());
+}
+
+template <typename T>
+bool WatchTimeComponent<T>::NeedsFinalize() const {
+ return end_timestamp_ != kNoTimestamp;
+}
+
+// Required to avoid linking errors since we've split this file into a .cc + .h
+// file set instead of putting the function definitions in the header file. Any
+// new component type must be added here.
+//
+// Note: These must be the last line in this file, otherwise you will also see
+// linking errors since the templates won't have been fully defined prior.
+template class MEDIA_BLINK_EXPORT WatchTimeComponent<bool>;
+template class MEDIA_BLINK_EXPORT
+ WatchTimeComponent<blink::WebMediaPlayer::DisplayType>;
+
+} // namespace media
diff --git a/chromium/media/blink/watch_time_component.h b/chromium/media/blink/watch_time_component.h
new file mode 100644
index 00000000000..522e3cf638f
--- /dev/null
+++ b/chromium/media/blink/watch_time_component.h
@@ -0,0 +1,135 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BLINK_WATCH_TIME_COMPONENT_H_
+#define MEDIA_BLINK_WATCH_TIME_COMPONENT_H_
+
+#include <vector>
+
+#include "base/callback.h"
+#include "base/macros.h"
+#include "base/time/time.h"
+#include "media/base/timestamp_constants.h"
+#include "media/base/watch_time_keys.h"
+#include "media/blink/media_blink_export.h"
+#include "media/mojo/interfaces/watch_time_recorder.mojom.h"
+
+namespace media {
+
+// Every input used to calculate watch time functions the same way, so we use a
+// common WatchTimeComponent class to avoid lots of copy/paste and enforce rigor
+// in the reporter. Components are not thread-safe.
+//
+// E.g., each component does something like flip pending value, record timestamp
+// of that value change, wait for next reporting cycle, finalize the elapsed
+// time, flip the actual value, and then start recording from that previous
+// finalize time. They may also clear the pending value flip if the value
+// changes back to the previous value.
+template <typename T>
+class WatchTimeComponent {
+ public:
+ // Callback used to convert |current_value_| into a WatchTimeKey which will be
+ // given to WatchTimeRecorder::RecordWatchTime().
+ using ValueToKeyCB = base::RepeatingCallback<WatchTimeKey(T value)>;
+
+ // Mirror of WatchTimeReporter::GetMediaTimeCB to avoid circular dependency.
+ using GetMediaTimeCB = base::RepeatingCallback<base::TimeDelta(void)>;
+
+ // |initial_value| is the starting value for |current_value_| and
+ // |pending_value_|.
+ //
+ // |keys_to_finalize| is the list of keys which should be finalized.
+ //
+ // |value_to_key_cb| is optional, if unspecified every time RecordWatchTime()
+ // is called, |keys_to_finalize| will also be treated as the list of keys to
+ // record watch time too.
+ //
+ // See WatchTimeReporter constructor for |get_media_time_cb| and |recorder|.
+ WatchTimeComponent(T initial_value,
+ std::vector<WatchTimeKey> keys_to_finalize,
+ ValueToKeyCB value_to_key_cb,
+ GetMediaTimeCB get_media_time_cb,
+ mojom::WatchTimeRecorder* recorder);
+ ~WatchTimeComponent();
+
+ // Called when the main WatchTimeReporter timer is started. Reinitializes
+ // tracking variables and sets |start_timestamp_|. May be called at any time.
+ void OnReportingStarted(base::TimeDelta start_timestamp);
+
+ // Called when the primary value tracked by this component changes but the
+ // change shouldn't take effect until the next Finalize() call.
+ //
+ // |pending_value_| is set to |new_value| when different than |current_value_|
+ // and a finalize is marked at the current media time. If the |current_value_|
+ // is unchanged any pending finalize is cleared.
+ void SetPendingValue(T new_value);
+
+ // Called when the primary value tracked by this component changes and the
+ // change should take effect immediately. This is typically only called when
+ // the watch time timer is not running.
+ void SetCurrentValue(T new_value);
+
+ // If there's no pending finalize, records the amount of watch time which has
+ // elapsed between |current_timestamp| and |start_timestamp_| by calling into
+ // mojom::WatchTimeRecorder::RecordWatchTime(). The key to be recorded to is
+ // determined by the |value_to_key_cb_|; or if none is present, all keys in
+ // |keys_to_finalize_| are recorded to.
+ //
+ // If there's a pending finalize it records the delta between |end_timestamp_|
+ // and |start_timestamp_| if |end_timestamp_| < |current_timestamp|. Does not
+ // complete any pending finalize. May be called multiple times even if a
+ // finalize is pending.
+ void RecordWatchTime(base::TimeDelta current_timestamp);
+
+ // Completes any pending finalize. Which means setting |current_value_| to
+ // |pending_value_| and setting |start_timestamp_| to |end_timestamp_| so that
+ // reporting may continue on a new key if desired. Adds all keys that should
+ // be finalized to |keys_to_finalize|.
+ //
+ // Callers must call mojom::WatchTimeRecorder::FinalizeWatchTime() for the
+ // resulting keys in order to actually complete the finalize. We rely on the
+ // calling class to perform the actual finalization since it may desire to
+ // batch a set of keys into one finalize call to the recorder.
+ //
+ // E.g., some components may stop reporting upon Finalize() while others want
+ // to report to a new key for all watch time going forward.
+ void Finalize(std::vector<WatchTimeKey>* keys_to_finalize);
+
+ // Returns true if Finalize() should be called.
+ bool NeedsFinalize() const;
+
+ // Returns the current value for |end_timestamp_|.
+ base::TimeDelta end_timestamp() const { return end_timestamp_; }
+
+ T current_value_for_testing() const { return current_value_; }
+
+ private:
+ // Initialized during construction. See constructor for details.
+ const std::vector<WatchTimeKey> keys_to_finalize_;
+ const ValueToKeyCB value_to_key_cb_;
+ const GetMediaTimeCB get_media_time_cb_;
+ mojom::WatchTimeRecorder* const recorder_;
+
+ // The current value which will be used to select keys for reporting WatchTime
+ // during the next RecordWatchTime() call.
+ T current_value_;
+
+ // A pending value which will be used to set |current_value_| once Finalize()
+ // has been called.
+ T pending_value_;
+
+ // The starting and ending timestamps used for reporting watch time. The end
+ // timestamp may be kNoTimestamp if reporting is ongoing.
+ base::TimeDelta start_timestamp_;
+ base::TimeDelta end_timestamp_ = kNoTimestamp;
+
+ // The last media timestamp seen by RecordWatchTime().
+ base::TimeDelta last_timestamp_ = kNoTimestamp;
+
+ DISALLOW_COPY_AND_ASSIGN(WatchTimeComponent);
+};
+
+} // namespace media
+
+#endif // MEDIA_BLINK_WATCH_TIME_COMPONENT_H_
diff --git a/chromium/media/blink/watch_time_component_unittest.cc b/chromium/media/blink/watch_time_component_unittest.cc
new file mode 100644
index 00000000000..26b93c956d8
--- /dev/null
+++ b/chromium/media/blink/watch_time_component_unittest.cc
@@ -0,0 +1,300 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/blink/watch_time_component.h"
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/blink/public/platform/web_media_player.h"
+
+namespace media {
+
+class WatchTimeInterceptor : public mojom::WatchTimeRecorder {
+ public:
+ WatchTimeInterceptor() = default;
+ ~WatchTimeInterceptor() override = default;
+
+ // mojom::WatchTimeRecorder implementation:
+ MOCK_METHOD2(RecordWatchTime, void(WatchTimeKey, base::TimeDelta));
+ MOCK_METHOD1(FinalizeWatchTime, void(const std::vector<WatchTimeKey>&));
+ MOCK_METHOD1(OnError, void(PipelineStatus));
+ MOCK_METHOD1(UpdateUnderflowCount, void(int32_t count));
+ MOCK_METHOD1(
+ UpdateSecondaryProperties,
+ void(mojom::SecondaryPlaybackPropertiesPtr secondary_properties));
+ MOCK_METHOD1(SetAutoplayInitiated, void(bool));
+ MOCK_METHOD1(OnDurationChanged, void(base::TimeDelta));
+};
+
+class WatchTimeComponentTest : public testing::Test {
+ public:
+ WatchTimeComponentTest() = default;
+ ~WatchTimeComponentTest() override = default;
+
+ protected:
+ template <typename T>
+ std::unique_ptr<WatchTimeComponent<T>> CreateComponent(
+ T initial_value,
+ std::vector<WatchTimeKey> keys_to_finalize,
+ typename WatchTimeComponent<T>::ValueToKeyCB value_to_key_cb) {
+ return std::make_unique<WatchTimeComponent<T>>(
+ initial_value, std::move(keys_to_finalize), std::move(value_to_key_cb),
+ base::BindRepeating(&WatchTimeComponentTest::GetMediaTime,
+ base::Unretained(this)),
+ &recorder_);
+ }
+
+ MOCK_METHOD0(GetMediaTime, base::TimeDelta(void));
+
+ // Usage of StrictMock is intentional here. This ensures all mock method calls
+ // are accounted for in tests.
+ testing::StrictMock<WatchTimeInterceptor> recorder_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(WatchTimeComponentTest);
+};
+
+// Components should be key agnostic so just choose an arbitrary key for running
+// most of the tests.
+constexpr WatchTimeKey kTestKey = WatchTimeKey::kAudioAll;
+
+// This is a test of the standard flow for most components. Most components will
+// be created, be enabled, start reporting, record watch time, be disabled,
+// report a finalize, and then record watch time again.
+TEST_F(WatchTimeComponentTest, BasicFlow) {
+ auto test_component = CreateComponent<bool>(
+ false, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
+ EXPECT_FALSE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Simulate flag enabled after construction, but before timer is running; this
+ // should set the current value immediately.
+ test_component->SetCurrentValue(true);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Notify the start of reporting to set the starting timestamp.
+ const base::TimeDelta kStartTime = base::TimeDelta::FromSeconds(1);
+ test_component->OnReportingStarted(kStartTime);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Simulate a single recording tick.
+ const base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(2);
+ EXPECT_CALL(recorder_, RecordWatchTime(kTestKey, kWatchTime - kStartTime));
+ test_component->RecordWatchTime(kWatchTime);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Simulate the flag being flipped to false while the timer is running; which
+ // should trigger a finalize, but not yet set the current value.
+ const base::TimeDelta kFinalWatchTime = base::TimeDelta::FromSeconds(3);
+ EXPECT_CALL(*this, GetMediaTime()).WillOnce(testing::Return(kFinalWatchTime));
+ test_component->SetPendingValue(false);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_TRUE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
+
+ // If record is called again it should use the finalize timestamp instead of
+ // whatever timestamp we provide.
+ EXPECT_CALL(recorder_,
+ RecordWatchTime(kTestKey, kFinalWatchTime - kStartTime));
+ test_component->RecordWatchTime(base::TimeDelta::FromSeconds(1234));
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_TRUE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
+
+ // Calling it twice or more should not change anything; nor even generate a
+ // report since that time has already been recorded.
+ for (int i = 0; i < 2; ++i) {
+ test_component->RecordWatchTime(base::TimeDelta::FromSeconds(1234 + i));
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_TRUE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
+ }
+
+ // Trigger finalize which should transition the pending value to the current
+ // value as well as clear the finalize.
+ std::vector<WatchTimeKey> finalize_keys;
+ test_component->Finalize(&finalize_keys);
+ EXPECT_FALSE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+ ASSERT_EQ(finalize_keys.size(), 1u);
+ EXPECT_EQ(finalize_keys[0], kTestKey);
+
+ // The start timestamps should be equal to the previous end timestamp now, so
+ // if we call RecordWatchTime again, the value should be relative.
+ const base::TimeDelta kNewWatchTime = base::TimeDelta::FromSeconds(4);
+ EXPECT_CALL(recorder_,
+ RecordWatchTime(kTestKey, kNewWatchTime - kFinalWatchTime));
+ test_component->RecordWatchTime(kNewWatchTime);
+ EXPECT_FALSE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+}
+
+TEST_F(WatchTimeComponentTest, SetCurrentValue) {
+ auto test_component = CreateComponent<bool>(
+ true, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // An update when the timer isn't running should take effect immediately.
+ test_component->SetCurrentValue(false);
+ EXPECT_FALSE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ test_component->SetCurrentValue(true);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+}
+
+TEST_F(WatchTimeComponentTest, RecordDuringFinalizeRespectsCurrentTime) {
+ auto test_component = CreateComponent<bool>(
+ true, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Simulate the flag being flipped to false while the timer is running; which
+ // should trigger a finalize, but not yet set the current value.
+ const base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(3);
+ EXPECT_CALL(*this, GetMediaTime()).WillOnce(testing::Return(kWatchTime1));
+ test_component->SetPendingValue(false);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_TRUE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kWatchTime1);
+
+ // Now issue a RecordWatchTime() call with a media time before the finalize
+ // time. This can happen when the TimeDelta provided to RecordWatchTime has
+ // been clamped for some reason (e.g., a superseding finalize).
+ const base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(2);
+ EXPECT_CALL(recorder_, RecordWatchTime(kTestKey, kWatchTime2));
+ test_component->RecordWatchTime(kWatchTime2);
+}
+
+TEST_F(WatchTimeComponentTest, SetPendingValue) {
+ auto test_component = CreateComponent<bool>(
+ true, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // A change when running should trigger a finalize.
+ const base::TimeDelta kFinalWatchTime = base::TimeDelta::FromSeconds(1);
+ EXPECT_CALL(*this, GetMediaTime()).WillOnce(testing::Return(kFinalWatchTime));
+ test_component->SetPendingValue(false);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_TRUE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
+
+ // Issuing the same property change again should do nothing since there's a
+ // pending finalize already.
+ test_component->SetPendingValue(false);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_TRUE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
+
+ // Changing the value back, should cancel the finalize.
+ test_component->SetPendingValue(true);
+ EXPECT_TRUE(test_component->current_value_for_testing());
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+}
+
+// Tests RecordWatchTime() behavior when a ValueToKeyCB is provided.
+TEST_F(WatchTimeComponentTest, WithValueToKeyCB) {
+ using DisplayType = blink::WebMediaPlayer::DisplayType;
+
+ const std::vector<WatchTimeKey> finalize_keys = {
+ WatchTimeKey::kAudioVideoDisplayInline,
+ WatchTimeKey::kAudioVideoDisplayFullscreen,
+ WatchTimeKey::kAudioVideoDisplayPictureInPicture};
+ auto test_component = CreateComponent<DisplayType>(
+ DisplayType::kFullscreen, finalize_keys,
+ base::BindRepeating([](DisplayType display_type) {
+ switch (display_type) {
+ case DisplayType::kInline:
+ return WatchTimeKey::kAudioVideoDisplayInline;
+ case DisplayType::kFullscreen:
+ return WatchTimeKey::kAudioVideoDisplayFullscreen;
+ case DisplayType::kPictureInPicture:
+ return WatchTimeKey::kAudioVideoDisplayPictureInPicture;
+ }
+ }));
+ EXPECT_EQ(test_component->current_value_for_testing(),
+ DisplayType::kFullscreen);
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Notify the start of reporting to set the starting timestamp.
+ const base::TimeDelta kStartTime = base::TimeDelta::FromSeconds(1);
+ test_component->OnReportingStarted(kStartTime);
+ EXPECT_EQ(test_component->current_value_for_testing(),
+ DisplayType::kFullscreen);
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Record and verify the key recorded too matches the callback provided.
+ const base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(2);
+ EXPECT_CALL(recorder_,
+ RecordWatchTime(WatchTimeKey::kAudioVideoDisplayFullscreen,
+ kWatchTime1 - kStartTime));
+ test_component->RecordWatchTime(kWatchTime1);
+ EXPECT_EQ(test_component->current_value_for_testing(),
+ DisplayType::kFullscreen);
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Change property while saying the timer isn't running to avoid finalize.
+ const base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(3);
+ test_component->SetCurrentValue(DisplayType::kInline);
+ EXPECT_CALL(recorder_, RecordWatchTime(WatchTimeKey::kAudioVideoDisplayInline,
+ kWatchTime2 - kStartTime));
+ test_component->RecordWatchTime(kWatchTime2);
+ EXPECT_EQ(test_component->current_value_for_testing(), DisplayType::kInline);
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Cycle through all three properties...
+ const base::TimeDelta kWatchTime3 = base::TimeDelta::FromSeconds(4);
+ test_component->SetCurrentValue(DisplayType::kPictureInPicture);
+ EXPECT_CALL(recorder_,
+ RecordWatchTime(WatchTimeKey::kAudioVideoDisplayPictureInPicture,
+ kWatchTime3 - kStartTime));
+ test_component->RecordWatchTime(kWatchTime3);
+ EXPECT_EQ(test_component->current_value_for_testing(),
+ DisplayType::kPictureInPicture);
+ EXPECT_FALSE(test_component->NeedsFinalize());
+ EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
+
+ // Verify finalize sends all three keys.
+ std::vector<WatchTimeKey> actual_finalize_keys;
+ const base::TimeDelta kFinalWatchTime = base::TimeDelta::FromSeconds(5);
+ EXPECT_CALL(*this, GetMediaTime()).WillOnce(testing::Return(kFinalWatchTime));
+ test_component->SetPendingValue(DisplayType::kFullscreen);
+ test_component->Finalize(&actual_finalize_keys);
+ ASSERT_EQ(actual_finalize_keys.size(), finalize_keys.size());
+ for (size_t i = 0; i < finalize_keys.size(); ++i)
+ EXPECT_EQ(actual_finalize_keys[i], finalize_keys[i]);
+}
+
+// Unlike WatchTimeReporter, WatchTimeComponents have no automatic finalization
+// so creating and destroying one without calls, should do nothing.
+TEST_F(WatchTimeComponentTest, NoAutomaticFinalize) {
+ auto test_component = CreateComponent<bool>(
+ false, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
+}
+
+} // namespace media
diff --git a/chromium/media/blink/watch_time_reporter.cc b/chromium/media/blink/watch_time_reporter.cc
index 49a4ebf7b24..52ffc08b7b0 100644
--- a/chromium/media/blink/watch_time_reporter.cc
+++ b/chromium/media/blink/watch_time_reporter.cc
@@ -18,8 +18,29 @@ static bool IsOnBatteryPower() {
return false;
}
+// Helper function for managing property changes. If the watch time timer is
+// running it sets the pending value otherwise it sets the current value and
+// then returns true if the component needs finalize.
+enum class PropertyAction { kNoActionRequired, kFinalizeRequired };
+template <typename T>
+PropertyAction HandlePropertyChange(T new_value,
+ bool is_timer_running,
+ WatchTimeComponent<T>* component) {
+ if (!component)
+ return PropertyAction::kNoActionRequired;
+
+ if (is_timer_running)
+ component->SetPendingValue(new_value);
+ else
+ component->SetCurrentValue(new_value);
+
+ return component->NeedsFinalize() ? PropertyAction::kFinalizeRequired
+ : PropertyAction::kNoActionRequired;
+}
+
WatchTimeReporter::WatchTimeReporter(
mojom::PlaybackPropertiesPtr properties,
+ const gfx::Size& initial_natural_size,
GetMediaTimeCB get_media_time_cb,
mojom::MediaMetricsProvider* provider,
scoped_refptr<base::SequencedTaskRunner> task_runner,
@@ -27,6 +48,7 @@ WatchTimeReporter::WatchTimeReporter(
: WatchTimeReporter(std::move(properties),
false /* is_background */,
false /* is_muted */,
+ initial_natural_size,
std::move(get_media_time_cb),
provider,
task_runner,
@@ -36,6 +58,7 @@ WatchTimeReporter::WatchTimeReporter(
mojom::PlaybackPropertiesPtr properties,
bool is_background,
bool is_muted,
+ const gfx::Size& initial_natural_size,
GetMediaTimeCB get_media_time_cb,
mojom::MediaMetricsProvider* provider,
scoped_refptr<base::SequencedTaskRunner> task_runner,
@@ -43,6 +66,7 @@ WatchTimeReporter::WatchTimeReporter(
: properties_(std::move(properties)),
is_background_(is_background),
is_muted_(is_muted),
+ initial_natural_size_(initial_natural_size),
get_media_time_cb_(std::move(get_media_time_cb)),
reporting_timer_(tick_clock) {
DCHECK(!get_media_time_cb_.is_null());
@@ -67,6 +91,14 @@ WatchTimeReporter::WatchTimeReporter(
reporting_timer_.SetTaskRunner(task_runner);
+ base_component_ = CreateBaseComponent();
+ power_component_ = CreatePowerComponent();
+ if (!is_background_) {
+ controls_component_ = CreateControlsComponent();
+ if (properties_->has_video)
+ display_type_component_ = CreateDisplayTypeComponent();
+ }
+
// If this is a sub-reporter or we shouldn't report watch time, we're done. We
// don't support muted+background reporting currently.
if (is_background_ || is_muted_ || !ShouldReportWatchTime())
@@ -79,7 +111,8 @@ WatchTimeReporter::WatchTimeReporter(
prop_copy->is_background = true;
background_reporter_.reset(new WatchTimeReporter(
std::move(prop_copy), true /* is_background */, false /* is_muted */,
- get_media_time_cb_, provider, task_runner, tick_clock));
+ initial_natural_size_, get_media_time_cb_, provider, task_runner,
+ tick_clock));
// Muted watch time is only reported for audio+video playback.
if (!properties_->has_video || !properties_->has_audio)
@@ -91,7 +124,8 @@ WatchTimeReporter::WatchTimeReporter(
prop_copy->is_muted = true;
muted_reporter_.reset(new WatchTimeReporter(
std::move(prop_copy), false /* is_background */, true /* is_muted */,
- get_media_time_cb_, provider, task_runner, tick_clock));
+ initial_natural_size_, get_media_time_cb_, provider, task_runner,
+ tick_clock));
}
WatchTimeReporter::~WatchTimeReporter() {
@@ -99,6 +133,7 @@ WatchTimeReporter::~WatchTimeReporter() {
muted_reporter_.reset();
// This is our last chance, so finalize now if there's anything remaining.
+ in_shutdown_ = true;
MaybeFinalizeWatchTime(FinalizeTime::IMMEDIATELY);
if (base::PowerMonitor* pm = base::PowerMonitor::Get())
pm->RemoveObserver(this);
@@ -111,6 +146,7 @@ void WatchTimeReporter::OnPlaying() {
muted_reporter_->OnPlaying();
is_playing_ = true;
+ is_seeking_ = false;
MaybeStartReportingTimer(get_media_time_cb_.Run());
}
@@ -132,6 +168,7 @@ void WatchTimeReporter::OnSeeking() {
// Seek is a special case that does not have hysteresis, when this is called
// the seek is imminent, so finalize the previous playback immediately.
+ is_seeking_ = true;
MaybeFinalizeWatchTime(FinalizeTime::IMMEDIATELY);
}
@@ -210,71 +247,39 @@ void WatchTimeReporter::OnUnderflow() {
}
void WatchTimeReporter::OnNativeControlsEnabled() {
- if (muted_reporter_)
- muted_reporter_->OnNativeControlsEnabled();
-
- if (!reporting_timer_.IsRunning()) {
- has_native_controls_ = true;
- return;
- }
-
- if (end_timestamp_for_controls_ != kNoTimestamp) {
- end_timestamp_for_controls_ = kNoTimestamp;
- return;
- }
-
- end_timestamp_for_controls_ = get_media_time_cb_.Run();
- reporting_timer_.Start(FROM_HERE, reporting_interval_, this,
- &WatchTimeReporter::UpdateWatchTime);
+ OnNativeControlsChanged(true);
}
void WatchTimeReporter::OnNativeControlsDisabled() {
- if (muted_reporter_)
- muted_reporter_->OnNativeControlsDisabled();
-
- if (!reporting_timer_.IsRunning()) {
- has_native_controls_ = false;
- return;
- }
-
- if (end_timestamp_for_controls_ != kNoTimestamp) {
- end_timestamp_for_controls_ = kNoTimestamp;
- return;
- }
-
- end_timestamp_for_controls_ = get_media_time_cb_.Run();
- reporting_timer_.Start(FROM_HERE, reporting_interval_, this,
- &WatchTimeReporter::UpdateWatchTime);
+ OnNativeControlsChanged(false);
}
void WatchTimeReporter::OnDisplayTypeInline() {
- OnDisplayTypeChanged(blink::WebMediaPlayer::DisplayType::kInline);
+ OnDisplayTypeChanged(DisplayType::kInline);
}
void WatchTimeReporter::OnDisplayTypeFullscreen() {
- OnDisplayTypeChanged(blink::WebMediaPlayer::DisplayType::kFullscreen);
+ OnDisplayTypeChanged(DisplayType::kFullscreen);
}
void WatchTimeReporter::OnDisplayTypePictureInPicture() {
- OnDisplayTypeChanged(blink::WebMediaPlayer::DisplayType::kPictureInPicture);
+ OnDisplayTypeChanged(DisplayType::kPictureInPicture);
}
-void WatchTimeReporter::SetAudioDecoderName(const std::string& name) {
- DCHECK(properties_->has_audio);
- recorder_->SetAudioDecoderName(name);
- if (background_reporter_)
- background_reporter_->SetAudioDecoderName(name);
- if (muted_reporter_)
- muted_reporter_->SetAudioDecoderName(name);
-}
+void WatchTimeReporter::UpdateSecondaryProperties(
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties) {
+ // Flush any unrecorded watch time before updating the secondary properties to
+ // ensure the UKM record is finalized with up-to-date watch time information.
+ if (reporting_timer_.IsRunning())
+ RecordWatchTime();
-void WatchTimeReporter::SetVideoDecoderName(const std::string& name) {
- DCHECK(properties_->has_video);
- recorder_->SetVideoDecoderName(name);
- if (background_reporter_)
- background_reporter_->SetVideoDecoderName(name);
+ recorder_->UpdateSecondaryProperties(secondary_properties.Clone());
+ if (background_reporter_) {
+ background_reporter_->UpdateSecondaryProperties(
+ secondary_properties.Clone());
+ }
if (muted_reporter_)
- muted_reporter_->SetVideoDecoderName(name);
+ muted_reporter_->UpdateSecondaryProperties(std::move(secondary_properties));
}
void WatchTimeReporter::SetAutoplayInitiated(bool autoplay_initiated) {
@@ -285,88 +290,97 @@ void WatchTimeReporter::SetAutoplayInitiated(bool autoplay_initiated) {
muted_reporter_->SetAutoplayInitiated(autoplay_initiated);
}
+void WatchTimeReporter::OnDurationChanged(base::TimeDelta duration) {
+ recorder_->OnDurationChanged(duration);
+ if (background_reporter_)
+ background_reporter_->OnDurationChanged(duration);
+ if (muted_reporter_)
+ muted_reporter_->OnDurationChanged(duration);
+}
+
void WatchTimeReporter::OnPowerStateChange(bool on_battery_power) {
- if (!reporting_timer_.IsRunning())
- return;
+ if (HandlePropertyChange<bool>(on_battery_power, reporting_timer_.IsRunning(),
+ power_component_.get()) ==
+ PropertyAction::kFinalizeRequired) {
+ RestartTimerForHysteresis();
+ }
+}
- // Defer changing |is_on_battery_power_| until the next watch time report to
- // avoid momentary power changes from affecting the results.
- if (is_on_battery_power_ != on_battery_power) {
- end_timestamp_for_power_ = get_media_time_cb_.Run();
+void WatchTimeReporter::OnNativeControlsChanged(bool has_native_controls) {
+ if (muted_reporter_)
+ muted_reporter_->OnNativeControlsChanged(has_native_controls);
- // Restart the reporting timer so the full hysteresis is afforded.
- reporting_timer_.Start(FROM_HERE, reporting_interval_, this,
- &WatchTimeReporter::UpdateWatchTime);
- return;
+ if (HandlePropertyChange<bool>(
+ has_native_controls, reporting_timer_.IsRunning(),
+ controls_component_.get()) == PropertyAction::kFinalizeRequired) {
+ RestartTimerForHysteresis();
}
+}
+
+void WatchTimeReporter::OnDisplayTypeChanged(DisplayType display_type) {
+ if (muted_reporter_)
+ muted_reporter_->OnDisplayTypeChanged(display_type);
- end_timestamp_for_power_ = kNoTimestamp;
+ if (HandlePropertyChange<DisplayType>(
+ display_type, reporting_timer_.IsRunning(),
+ display_type_component_.get()) == PropertyAction::kFinalizeRequired) {
+ RestartTimerForHysteresis();
+ }
}
-bool WatchTimeReporter::ShouldReportWatchTime() {
+bool WatchTimeReporter::ShouldReportWatchTime() const {
// Report listen time or watch time for videos of sufficient size.
return properties_->has_video
- ? (properties_->natural_size.height() >=
- kMinimumVideoSize.height() &&
- properties_->natural_size.width() >= kMinimumVideoSize.width())
+ ? (initial_natural_size_.height() >= kMinimumVideoSize.height() &&
+ initial_natural_size_.width() >= kMinimumVideoSize.width())
: properties_->has_audio;
}
+bool WatchTimeReporter::ShouldReportingTimerRun() const {
+ // TODO(dalecurtis): We should only consider |volume_| when there is actually
+ // an audio track; requires updating lots of tests to fix.
+ return ShouldReportWatchTime() && is_playing_ && volume_ && is_visible_ &&
+ !in_shutdown_ && !is_seeking_;
+}
+
void WatchTimeReporter::MaybeStartReportingTimer(
base::TimeDelta start_timestamp) {
DCHECK_NE(start_timestamp, kInfiniteDuration);
DCHECK_GE(start_timestamp, base::TimeDelta());
- // Don't start the timer if any of our state indicates we shouldn't; this
- // check is important since the various event handlers do not have to care
- // about the state of other events.
- //
- // TODO(dalecurtis): We should only consider |volume_| when there is actually
- // an audio track; requires updating lots of tests to fix.
- if (!ShouldReportWatchTime() || !is_playing_ || !volume_ || !is_visible_) {
- // If we reach this point the timer should already have been stopped or
- // there is a pending finalize in flight.
- DCHECK(!reporting_timer_.IsRunning() || end_timestamp_ != kNoTimestamp);
+ // Don't start the timer if our state indicates we shouldn't; this check is
+ // important since the various event handlers do not have to care about the
+ // state of other events.
+ const bool should_start = ShouldReportingTimerRun();
+ if (reporting_timer_.IsRunning()) {
+ base_component_->SetPendingValue(should_start);
return;
}
- // If we haven't finalized the last watch time metrics yet, count this
- // playback as a continuation of the previous metrics.
- if (end_timestamp_ != kNoTimestamp) {
- DCHECK(reporting_timer_.IsRunning());
- end_timestamp_ = kNoTimestamp;
- return;
- }
-
- // Don't restart the timer if it's already running.
- if (reporting_timer_.IsRunning())
+ base_component_->SetCurrentValue(should_start);
+ if (!should_start)
return;
underflow_count_ = 0;
pending_underflow_events_.clear();
- last_media_timestamp_ = last_media_power_timestamp_ =
- last_media_controls_timestamp_ = end_timestamp_for_power_ =
- last_media_display_type_timestamp_ = end_timestamp_for_display_type_ =
- kNoTimestamp;
- is_on_battery_power_ = IsOnBatteryPower();
- display_type_for_recording_ = display_type_;
- start_timestamp_ = start_timestamp_for_power_ =
- start_timestamp_for_controls_ = start_timestamp_for_display_type_ =
- start_timestamp;
+
+ base_component_->OnReportingStarted(start_timestamp);
+ power_component_->OnReportingStarted(start_timestamp);
+
+ if (controls_component_)
+ controls_component_->OnReportingStarted(start_timestamp);
+ if (display_type_component_)
+ display_type_component_->OnReportingStarted(start_timestamp);
+
reporting_timer_.Start(FROM_HERE, reporting_interval_, this,
&WatchTimeReporter::UpdateWatchTime);
}
void WatchTimeReporter::MaybeFinalizeWatchTime(FinalizeTime finalize_time) {
- // Don't finalize if the timer is already stopped.
- if (!reporting_timer_.IsRunning())
+ if (HandlePropertyChange<bool>(
+ ShouldReportingTimerRun(), reporting_timer_.IsRunning(),
+ base_component_.get()) == PropertyAction::kNoActionRequired) {
return;
-
- // Don't trample an existing finalize; the first takes precedence.
- if (end_timestamp_ == kNoTimestamp) {
- end_timestamp_ = get_media_time_cb_.Run();
- DCHECK_NE(end_timestamp_, kInfiniteDuration);
- DCHECK_GE(end_timestamp_, base::TimeDelta());
}
if (finalize_time == FinalizeTime::IMMEDIATELY) {
@@ -377,175 +391,31 @@ void WatchTimeReporter::MaybeFinalizeWatchTime(FinalizeTime finalize_time) {
// Always restart the timer when finalizing, so that we allow for the full
// length of |kReportingInterval| to elapse for hysteresis purposes.
DCHECK_EQ(finalize_time, FinalizeTime::ON_NEXT_UPDATE);
+ RestartTimerForHysteresis();
+}
+
+void WatchTimeReporter::RestartTimerForHysteresis() {
+ // Restart the reporting timer so the full hysteresis is afforded.
+ DCHECK(reporting_timer_.IsRunning());
reporting_timer_.Start(FROM_HERE, reporting_interval_, this,
&WatchTimeReporter::UpdateWatchTime);
}
-void WatchTimeReporter::UpdateWatchTime() {
- DCHECK(ShouldReportWatchTime());
-
- const bool is_finalizing = end_timestamp_ != kNoTimestamp;
- const bool is_power_change_pending = end_timestamp_for_power_ != kNoTimestamp;
- const bool is_controls_change_pending =
- end_timestamp_for_controls_ != kNoTimestamp;
- const bool is_display_type_change_pending =
- end_timestamp_for_display_type_ != kNoTimestamp;
-
- // If we're finalizing the log, use the media time value at the time of
- // finalization.
+void WatchTimeReporter::RecordWatchTime() {
+ // If we're finalizing, use the media time at time of finalization.
const base::TimeDelta current_timestamp =
- is_finalizing ? end_timestamp_ : get_media_time_cb_.Run();
- DCHECK_NE(current_timestamp, kInfiniteDuration);
- DCHECK_GE(current_timestamp, start_timestamp_);
-
- const base::TimeDelta elapsed = current_timestamp - start_timestamp_;
-
-#define RECORD_WATCH_TIME(key, value) \
- do { \
- recorder_->RecordWatchTime( \
- (properties_->has_video && properties_->has_audio) \
- ? (is_background_ \
- ? WatchTimeKey::kAudioVideoBackground##key \
- : (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
- : WatchTimeKey::kAudioVideo##key)) \
- : properties_->has_video \
- ? (is_background_ ? WatchTimeKey::kVideoBackground##key \
- : WatchTimeKey::kVideo##key) \
- : (is_background_ ? WatchTimeKey::kAudioBackground##key \
- : WatchTimeKey::kAudio##key), \
- value); \
- } while (0)
-
- // Only report watch time after some minimum amount has elapsed. Don't update
- // watch time if media time hasn't changed since the last run; this may occur
- // if a seek is taking some time to complete or the playback is stalled for
- // some reason.
- if (last_media_timestamp_ != current_timestamp) {
- last_media_timestamp_ = current_timestamp;
-
- if (elapsed > base::TimeDelta()) {
- RECORD_WATCH_TIME(All, elapsed);
- if (properties_->is_mse)
- RECORD_WATCH_TIME(Mse, elapsed);
- else
- RECORD_WATCH_TIME(Src, elapsed);
-
- if (properties_->is_eme)
- RECORD_WATCH_TIME(Eme, elapsed);
-
- if (properties_->is_embedded_media_experience)
- RECORD_WATCH_TIME(EmbeddedExperience, elapsed);
- }
- }
-
- if (last_media_power_timestamp_ != current_timestamp) {
- // We need a separate |last_media_power_timestamp_| since we don't always
- // base the last watch time calculation on the current timestamp.
- last_media_power_timestamp_ =
- is_power_change_pending ? end_timestamp_for_power_ : current_timestamp;
-
- // Record watch time using the last known value for |is_on_battery_power_|;
- // if there's a |pending_power_change_| use that to accurately finalize the
- // last bits of time in the previous bucket.
- DCHECK_GE(last_media_power_timestamp_, start_timestamp_for_power_);
- const base::TimeDelta elapsed_power =
- last_media_power_timestamp_ - start_timestamp_for_power_;
-
- // Again, only update watch time if any time has elapsed; we need to recheck
- // the elapsed time here since the power source can change anytime.
- if (elapsed_power > base::TimeDelta()) {
- if (is_on_battery_power_)
- RECORD_WATCH_TIME(Battery, elapsed_power);
- else
- RECORD_WATCH_TIME(Ac, elapsed_power);
- }
- }
-
-// Similar to RECORD_WATCH_TIME but ignores background watch time.
-#define RECORD_FOREGROUND_WATCH_TIME(key, value) \
- do { \
- DCHECK(!is_background_); \
- recorder_->RecordWatchTime( \
- (properties_->has_video && properties_->has_audio) \
- ? (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
- : WatchTimeKey::kAudioVideo##key) \
- : properties_->has_audio ? WatchTimeKey::kAudio##key \
- : WatchTimeKey::kVideo##key, \
- value); \
- } while (0)
-
- // Similar to the block above for controls.
- if (!is_background_ && last_media_controls_timestamp_ != current_timestamp) {
- last_media_controls_timestamp_ = is_controls_change_pending
- ? end_timestamp_for_controls_
- : current_timestamp;
-
- DCHECK_GE(last_media_controls_timestamp_, start_timestamp_for_controls_);
- const base::TimeDelta elapsed_controls =
- last_media_controls_timestamp_ - start_timestamp_for_controls_;
-
- if (elapsed_controls > base::TimeDelta()) {
- if (has_native_controls_)
- RECORD_FOREGROUND_WATCH_TIME(NativeControlsOn, elapsed_controls);
- else
- RECORD_FOREGROUND_WATCH_TIME(NativeControlsOff, elapsed_controls);
- }
- }
-
-// Similar to RECORD_WATCH_TIME but ignores background and audio watch time.
-#define RECORD_DISPLAY_WATCH_TIME(key, value) \
- do { \
- DCHECK(properties_->has_video); \
- DCHECK(!is_background_); \
- recorder_->RecordWatchTime( \
- properties_->has_audio \
- ? (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
- : WatchTimeKey::kAudioVideo##key) \
- : WatchTimeKey::kVideo##key, \
- value); \
- } while (0)
-
- // Similar to the block above for display type.
- if (!is_background_ && properties_->has_video &&
- last_media_display_type_timestamp_ != current_timestamp) {
- last_media_display_type_timestamp_ = is_display_type_change_pending
- ? end_timestamp_for_display_type_
- : current_timestamp;
-
- DCHECK_GE(last_media_display_type_timestamp_,
- start_timestamp_for_display_type_);
- const base::TimeDelta elapsed_display_type =
- last_media_display_type_timestamp_ - start_timestamp_for_display_type_;
-
- if (elapsed_display_type > base::TimeDelta()) {
- switch (display_type_for_recording_) {
- case blink::WebMediaPlayer::DisplayType::kInline:
- RECORD_DISPLAY_WATCH_TIME(DisplayInline, elapsed_display_type);
- break;
- case blink::WebMediaPlayer::DisplayType::kFullscreen:
- RECORD_DISPLAY_WATCH_TIME(DisplayFullscreen, elapsed_display_type);
- break;
- case blink::WebMediaPlayer::DisplayType::kPictureInPicture:
- RECORD_DISPLAY_WATCH_TIME(DisplayPictureInPicture,
- elapsed_display_type);
- break;
- }
- }
- }
-
-#undef RECORD_WATCH_TIME
-#undef RECORD_FOREGROUND_WATCH_TIME
-#undef RECORD_DISPLAY_WATCH_TIME
+ base_component_->NeedsFinalize() ? base_component_->end_timestamp()
+ : get_media_time_cb_.Run();
// Pass along any underflow events which have occurred since the last report.
if (!pending_underflow_events_.empty()) {
- if (!is_finalizing) {
+ if (!base_component_->NeedsFinalize()) {
// The maximum value here per period is ~5 events, so int cast is okay.
underflow_count_ += static_cast<int>(pending_underflow_events_.size());
} else {
// Only count underflow events prior to finalize.
for (auto& ts : pending_underflow_events_) {
- if (ts <= end_timestamp_)
+ if (ts <= base_component_->end_timestamp())
underflow_count_++;
}
}
@@ -554,106 +424,156 @@ void WatchTimeReporter::UpdateWatchTime() {
pending_underflow_events_.clear();
}
- // Always send finalize, even if we don't currently have any data, it's
- // harmless to send since nothing will be logged if we've already finalized.
- if (is_finalizing) {
- recorder_->FinalizeWatchTime({});
- } else {
- std::vector<WatchTimeKey> keys_to_finalize;
- if (is_power_change_pending) {
- keys_to_finalize.insert(
- keys_to_finalize.end(),
- {WatchTimeKey::kAudioBattery, WatchTimeKey::kAudioAc,
- WatchTimeKey::kAudioBackgroundBattery,
- WatchTimeKey::kAudioBackgroundAc, WatchTimeKey::kAudioVideoBattery,
- WatchTimeKey::kAudioVideoAc,
- WatchTimeKey::kAudioVideoBackgroundBattery,
- WatchTimeKey::kAudioVideoBackgroundAc,
- WatchTimeKey::kAudioVideoMutedBattery,
- WatchTimeKey::kAudioVideoMutedAc, WatchTimeKey::kVideoBattery,
- WatchTimeKey::kVideoAc, WatchTimeKey::kVideoBackgroundAc,
- WatchTimeKey::kVideoBackgroundBattery});
- }
+ // Record watch time for all components.
+ base_component_->RecordWatchTime(current_timestamp);
+ power_component_->RecordWatchTime(current_timestamp);
+ if (display_type_component_)
+ display_type_component_->RecordWatchTime(current_timestamp);
+ if (controls_component_)
+ controls_component_->RecordWatchTime(current_timestamp);
+}
- if (is_controls_change_pending) {
- keys_to_finalize.insert(keys_to_finalize.end(),
- {WatchTimeKey::kAudioNativeControlsOn,
- WatchTimeKey::kAudioNativeControlsOff,
- WatchTimeKey::kAudioVideoNativeControlsOn,
- WatchTimeKey::kAudioVideoNativeControlsOff,
- WatchTimeKey::kAudioVideoMutedNativeControlsOn,
- WatchTimeKey::kAudioVideoMutedNativeControlsOff,
- WatchTimeKey::kVideoNativeControlsOn,
- WatchTimeKey::kVideoNativeControlsOff});
- }
+void WatchTimeReporter::UpdateWatchTime() {
+ DCHECK(ShouldReportWatchTime());
- if (is_display_type_change_pending) {
- keys_to_finalize.insert(
- keys_to_finalize.end(),
- {WatchTimeKey::kAudioVideoDisplayFullscreen,
- WatchTimeKey::kAudioVideoDisplayInline,
- WatchTimeKey::kAudioVideoDisplayPictureInPicture,
- WatchTimeKey::kAudioVideoMutedDisplayFullscreen,
- WatchTimeKey::kAudioVideoMutedDisplayInline,
- WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture,
- WatchTimeKey::kVideoDisplayFullscreen,
- WatchTimeKey::kVideoDisplayInline,
- WatchTimeKey::kVideoDisplayPictureInPicture});
- }
+ // First record watch time.
+ RecordWatchTime();
+ // Second, process any pending finalize events.
+ std::vector<WatchTimeKey> keys_to_finalize;
+ if (power_component_->NeedsFinalize())
+ power_component_->Finalize(&keys_to_finalize);
+ if (display_type_component_ && display_type_component_->NeedsFinalize())
+ display_type_component_->Finalize(&keys_to_finalize);
+ if (controls_component_ && controls_component_->NeedsFinalize())
+ controls_component_->Finalize(&keys_to_finalize);
+
+ // Then finalize the base component.
+ if (!base_component_->NeedsFinalize()) {
if (!keys_to_finalize.empty())
recorder_->FinalizeWatchTime(keys_to_finalize);
+ return;
}
- if (is_power_change_pending) {
- // Invert battery power status here instead of using the value returned by
- // the PowerObserver since there may be a pending OnPowerStateChange().
- is_on_battery_power_ = !is_on_battery_power_;
+ // Always send finalize, even if we don't currently have any data, it's
+ // harmless to send since nothing will be logged if we've already finalized.
+ base_component_->Finalize(&keys_to_finalize);
+ recorder_->FinalizeWatchTime({});
- start_timestamp_for_power_ = end_timestamp_for_power_;
- end_timestamp_for_power_ = kNoTimestamp;
- }
+ // Stop the timer if this is supposed to be our last tick.
+ underflow_count_ = 0;
+ reporting_timer_.Stop();
+}
- if (is_controls_change_pending) {
- has_native_controls_ = !has_native_controls_;
+#define NORMAL_KEY(key) \
+ ((properties_->has_video && properties_->has_audio) \
+ ? (is_background_ ? WatchTimeKey::kAudioVideoBackground##key \
+ : (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
+ : WatchTimeKey::kAudioVideo##key)) \
+ : properties_->has_video \
+ ? (is_background_ ? WatchTimeKey::kVideoBackground##key \
+ : WatchTimeKey::kVideo##key) \
+ : (is_background_ ? WatchTimeKey::kAudioBackground##key \
+ : WatchTimeKey::kAudio##key))
+
+std::unique_ptr<WatchTimeComponent<bool>>
+WatchTimeReporter::CreateBaseComponent() {
+ std::vector<WatchTimeKey> keys_to_finalize;
+ keys_to_finalize.emplace_back(NORMAL_KEY(All));
+ if (properties_->is_mse)
+ keys_to_finalize.emplace_back(NORMAL_KEY(Mse));
+ else
+ keys_to_finalize.emplace_back(NORMAL_KEY(Src));
+
+ if (properties_->is_eme)
+ keys_to_finalize.emplace_back(NORMAL_KEY(Eme));
+
+ if (properties_->is_embedded_media_experience)
+ keys_to_finalize.emplace_back(NORMAL_KEY(EmbeddedExperience));
+
+ return std::make_unique<WatchTimeComponent<bool>>(
+ false, std::move(keys_to_finalize),
+ WatchTimeComponent<bool>::ValueToKeyCB(), get_media_time_cb_,
+ recorder_.get());
+}
- start_timestamp_for_controls_ = end_timestamp_for_controls_;
- end_timestamp_for_controls_ = kNoTimestamp;
- }
+std::unique_ptr<WatchTimeComponent<bool>>
+WatchTimeReporter::CreatePowerComponent() {
+ std::vector<WatchTimeKey> keys_to_finalize{NORMAL_KEY(Battery),
+ NORMAL_KEY(Ac)};
- if (is_display_type_change_pending) {
- display_type_for_recording_ = display_type_;
+ return std::make_unique<WatchTimeComponent<bool>>(
+ IsOnBatteryPower(), std::move(keys_to_finalize),
+ base::BindRepeating(&WatchTimeReporter::GetPowerKey,
+ base::Unretained(this)),
+ get_media_time_cb_, recorder_.get());
+}
- start_timestamp_for_display_type_ = end_timestamp_for_display_type_;
- end_timestamp_for_display_type_ = kNoTimestamp;
- }
+WatchTimeKey WatchTimeReporter::GetPowerKey(bool is_on_battery_power) {
+ return is_on_battery_power ? NORMAL_KEY(Battery) : NORMAL_KEY(Ac);
+}
+#undef NORMAL_KEY
- // Stop the timer if this is supposed to be our last tick.
- if (is_finalizing) {
- end_timestamp_ = kNoTimestamp;
- underflow_count_ = 0;
- reporting_timer_.Stop();
- }
+#define FOREGROUND_KEY(key) \
+ ((properties_->has_video && properties_->has_audio) \
+ ? (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
+ : WatchTimeKey::kAudioVideo##key) \
+ : properties_->has_audio ? WatchTimeKey::kAudio##key \
+ : WatchTimeKey::kVideo##key)
+
+std::unique_ptr<WatchTimeComponent<bool>>
+WatchTimeReporter::CreateControlsComponent() {
+ DCHECK(!is_background_);
+
+ std::vector<WatchTimeKey> keys_to_finalize{FOREGROUND_KEY(NativeControlsOn),
+ FOREGROUND_KEY(NativeControlsOff)};
+
+ return std::make_unique<WatchTimeComponent<bool>>(
+ false, std::move(keys_to_finalize),
+ base::BindRepeating(&WatchTimeReporter::GetControlsKey,
+ base::Unretained(this)),
+ get_media_time_cb_, recorder_.get());
}
-void WatchTimeReporter::OnDisplayTypeChanged(
- blink::WebMediaPlayer::DisplayType display_type) {
- if (muted_reporter_)
- muted_reporter_->OnDisplayTypeChanged(display_type);
+WatchTimeKey WatchTimeReporter::GetControlsKey(bool has_native_controls) {
+ return has_native_controls ? FOREGROUND_KEY(NativeControlsOn)
+ : FOREGROUND_KEY(NativeControlsOff);
+}
- display_type_ = display_type;
+#undef FOREGROUND_KEY
- if (!reporting_timer_.IsRunning())
- return;
+#define DISPLAY_TYPE_KEY(key) \
+ (properties_->has_audio ? (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
+ : WatchTimeKey::kAudioVideo##key) \
+ : WatchTimeKey::kVideo##key)
- if (display_type_for_recording_ == display_type_) {
- end_timestamp_for_display_type_ = kNoTimestamp;
- return;
- }
+std::unique_ptr<WatchTimeComponent<WatchTimeReporter::DisplayType>>
+WatchTimeReporter::CreateDisplayTypeComponent() {
+ DCHECK(properties_->has_video);
+ DCHECK(!is_background_);
- end_timestamp_for_display_type_ = get_media_time_cb_.Run();
- reporting_timer_.Start(FROM_HERE, reporting_interval_, this,
- &WatchTimeReporter::UpdateWatchTime);
+ std::vector<WatchTimeKey> keys_to_finalize{
+ DISPLAY_TYPE_KEY(DisplayInline), DISPLAY_TYPE_KEY(DisplayFullscreen),
+ DISPLAY_TYPE_KEY(DisplayPictureInPicture)};
+
+ return std::make_unique<WatchTimeComponent<DisplayType>>(
+ DisplayType::kInline, std::move(keys_to_finalize),
+ base::BindRepeating(&WatchTimeReporter::GetDisplayTypeKey,
+ base::Unretained(this)),
+ get_media_time_cb_, recorder_.get());
}
+WatchTimeKey WatchTimeReporter::GetDisplayTypeKey(DisplayType display_type) {
+ switch (display_type) {
+ case DisplayType::kInline:
+ return DISPLAY_TYPE_KEY(DisplayInline);
+ case DisplayType::kFullscreen:
+ return DISPLAY_TYPE_KEY(DisplayFullscreen);
+ case DisplayType::kPictureInPicture:
+ return DISPLAY_TYPE_KEY(DisplayPictureInPicture);
+ }
+}
+
+#undef DISPLAY_TYPE_KEY
+
} // namespace media
diff --git a/chromium/media/blink/watch_time_reporter.h b/chromium/media/blink/watch_time_reporter.h
index 76c508e4c7b..19cb9ff48c5 100644
--- a/chromium/media/blink/watch_time_reporter.h
+++ b/chromium/media/blink/watch_time_reporter.h
@@ -17,6 +17,7 @@
#include "media/base/timestamp_constants.h"
#include "media/base/video_codecs.h"
#include "media/blink/media_blink_export.h"
+#include "media/blink/watch_time_component.h"
#include "media/mojo/interfaces/media_metrics_provider.mojom.h"
#include "media/mojo/interfaces/watch_time_recorder.mojom.h"
#include "third_party/blink/public/platform/web_media_player.h"
@@ -56,6 +57,7 @@ namespace media {
// old metric finalized as accurately as possible.
class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
public:
+ using DisplayType = blink::WebMediaPlayer::DisplayType;
using GetMediaTimeCB = base::RepeatingCallback<base::TimeDelta(void)>;
// Constructor for the reporter; all requested metadata should be fully known
@@ -78,6 +80,7 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
// TODO(dalecurtis): Should we only report when rate == 1.0? Should we scale
// the elapsed media time instead?
WatchTimeReporter(mojom::PlaybackPropertiesPtr properties,
+ const gfx::Size& initial_natural_size,
GetMediaTimeCB get_media_time_cb,
mojom::MediaMetricsProvider* provider,
scoped_refptr<base::SequencedTaskRunner> task_runner,
@@ -130,29 +133,19 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
void OnDisplayTypeFullscreen();
void OnDisplayTypePictureInPicture();
- // Sets the audio and video decoder names for reporting. Similar to OnError(),
- // this value is always sent to the recorder regardless of whether we're
- // currently reporting watch time or not. Must only be set once.
- void SetAudioDecoderName(const std::string& name);
- void SetVideoDecoderName(const std::string& name);
+ // Mutates various properties that may change over the lifetime of a playback
+ // but for which we don't want to interrupt reporting for. UMA watch time will
+ // not be interrupted by changes to these properties, while UKM will.
+ void UpdateSecondaryProperties(
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties);
// Notifies the autoplay status of the playback. Must not be called multiple
// times with different values.
void SetAutoplayInitiated(bool autoplay_initiated);
- // Setup the reporting interval to be immediate to avoid spinning real time
- // within the unit test.
- void set_reporting_interval_for_testing() {
- reporting_interval_ = base::TimeDelta();
- }
-
- void set_is_on_battery_power_for_testing(bool on_battery_power) {
- is_on_battery_power_ = on_battery_power;
- }
-
- void OnPowerStateChangeForTesting(bool on_battery_power) {
- OnPowerStateChange(on_battery_power);
- }
+ // Updates the duration maintained by the recorder. May be called any number
+ // of times during playback.
+ void OnDurationChanged(base::TimeDelta duration);
private:
friend class WatchTimeReporterTest;
@@ -161,6 +154,7 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
WatchTimeReporter(mojom::PlaybackPropertiesPtr properties,
bool is_background,
bool is_muted,
+ const gfx::Size& initial_natural_size,
GetMediaTimeCB get_media_time_cb,
mojom::MediaMetricsProvider* provider,
scoped_refptr<base::SequencedTaskRunner> task_runner,
@@ -172,18 +166,36 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
// resume events because we report watch time in terms of elapsed media time
// and not in terms of elapsed real time.
void OnPowerStateChange(bool on_battery_power) override;
+ void OnNativeControlsChanged(bool has_native_controls);
+ void OnDisplayTypeChanged(blink::WebMediaPlayer::DisplayType display_type);
- bool ShouldReportWatchTime();
+ bool ShouldReportWatchTime() const;
+ bool ShouldReportingTimerRun() const;
void MaybeStartReportingTimer(base::TimeDelta start_timestamp);
enum class FinalizeTime { IMMEDIATELY, ON_NEXT_UPDATE };
void MaybeFinalizeWatchTime(FinalizeTime finalize_time);
+ void RestartTimerForHysteresis();
+
+ // UpdateWatchTime() both records watch time and processes any finalize event.
+ void RecordWatchTime();
void UpdateWatchTime();
- void OnDisplayTypeChanged(blink::WebMediaPlayer::DisplayType display_type);
+
+ // Helper methods for creating the components that make up the watch time
+ // report. All components except the base component require a creation method
+ // and a conversion method to get the correct WatchTimeKey.
+ std::unique_ptr<WatchTimeComponent<bool>> CreateBaseComponent();
+ std::unique_ptr<WatchTimeComponent<bool>> CreatePowerComponent();
+ WatchTimeKey GetPowerKey(bool is_on_battery_power);
+ std::unique_ptr<WatchTimeComponent<bool>> CreateControlsComponent();
+ WatchTimeKey GetControlsKey(bool has_native_controls);
+ std::unique_ptr<WatchTimeComponent<DisplayType>> CreateDisplayTypeComponent();
+ WatchTimeKey GetDisplayTypeKey(DisplayType display_type);
// Initialized during construction.
const mojom::PlaybackPropertiesPtr properties_;
const bool is_background_;
const bool is_muted_;
+ const gfx::Size initial_natural_size_;
const GetMediaTimeCB get_media_time_cb_;
mojom::WatchTimeRecorderPtr recorder_;
@@ -194,44 +206,27 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
base::RepeatingTimer reporting_timer_;
- // Updated by the OnXXX() methods above.
- bool is_on_battery_power_ = false;
+ // Updated by the OnXXX() methods above; controls timer state.
bool is_playing_ = false;
bool is_visible_ = true;
- bool has_native_controls_ = false;
+ bool is_seeking_ = false;
+ bool in_shutdown_ = false;
double volume_ = 1.0;
+
int underflow_count_ = 0;
std::vector<base::TimeDelta> pending_underflow_events_;
- blink::WebMediaPlayer::DisplayType display_type_ =
- blink::WebMediaPlayer::DisplayType::kInline;
- blink::WebMediaPlayer::DisplayType display_type_for_recording_ =
- blink::WebMediaPlayer::DisplayType::kInline;
-
- // The last media timestamp seen by UpdateWatchTime().
- base::TimeDelta last_media_timestamp_ = kNoTimestamp;
- base::TimeDelta last_media_power_timestamp_ = kNoTimestamp;
- base::TimeDelta last_media_controls_timestamp_ = kNoTimestamp;
- base::TimeDelta last_media_display_type_timestamp_ = kNoTimestamp;
-
- // The starting and ending timestamps used for reporting watch time.
- base::TimeDelta start_timestamp_;
- base::TimeDelta end_timestamp_ = kNoTimestamp;
-
- // Similar to the above but tracks watch time relative to whether or not
- // battery or AC power is being used.
- base::TimeDelta start_timestamp_for_power_;
- base::TimeDelta end_timestamp_for_power_ = kNoTimestamp;
-
- // Similar to the above but tracks watch time relative to whether or not
- // native controls are being used.
- base::TimeDelta start_timestamp_for_controls_;
- base::TimeDelta end_timestamp_for_controls_ = kNoTimestamp;
-
- // Similar to the above but tracks watch time relative to whether the display
- // type is inline, fullscreen or picture-in-picture.
- base::TimeDelta start_timestamp_for_display_type_;
- base::TimeDelta end_timestamp_for_display_type_ = kNoTimestamp;
+ // The various components making up WatchTime. If the |base_component_| is
+ // finalized, all reporting will be stopped and finalized using its ending
+ // timestamp.
+ //
+ // Note: If you are adding a new type of component (i.e., one that is not
+ // bool, etc) you must also update the end of the WatchTimeComponent .cc file
+ // to add a new template class definition or you will get linking errors.
+ std::unique_ptr<WatchTimeComponent<bool>> base_component_;
+ std::unique_ptr<WatchTimeComponent<bool>> power_component_;
+ std::unique_ptr<WatchTimeComponent<DisplayType>> display_type_component_;
+ std::unique_ptr<WatchTimeComponent<bool>> controls_component_;
// Special case reporter for handling background video watch time. Configured
// as an audio only WatchTimeReporter with |is_background_| set to true.
diff --git a/chromium/media/blink/watch_time_reporter_unittest.cc b/chromium/media/blink/watch_time_reporter_unittest.cc
index d194003a1c6..481d26e6591 100644
--- a/chromium/media/blink/watch_time_reporter_unittest.cc
+++ b/chromium/media/blink/watch_time_reporter_unittest.cc
@@ -25,6 +25,7 @@ namespace media {
constexpr gfx::Size kSizeJustRight = gfx::Size(201, 201);
using blink::WebMediaPlayer;
+using testing::_;
#define EXPECT_WATCH_TIME(key, value) \
do { \
@@ -76,17 +77,17 @@ using blink::WebMediaPlayer;
// finalize event is expected to finalize.
#define EXPECT_POWER_WATCH_TIME_FINALIZED() \
EXPECT_CALL(*this, OnPowerWatchTimeFinalized()) \
- .Times(14) \
+ .Times(2) \
.RetiresOnSaturation();
#define EXPECT_CONTROLS_WATCH_TIME_FINALIZED() \
EXPECT_CALL(*this, OnControlsWatchTimeFinalized()) \
- .Times(8) \
+ .Times(2) \
.RetiresOnSaturation();
#define EXPECT_DISPLAY_WATCH_TIME_FINALIZED() \
EXPECT_CALL(*this, OnDisplayWatchTimeFinalized()) \
- .Times(9) \
+ .Times(3) \
.RetiresOnSaturation();
using WatchTimeReporterTestData = std::tuple<bool, bool>;
@@ -195,22 +196,23 @@ class WatchTimeReporterTest
void OnError(PipelineStatus status) override { parent_->OnError(status); }
- void UpdateUnderflowCount(int32_t count) override {
- parent_->OnUnderflowUpdate(count);
+ void UpdateSecondaryProperties(
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties) override {
+ parent_->OnUpdateSecondaryProperties(std::move(secondary_properties));
}
- void SetAudioDecoderName(const std::string& name) override {
- parent_->OnSetAudioDecoderName(name);
- }
-
- void SetVideoDecoderName(const std::string& name) override {
- parent_->OnSetVideoDecoderName(name);
+ void UpdateUnderflowCount(int32_t count) override {
+ parent_->OnUnderflowUpdate(count);
}
void SetAutoplayInitiated(bool value) override {
parent_->OnSetAutoplayInitiated(value);
}
+ void OnDurationChanged(base::TimeDelta duration) override {
+ parent_->OnDurationChanged(duration);
+ }
+
private:
WatchTimeReporterTest* parent_;
@@ -272,9 +274,9 @@ class WatchTimeReporterTest
EXPECT_WATCH_TIME_FINALIZED();
wtr_.reset(new WatchTimeReporter(
- mojom::PlaybackProperties::New(
- kUnknownAudioCodec, kUnknownVideoCodec, has_audio_, has_video_,
- false, false, is_mse, is_encrypted, false, initial_video_size),
+ mojom::PlaybackProperties::New(has_audio_, has_video_, false, false,
+ is_mse, is_encrypted, false),
+ initial_video_size,
base::BindRepeating(&WatchTimeReporterTest::GetCurrentMediaTime,
base::Unretained(this)),
&fake_metrics_provider_,
@@ -304,7 +306,11 @@ class WatchTimeReporterTest
// PowerMonitorTestSource since that results in a posted tasks which interfere
// with our ability to test the timer.
void SetOnBatteryPower(bool on_battery_power) {
- wtr_->is_on_battery_power_ = on_battery_power;
+ wtr_->power_component_->SetCurrentValue(on_battery_power);
+ }
+
+ bool IsOnBatteryPower() const {
+ return wtr_->power_component_->current_value_for_testing();
}
void OnPowerStateChange(bool on_battery_power) {
@@ -450,7 +456,7 @@ class WatchTimeReporterTest
if (TestFlags & kStartOnBattery)
SetOnBatteryPower(true);
else
- ASSERT_FALSE(wtr_->is_on_battery_power_);
+ ASSERT_FALSE(IsOnBatteryPower());
EXPECT_WATCH_TIME(All, kWatchTime1);
EXPECT_WATCH_TIME(Src, kWatchTime1);
@@ -575,9 +581,10 @@ class WatchTimeReporterTest
MOCK_METHOD2(OnWatchTimeUpdate, void(WatchTimeKey, base::TimeDelta));
MOCK_METHOD1(OnUnderflowUpdate, void(int));
MOCK_METHOD1(OnError, void(PipelineStatus));
- MOCK_METHOD1(OnSetAudioDecoderName, void(const std::string&));
- MOCK_METHOD1(OnSetVideoDecoderName, void(const std::string&));
+ MOCK_METHOD1(OnUpdateSecondaryProperties,
+ void(mojom::SecondaryPlaybackPropertiesPtr));
MOCK_METHOD1(OnSetAutoplayInitiated, void(bool));
+ MOCK_METHOD1(OnDurationChanged, void(base::TimeDelta));
const bool has_video_;
const bool has_audio_;
@@ -596,6 +603,8 @@ class WatchTimeReporterTest
DISALLOW_COPY_AND_ASSIGN(WatchTimeReporterTest);
};
+class DisplayTypeWatchTimeReporterTest : public WatchTimeReporterTest {};
+
// Tests that watch time reporting is appropriately enabled or disabled.
TEST_P(WatchTimeReporterTest, WatchTimeReporter) {
EXPECT_CALL(*this, GetCurrentMediaTime())
@@ -681,6 +690,23 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterBasic) {
wtr_.reset();
}
+TEST_P(WatchTimeReporterTest, WatchTimeReporterDuration) {
+ constexpr base::TimeDelta kDuration1 = base::TimeDelta::FromSeconds(5);
+ constexpr base::TimeDelta kDuration2 = base::TimeDelta::FromSeconds(10);
+ Initialize(true, true, kSizeJustRight);
+
+ EXPECT_CALL(*this, OnDurationChanged(kDuration1))
+ .Times((has_audio_ && has_video_) ? 3 : 2);
+ wtr_->OnDurationChanged(kDuration1);
+ CycleReportingTimer();
+
+ EXPECT_CALL(*this, OnDurationChanged(kDuration2))
+ .Times((has_audio_ && has_video_) ? 3 : 2);
+ wtr_->OnDurationChanged(kDuration2);
+ CycleReportingTimer();
+ wtr_.reset();
+}
+
TEST_P(WatchTimeReporterTest, WatchTimeReporterUnderflow) {
constexpr base::TimeDelta kWatchTimeFirst = base::TimeDelta::FromSeconds(5);
constexpr base::TimeDelta kWatchTimeEarly = base::TimeDelta::FromSeconds(10);
@@ -750,24 +776,32 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterUnderflow) {
wtr_.reset();
}
-TEST_P(WatchTimeReporterTest, WatchTimeReporterDecoderNames) {
+// Verify secondary properties pass through correctly.
+TEST_P(WatchTimeReporterTest, WatchTimeReporterSecondaryProperties) {
Initialize(true, true, kSizeJustRight);
- // Setup the initial decoder names; these should be sent immediately as soon
- // they're called. Each should be called thrice, once for foreground, once for
- // background, and once for muted reporting.
- const std::string kAudioDecoderName = "FirstAudioDecoder";
- const std::string kVideoDecoderName = "FirstVideoDecoder";
- if (has_audio_) {
- EXPECT_CALL(*this, OnSetAudioDecoderName(kAudioDecoderName))
- .Times((has_audio_ && has_video_) ? 3 : 2);
- wtr_->SetAudioDecoderName(kAudioDecoderName);
- }
- if (has_video_) {
- EXPECT_CALL(*this, OnSetVideoDecoderName(kVideoDecoderName))
- .Times((has_audio_ && has_video_) ? 3 : 2);
- wtr_->SetVideoDecoderName(kVideoDecoderName);
- }
+ auto properties = mojom::SecondaryPlaybackProperties::New(
+ has_audio_ ? kCodecAAC : kUnknownAudioCodec,
+ has_video_ ? kCodecH264 : kUnknownVideoCodec,
+ has_audio_ ? "FirstAudioDecoder" : "",
+ has_video_ ? "FirstVideoDecoder" : "",
+ has_video_ ? gfx::Size(800, 600) : gfx::Size());
+
+ // Get a pointer to our original properties since we're not allowed to use
+ // lambda capture for movable types in Chromium C++ yet.
+ auto* properies_ptr = properties.get();
+
+ // Muted watch time is only reported for audio+video.
+ EXPECT_CALL(*this, OnUpdateSecondaryProperties(_))
+ .Times((has_audio_ && has_video_) ? 3 : 2)
+ .WillRepeatedly([properies_ptr](auto secondary_properties) {
+ ASSERT_TRUE(properies_ptr->Equals(*secondary_properties));
+ });
+ wtr_->UpdateSecondaryProperties(properties.Clone());
+ CycleReportingTimer();
+
+ // Ensure expectations are met before |properies| goes out of scope.
+ testing::Mock::VerifyAndClearExpectations(this);
}
TEST_P(WatchTimeReporterTest, WatchTimeReporterAutoplayInitiated) {
@@ -1016,7 +1050,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterHiddenControlsBackground) {
wtr_.reset();
}
-TEST_P(WatchTimeReporterTest, WatchTimeReporterHiddenDisplayTypeBackground) {
+TEST_P(DisplayTypeWatchTimeReporterTest,
+ WatchTimeReporterHiddenDisplayTypeBackground) {
constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(8);
constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(16);
EXPECT_CALL(*this, GetCurrentMediaTime())
@@ -1152,8 +1187,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterMultiplePartialFinalize) {
wtr_.reset();
}
- // Transition display type and battery.
- {
+ // Transition display type and battery. Test only works with video.
+ if (has_video_) {
EXPECT_CALL(*this, GetCurrentMediaTime())
.WillOnce(testing::Return(base::TimeDelta()))
.WillOnce(testing::Return(kWatchTime1))
@@ -1191,8 +1226,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterMultiplePartialFinalize) {
wtr_.reset();
}
- // Transition controls, battery and display type.
- {
+ // Transition controls, battery and display type. Test only works with video.
+ if (has_video_) {
EXPECT_CALL(*this, GetCurrentMediaTime())
.WillOnce(testing::Return(base::TimeDelta()))
.WillOnce(testing::Return(kWatchTime1))
@@ -1279,6 +1314,44 @@ TEST_P(WatchTimeReporterTest, SeekFinalizes) {
wtr_->OnSeeking();
}
+// Tests that seeking can't be undone by anything other than OnPlaying().
+TEST_P(WatchTimeReporterTest, SeekOnlyClearedByPlaying) {
+ constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(10);
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillRepeatedly(testing::Return(kWatchTime));
+ Initialize(true, true, kSizeJustRight);
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMonitoring());
+
+ EXPECT_WATCH_TIME(Ac, kWatchTime);
+ EXPECT_WATCH_TIME(All, kWatchTime);
+ EXPECT_WATCH_TIME(Eme, kWatchTime);
+ EXPECT_WATCH_TIME(Mse, kWatchTime);
+ EXPECT_WATCH_TIME(NativeControlsOff, kWatchTime);
+ EXPECT_WATCH_TIME_IF_VIDEO(DisplayInline, kWatchTime);
+ EXPECT_WATCH_TIME_FINALIZED();
+ wtr_->OnSeeking();
+ EXPECT_FALSE(IsMonitoring());
+
+ wtr_->OnHidden();
+ wtr_->OnShown();
+ wtr_->OnVolumeChange(0);
+ wtr_->OnVolumeChange(1);
+ EXPECT_FALSE(IsMonitoring());
+
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMonitoring());
+
+ // Because the above calls may tickle the background and muted reporters,
+ // we'll receive 2-3 finalize calls upon destruction if they exist.
+ if (has_audio_ && has_video_)
+ EXPECT_WATCH_TIME_FINALIZED();
+ EXPECT_WATCH_TIME_FINALIZED();
+ EXPECT_WATCH_TIME_FINALIZED();
+ wtr_.reset();
+}
+
// Tests that seeking causes an immediate finalization, but does not trample a
// previously set finalize time.
TEST_P(WatchTimeReporterTest, SeekFinalizeDoesNotTramplePreviousFinalize) {
@@ -1553,7 +1626,7 @@ TEST_P(WatchTimeReporterTest, OnControlsChangeToNative) {
[this]() { OnNativeControlsEnabled(true); });
}
-TEST_P(WatchTimeReporterTest,
+TEST_P(DisplayTypeWatchTimeReporterTest,
OnDisplayTypeChangeHysteresisFullscreenContinuation) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeExitDoesNotRequireCurrentTime |
@@ -1563,13 +1636,15 @@ TEST_P(WatchTimeReporterTest,
});
}
-TEST_P(WatchTimeReporterTest, OnDisplayTypeChangeHysteresisNativeFinalized) {
+TEST_P(DisplayTypeWatchTimeReporterTest,
+ OnDisplayTypeChangeHysteresisNativeFinalized) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeDisplayWatchTime | kStartWithDisplayFullscreen>(
[this]() { OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kInline); });
}
-TEST_P(WatchTimeReporterTest, OnDisplayTypeChangeHysteresisInlineContinuation) {
+TEST_P(DisplayTypeWatchTimeReporterTest,
+ OnDisplayTypeChangeHysteresisInlineContinuation) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeExitDoesNotRequireCurrentTime>([this]() {
OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
@@ -1577,21 +1652,24 @@ TEST_P(WatchTimeReporterTest, OnDisplayTypeChangeHysteresisInlineContinuation) {
});
}
-TEST_P(WatchTimeReporterTest, OnDisplayTypeChangeHysteresisNativeOffFinalized) {
+TEST_P(DisplayTypeWatchTimeReporterTest,
+ OnDisplayTypeChangeHysteresisNativeOffFinalized) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeDisplayWatchTime>([this]() {
OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
});
}
-TEST_P(WatchTimeReporterTest, OnDisplayTypeChangeInlineToFullscreen) {
+TEST_P(DisplayTypeWatchTimeReporterTest,
+ OnDisplayTypeChangeInlineToFullscreen) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeDisplayWatchTime | kStartWithDisplayFullscreen |
kTransitionDisplayWatchTime>(
[this]() { OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kInline); });
}
-TEST_P(WatchTimeReporterTest, OnDisplayTypeChangeFullscreenToInline) {
+TEST_P(DisplayTypeWatchTimeReporterTest,
+ OnDisplayTypeChangeFullscreenToInline) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeDisplayWatchTime | kTransitionDisplayWatchTime>(
[this]() {
@@ -1921,11 +1999,19 @@ INSTANTIATE_TEST_CASE_P(WatchTimeReporterTest,
WatchTimeReporterTest,
testing::ValuesIn({// has_video, has_audio
std::make_tuple(true, true),
- // has_audio
- std::make_tuple(true, false),
// has_video
+ std::make_tuple(true, false),
+ // has_audio
std::make_tuple(false, true)}));
+// Separate test set since display tests only work with video.
+INSTANTIATE_TEST_CASE_P(DisplayTypeWatchTimeReporterTest,
+ DisplayTypeWatchTimeReporterTest,
+ testing::ValuesIn({// has_video, has_audio
+ std::make_tuple(true, true),
+ // has_video
+ std::make_tuple(true, false)}));
+
// Separate test set since muted tests only work with audio+video.
INSTANTIATE_TEST_CASE_P(MutedWatchTimeReporterTest,
MutedWatchTimeReporterTest,
diff --git a/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc b/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
index 4be9a0f7a3d..941ca3c0aad 100644
--- a/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
+++ b/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
@@ -68,17 +68,17 @@ CdmSessionType convertSessionType(
blink::WebEncryptedMediaSessionType session_type) {
switch (session_type) {
case blink::WebEncryptedMediaSessionType::kTemporary:
- return CdmSessionType::TEMPORARY_SESSION;
+ return CdmSessionType::kTemporary;
case blink::WebEncryptedMediaSessionType::kPersistentLicense:
- return CdmSessionType::PERSISTENT_LICENSE_SESSION;
- case blink::WebEncryptedMediaSessionType::kPersistentReleaseMessage:
- return CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION;
+ return CdmSessionType::kPersistentLicense;
+ case blink::WebEncryptedMediaSessionType::kPersistentUsageRecord:
+ return CdmSessionType::kPersistentUsageRecord;
case blink::WebEncryptedMediaSessionType::kUnknown:
break;
}
NOTREACHED();
- return CdmSessionType::TEMPORARY_SESSION;
+ return CdmSessionType::kTemporary;
}
bool SanitizeInitData(EmeInitDataType init_data_type,
@@ -176,7 +176,7 @@ bool SanitizeResponse(const std::string& key_system,
if (IsClearKey(key_system) || IsExternalClearKey(key_system)) {
std::string key_string(response, response + response_length);
KeyIdAndKeyPairs keys;
- CdmSessionType session_type = CdmSessionType::TEMPORARY_SESSION;
+ CdmSessionType session_type = CdmSessionType::kTemporary;
if (!ExtractKeysFromJWKSet(key_string, &keys, &session_type))
return false;
@@ -332,8 +332,7 @@ void WebContentDecryptionModuleSessionImpl::InitializeNewSession(
// instance value.
// 10.9 Use the cdm to execute the following steps:
CdmSessionType cdm_session_type = convertSessionType(session_type);
- is_persistent_session_ =
- cdm_session_type != CdmSessionType::TEMPORARY_SESSION;
+ is_persistent_session_ = cdm_session_type != CdmSessionType::kTemporary;
adapter_->InitializeNewSession(
eme_init_data_type, sanitized_init_data, cdm_session_type,
std::unique_ptr<NewSessionCdmPromise>(new NewSessionCdmResultPromise(
@@ -371,7 +370,7 @@ void WebContentDecryptionModuleSessionImpl::Load(
// constructor (and removed from initializeNewSession()).
is_persistent_session_ = true;
adapter_->LoadSession(
- CdmSessionType::PERSISTENT_LICENSE_SESSION, sanitized_session_id,
+ CdmSessionType::kPersistentLicense, sanitized_session_id,
std::unique_ptr<NewSessionCdmPromise>(new NewSessionCdmResultPromise(
result, adapter_->GetKeySystemUMAPrefix(), kLoadSessionUMAName,
base::Bind(
diff --git a/chromium/media/blink/webmediacapabilitiesclient_impl.cc b/chromium/media/blink/webmediacapabilitiesclient_impl.cc
index 3a71b87805c..55b19afeea0 100644
--- a/chromium/media/blink/webmediacapabilitiesclient_impl.cc
+++ b/chromium/media/blink/webmediacapabilitiesclient_impl.cc
@@ -22,6 +22,7 @@
#include "third_party/blink/public/platform/modules/media_capabilities/web_media_configuration.h"
#include "third_party/blink/public/platform/modules/media_capabilities/web_video_configuration.h"
#include "third_party/blink/public/platform/platform.h"
+#include "third_party/blink/public/platform/scoped_web_callbacks.h"
namespace media {
@@ -140,17 +141,25 @@ WebMediaCapabilitiesClientImpl::WebMediaCapabilitiesClientImpl() = default;
WebMediaCapabilitiesClientImpl::~WebMediaCapabilitiesClientImpl() = default;
+namespace {
void VideoPerfInfoCallback(
- std::unique_ptr<blink::WebMediaCapabilitiesQueryCallbacks> callbacks,
+ blink::ScopedWebCallbacks<blink::WebMediaCapabilitiesQueryCallbacks>
+ scoped_callbacks,
std::unique_ptr<blink::WebMediaCapabilitiesInfo> info,
bool is_smooth,
bool is_power_efficient) {
DCHECK(info->supported);
info->smooth = is_smooth;
info->power_efficient = is_power_efficient;
- callbacks->OnSuccess(std::move(info));
+ scoped_callbacks.PassCallbacks()->OnSuccess(std::move(info));
}
+void OnGetPerfInfoError(
+ std::unique_ptr<blink::WebMediaCapabilitiesQueryCallbacks> callbacks) {
+ callbacks->OnError();
+}
+} // namespace
+
void WebMediaCapabilitiesClientImpl::DecodingInfo(
const blink::WebMediaConfiguration& configuration,
std::unique_ptr<blink::WebMediaCapabilitiesQueryCallbacks> callbacks) {
@@ -209,8 +218,16 @@ void WebMediaCapabilitiesClientImpl::DecodingInfo(
decode_history_ptr_->GetPerfInfo(
std::move(features),
- base::BindOnce(&VideoPerfInfoCallback, std::move(callbacks),
- std::move(info)));
+ base::BindOnce(
+ &VideoPerfInfoCallback,
+ blink::MakeScopedWebCallbacks(std::move(callbacks),
+ base::BindOnce(&OnGetPerfInfoError)),
+ std::move(info)));
+}
+
+void WebMediaCapabilitiesClientImpl::BindVideoDecodePerfHistoryForTests(
+ mojom::VideoDecodePerfHistoryPtr decode_history_ptr) {
+ decode_history_ptr_ = std::move(decode_history_ptr);
}
} // namespace media
diff --git a/chromium/media/blink/webmediacapabilitiesclient_impl.h b/chromium/media/blink/webmediacapabilitiesclient_impl.h
index 6e4e0c4167b..779b9998a03 100644
--- a/chromium/media/blink/webmediacapabilitiesclient_impl.h
+++ b/chromium/media/blink/webmediacapabilitiesclient_impl.h
@@ -24,6 +24,9 @@ class MEDIA_BLINK_EXPORT WebMediaCapabilitiesClientImpl
const blink::WebMediaConfiguration&,
std::unique_ptr<blink::WebMediaCapabilitiesQueryCallbacks>) override;
+ void BindVideoDecodePerfHistoryForTests(
+ mojom::VideoDecodePerfHistoryPtr decode_history_ptr);
+
private:
mojom::VideoDecodePerfHistoryPtr decode_history_ptr_;
diff --git a/chromium/media/blink/webmediacapabilitiesclient_impl_unittest.cc b/chromium/media/blink/webmediacapabilitiesclient_impl_unittest.cc
new file mode 100644
index 00000000000..1f830b52976
--- /dev/null
+++ b/chromium/media/blink/webmediacapabilitiesclient_impl_unittest.cc
@@ -0,0 +1,91 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <memory>
+
+#include "base/run_loop.h"
+#include "base/test/scoped_task_environment.h"
+#include "media/blink/webmediacapabilitiesclient_impl.h"
+#include "media/mojo/interfaces/video_decode_perf_history.mojom.h"
+#include "mojo/public/cpp/bindings/binding.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/blink/public/platform/modules/media_capabilities/web_media_capabilities_info.h"
+#include "third_party/blink/public/platform/modules/media_capabilities/web_media_configuration.h"
+
+using ::testing::_;
+
+namespace media {
+
+class MockVideoDecodePerfHistory : public mojom::VideoDecodePerfHistory {
+ public:
+ explicit MockVideoDecodePerfHistory(
+ mojom::VideoDecodePerfHistoryPtr* decode_perf_history_ptr)
+ : binding_(this, mojo::MakeRequest(decode_perf_history_ptr)) {}
+
+ MOCK_METHOD2(GetPerfInfo,
+ void(mojom::PredictionFeaturesPtr, GetPerfInfoCallback));
+
+ void CloseMojoBinding() { binding_.Close(); }
+
+ private:
+ mojo::Binding<mojom::VideoDecodePerfHistory> binding_;
+};
+
+class MockWebMediaCapabilitiesQueryCallbacks
+ : public blink::WebMediaCapabilitiesQueryCallbacks {
+ public:
+ ~MockWebMediaCapabilitiesQueryCallbacks() override = default;
+
+ void OnSuccess(std::unique_ptr<blink::WebMediaCapabilitiesInfo>) override {}
+ MOCK_METHOD0(OnError, void());
+};
+
+// Verify that query callback is called even if mojo connection is lost while
+// waiting for the result of mojom.VideoDecodePerfHistory.GetPerfInfo() call.
+// See https://crbug.com/847211
+TEST(WebMediaCapabilitiesClientImplTest, RunCallbackEvenIfMojoDisconnects) {
+ static const blink::WebVideoConfiguration kFakeVideoConfiguration{
+ blink::WebString::FromASCII("video/webm"), // mime type
+ blink::WebString::FromASCII("vp09.00.51.08.01.01.01.01"), // codec
+ 1920, // width
+ 1080, // height
+ 2661034, // bitrate
+ 25, // framerate
+ };
+
+ static const blink::WebMediaConfiguration kFakeMediaConfiguration{
+ blink::MediaConfigurationType::kFile,
+ base::nullopt, // audio configuration
+ kFakeVideoConfiguration, // video configuration
+ };
+
+ using ::testing::InvokeWithoutArgs;
+
+ mojom::VideoDecodePerfHistoryPtr decode_perf_history_ptr;
+ MockVideoDecodePerfHistory decode_perf_history_impl(&decode_perf_history_ptr);
+
+ ASSERT_TRUE(decode_perf_history_ptr.is_bound());
+
+ WebMediaCapabilitiesClientImpl media_capabilities_client_impl;
+ media_capabilities_client_impl.BindVideoDecodePerfHistoryForTests(
+ std::move(decode_perf_history_ptr));
+
+ auto query_callbacks =
+ std::make_unique<MockWebMediaCapabilitiesQueryCallbacks>();
+
+ EXPECT_CALL(decode_perf_history_impl, GetPerfInfo(_, _))
+ .WillOnce(
+ InvokeWithoutArgs(&decode_perf_history_impl,
+ &MockVideoDecodePerfHistory::CloseMojoBinding));
+
+ EXPECT_CALL(*query_callbacks, OnError());
+
+ media_capabilities_client_impl.DecodingInfo(kFakeMediaConfiguration,
+ std::move(query_callbacks));
+
+ base::RunLoop().RunUntilIdle();
+}
+
+} // namespace media
diff --git a/chromium/media/blink/webmediaplayer_cast_android.cc b/chromium/media/blink/webmediaplayer_cast_android.cc
index bfc59020877..b1af43406a8 100644
--- a/chromium/media/blink/webmediaplayer_cast_android.cc
+++ b/chromium/media/blink/webmediaplayer_cast_android.cc
@@ -129,7 +129,6 @@ scoped_refptr<VideoFrame> MakeTextFrameForCast(
bitmap.getPixels());
gpu::Mailbox texture_mailbox;
- gl->GenMailboxCHROMIUM(texture_mailbox.name);
gl->ProduceTextureDirectCHROMIUM(remote_playback_texture_id,
texture_mailbox.name);
@@ -247,7 +246,7 @@ void WebMediaPlayerCast::OnConnectedToRemoteDevice(
is_remote_ = true;
initializing_ = true;
paused_ = false;
- client_->PlaybackStateChanged();
+ client_->RequestPlay();
remote_playback_message_ = remote_playback_message;
webmediaplayer_->SuspendForRemote();
@@ -312,7 +311,7 @@ void WebMediaPlayerCast::OnMediaPlayerPlay() {
if (is_remote_ && paused_) {
paused_ = false;
remote_time_at_ = base::TimeTicks::Now();
- client_->PlaybackStateChanged();
+ client_->RequestPlay();
}
// Blink expects a timeChanged() in response to a seek().
if (should_notify_time_changed_)
@@ -323,7 +322,7 @@ void WebMediaPlayerCast::OnMediaPlayerPause() {
DVLOG(1) << __func__ << " is_remote_ = " << is_remote_;
if (is_remote_ && !paused_) {
paused_ = true;
- client_->PlaybackStateChanged();
+ client_->RequestPause();
}
}
diff --git a/chromium/media/blink/webmediaplayer_delegate.h b/chromium/media/blink/webmediaplayer_delegate.h
index 112177b27c2..96b66d25c53 100644
--- a/chromium/media/blink/webmediaplayer_delegate.h
+++ b/chromium/media/blink/webmediaplayer_delegate.h
@@ -76,6 +76,12 @@ class WebMediaPlayerDelegate {
// Called when Picture-in-Picture mode is terminated from the
// Picture-in-Picture window.
virtual void OnPictureInPictureModeEnded() = 0;
+
+ // Called when a custom control is clicked on the Picture-in-Picture window.
+ // |control_id| is the identifier for its custom control. This is defined by
+ // the site that calls the web API.
+ virtual void OnPictureInPictureControlClicked(
+ const std::string& control_id) = 0;
};
// Returns true if the host frame is hidden or closed.
diff --git a/chromium/media/blink/webmediaplayer_impl.cc b/chromium/media/blink/webmediaplayer_impl.cc
index a3fc8994fdc..5959f2135ad 100644
--- a/chromium/media/blink/webmediaplayer_impl.cc
+++ b/chromium/media/blink/webmediaplayer_impl.cc
@@ -77,7 +77,6 @@
#include "media/base/android/media_codec_util.h"
#endif
-using blink::WebCanvas;
using blink::WebMediaPlayer;
using blink::WebRect;
using blink::WebSize;
@@ -236,7 +235,6 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
cast_impl_(this, client_, params->context_provider()),
#endif
renderer_factory_selector_(std::move(renderer_factory_selector)),
- surface_manager_(params->surface_manager()),
observer_(params->media_observer()),
max_keyframe_distance_to_disable_background_video_(
params->max_keyframe_distance_to_disable_background_video()),
@@ -247,6 +245,7 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
embedded_media_experience_enabled_(
params->embedded_media_experience_enabled()),
surface_layer_for_video_enabled_(params->use_surface_layer_for_video()),
+ create_bridge_callback_(params->create_bridge_callback()),
request_routing_token_cb_(params->request_routing_token_cb()),
overlay_routing_token_(OverlayInfo::RoutingToken()),
media_metrics_provider_(params->take_metrics_provider()) {
@@ -256,9 +255,6 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
DCHECK(client_);
DCHECK(delegate_);
- if (surface_layer_for_video_enabled_)
- bridge_ = params->create_bridge_callback().Run(this);
-
// If we're supposed to force video overlays, then make sure that they're
// enabled all the time.
always_enable_overlays_ = base::CommandLine::ForCurrentProcess()->HasSwitch(
@@ -331,12 +327,6 @@ WebMediaPlayerImpl::~WebMediaPlayerImpl() {
client_->MediaRemotingStopped(
blink::WebLocalizedString::kMediaRemotingStopNoText);
- // If running in Picture-in-Picture but not in auto-pip, notify the player.
- if (client_->DisplayType() ==
- WebMediaPlayer::DisplayType::kPictureInPicture &&
- !client_->IsInAutoPIP())
- ExitPictureInPicture(base::DoNothing());
-
if (!surface_layer_for_video_enabled_ && video_layer_) {
video_layer_->StopUsingProvider();
}
@@ -381,28 +371,34 @@ void WebMediaPlayerImpl::DemuxerDestructionHelper(
std::move(demuxer)));
}
-void WebMediaPlayerImpl::Load(LoadType load_type,
- const blink::WebMediaPlayerSource& source,
- CORSMode cors_mode) {
+WebMediaPlayer::LoadTiming WebMediaPlayerImpl::Load(
+ LoadType load_type,
+ const blink::WebMediaPlayerSource& source,
+ CORSMode cors_mode) {
DVLOG(1) << __func__;
// Only URL or MSE blob URL is supported.
DCHECK(source.IsURL());
blink::WebURL url = source.GetAsURL();
DVLOG(1) << __func__ << "(" << load_type << ", " << GURL(url) << ", "
<< cors_mode << ")";
+
+ bool is_deferred = false;
+
if (!defer_load_cb_.is_null()) {
- defer_load_cb_.Run(base::Bind(&WebMediaPlayerImpl::DoLoad, AsWeakPtr(),
- load_type, url, cors_mode));
- return;
+ is_deferred = defer_load_cb_.Run(base::BindOnce(
+ &WebMediaPlayerImpl::DoLoad, AsWeakPtr(), load_type, url, cors_mode));
+ } else {
+ DoLoad(load_type, url, cors_mode);
}
- DoLoad(load_type, url, cors_mode);
+
+ return is_deferred ? LoadTiming::kDeferred : LoadTiming::kImmediate;
}
void WebMediaPlayerImpl::OnWebLayerUpdated() {}
void WebMediaPlayerImpl::RegisterContentsLayer(cc::Layer* layer) {
DCHECK(bridge_);
- bridge_->GetCcLayer()->SetContentsOpaque(opaque_);
+ bridge_->SetContentsOpaque(opaque_);
client_->SetCcLayer(layer);
}
@@ -412,16 +408,11 @@ void WebMediaPlayerImpl::UnregisterContentsLayer(cc::Layer* layer) {
}
void WebMediaPlayerImpl::OnSurfaceIdUpdated(viz::SurfaceId surface_id) {
- pip_surface_id_ = surface_id;
-
// TODO(726619): Handle the behavior when Picture-in-Picture mode is
// disabled.
// The viz::SurfaceId may be updated when the video begins playback or when
// the size of the video changes.
- if (client_ &&
- client_->DisplayType() ==
- WebMediaPlayer::DisplayType::kPictureInPicture &&
- !client_->IsInAutoPIP()) {
+ if (client_ && IsInPictureInPicture() && !client_->IsInAutoPIP()) {
delegate_->DidPictureInPictureSurfaceChange(
delegate_id_, surface_id, pipeline_metadata_.natural_size);
}
@@ -438,14 +429,8 @@ bool WebMediaPlayerImpl::SupportsOverlayFullscreenVideo() {
void WebMediaPlayerImpl::EnableOverlay() {
overlay_enabled_ = true;
- if (surface_manager_ && overlay_mode_ == OverlayMode::kUseContentVideoView) {
- overlay_surface_id_.reset();
- surface_created_cb_.Reset(
- base::Bind(&WebMediaPlayerImpl::OnSurfaceCreated, AsWeakPtr()));
- surface_manager_->CreateFullscreenSurface(pipeline_metadata_.natural_size,
- surface_created_cb_.callback());
- } else if (request_routing_token_cb_ &&
- overlay_mode_ == OverlayMode::kUseAndroidOverlay) {
+ if (request_routing_token_cb_ &&
+ overlay_mode_ == OverlayMode::kUseAndroidOverlay) {
overlay_routing_token_is_pending_ = true;
token_available_cb_.Reset(
base::Bind(&WebMediaPlayerImpl::OnOverlayRoutingToken, AsWeakPtr()));
@@ -464,7 +449,6 @@ void WebMediaPlayerImpl::DisableOverlay() {
overlay_enabled_ = false;
if (overlay_mode_ == OverlayMode::kUseContentVideoView) {
surface_created_cb_.Cancel();
- overlay_surface_id_ = SurfaceManager::kNoSurfaceID;
} else if (overlay_mode_ == OverlayMode::kUseAndroidOverlay) {
token_available_cb_.Cancel();
overlay_routing_token_is_pending_ = false;
@@ -534,6 +518,15 @@ void WebMediaPlayerImpl::OnHasNativeControlsChanged(bool has_native_controls) {
void WebMediaPlayerImpl::OnDisplayTypeChanged(
WebMediaPlayer::DisplayType display_type) {
+ if (surface_layer_for_video_enabled_) {
+ vfc_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &VideoFrameCompositor::SetForceSubmit,
+ base::Unretained(compositor_.get()),
+ display_type == WebMediaPlayer::DisplayType::kPictureInPicture));
+ }
+
if (!watch_time_reporter_)
return;
@@ -800,19 +793,20 @@ void WebMediaPlayerImpl::SetVolume(double volume) {
void WebMediaPlayerImpl::EnterPictureInPicture(
blink::WebMediaPlayer::PipWindowOpenedCallback callback) {
- DCHECK(pip_surface_id_.is_valid());
+ DCHECK(bridge_);
+
+ const viz::SurfaceId& surface_id = bridge_->GetSurfaceId();
+ DCHECK(surface_id.is_valid());
// Notifies the browser process that the player should now be in
// Picture-in-Picture mode.
- delegate_->DidPictureInPictureModeStart(delegate_id_, pip_surface_id_,
+ delegate_->DidPictureInPictureModeStart(delegate_id_, surface_id,
pipeline_metadata_.natural_size,
std::move(callback));
}
void WebMediaPlayerImpl::ExitPictureInPicture(
blink::WebMediaPlayer::PipWindowClosedCallback callback) {
- DCHECK(pip_surface_id_.is_valid());
-
// Notifies the browser process that Picture-in-Picture has ended. It will
// clear out the states and close the window.
delegate_->DidPictureInPictureModeEnd(delegate_id_, std::move(callback));
@@ -823,10 +817,7 @@ void WebMediaPlayerImpl::ExitPictureInPicture(
void WebMediaPlayerImpl::RegisterPictureInPictureWindowResizeCallback(
blink::WebMediaPlayer::PipWindowResizedCallback callback) {
- DCHECK(pip_surface_id_.is_valid());
- DCHECK(client_->DisplayType() ==
- WebMediaPlayer::DisplayType::kPictureInPicture &&
- !client_->IsInAutoPIP());
+ DCHECK(IsInPictureInPicture() && !client_->IsInAutoPIP());
delegate_->RegisterPictureInPictureWindowResizeCallback(delegate_id_,
std::move(callback));
@@ -1092,7 +1083,7 @@ bool WebMediaPlayerImpl::DidLoadingProgress() {
return pipeline_progress || data_progress;
}
-void WebMediaPlayerImpl::Paint(blink::WebCanvas* canvas,
+void WebMediaPlayerImpl::Paint(cc::PaintCanvas* canvas,
const blink::WebRect& rect,
cc::PaintFlags& flags,
int already_uploaded_id,
@@ -1139,6 +1130,12 @@ bool WebMediaPlayerImpl::DidGetOpaqueResponseFromServiceWorker() const {
}
bool WebMediaPlayerImpl::HasSingleSecurityOrigin() const {
+ if (demuxer_found_hls_) {
+ // HLS manifests might pull segments from a different origin. We can't know
+ // for sure, so we conservatively say no here.
+ return false;
+ }
+
if (data_source_)
return data_source_->HasSingleOrigin();
return true;
@@ -1543,6 +1540,8 @@ void WebMediaPlayerImpl::OnError(PipelineStatus status) {
#if defined(OS_ANDROID)
if (status == PipelineStatus::DEMUXER_ERROR_DETECTED_HLS) {
+ demuxer_found_hls_ = true;
+
renderer_factory_selector_->SetUseMediaPlayer(true);
pipeline_controller_.Stop();
@@ -1631,8 +1630,6 @@ void WebMediaPlayerImpl::OnMetadata(PipelineMetadata metadata) {
// then we don't need this check.
if (!always_enable_overlays_ && !DoesOverlaySupportMetadata())
DisableOverlay();
- else if (surface_manager_)
- surface_manager_->NaturalSizeChanged(pipeline_metadata_.natural_size);
}
if (!surface_layer_for_video_enabled_) {
@@ -1643,14 +1640,35 @@ void WebMediaPlayerImpl::OnMetadata(PipelineMetadata metadata) {
video_layer_->SetContentsOpaque(opaque_);
client_->SetCcLayer(video_layer_.get());
} else {
+ DCHECK(!bridge_);
+
+ bridge_ = std::move(create_bridge_callback_)
+ .Run(this, compositor_->GetUpdateSubmissionStateCallback());
+ bridge_->CreateSurfaceLayer();
+
vfc_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&VideoFrameCompositor::EnableSubmission,
- base::Unretained(compositor_.get()), bridge_->GetFrameSinkId(),
+ base::Unretained(compositor_.get()), bridge_->GetSurfaceId(),
pipeline_metadata_.video_decoder_config.video_rotation(),
+ IsInPictureInPicture(), opaque_,
BindToCurrentLoop(base::BindRepeating(
&WebMediaPlayerImpl::OnFrameSinkDestroyed, AsWeakPtr()))));
+ bridge_->SetContentsOpaque(opaque_);
+
+ // If the element is already in Picture-in-Picture mode, it means that it
+ // was set in this mode prior to this load, with a different
+ // WebMediaPlayerImpl. The new player needs to send its id, size and
+ // surface id to the browser process to make sure the states are properly
+ // updated.
+ // TODO(872056): the surface should be activated but for some reasons, it
+ // does not. It is possible that this will no longer be neded after 872056
+ // is fixed.
+ if (client_->DisplayType() ==
+ WebMediaPlayer::DisplayType::kPictureInPicture) {
+ OnSurfaceIdUpdated(bridge_->GetSurfaceId());
+ }
}
}
@@ -1754,9 +1772,9 @@ void WebMediaPlayerImpl::OnBufferingStateChangeInternal(
DVLOG(1) << __func__ << "(" << state << ")";
DCHECK(main_task_runner_->BelongsToCurrentThread());
- // Ignore buffering state changes until we've completed all outstanding
- // operations unless this is a buffering update for a suspended startup.
- if (!pipeline_controller_.IsStable() && !for_suspended_start)
+ // Ignore buffering state changes caused by back-to-back seeking, so as not
+ // to assume the second seek has finished when it was only the first seek.
+ if (pipeline_controller_.IsPendingSeek())
return;
auto log_event = media_log_->CreateBufferingStateChangedEvent(
@@ -1833,6 +1851,8 @@ void WebMediaPlayerImpl::OnDurationChange() {
return;
client_->DurationChanged();
+ if (watch_time_reporter_)
+ watch_time_reporter_->OnDurationChanged(GetPipelineMediaDuration());
}
void WebMediaPlayerImpl::OnAddTextTrack(const TextTrackConfig& config,
@@ -1883,16 +1903,11 @@ void WebMediaPlayerImpl::OnVideoNaturalSizeChange(const gfx::Size& size) {
return;
pipeline_metadata_.natural_size = rotated_size;
- CreateWatchTimeReporter();
+ UpdateSecondaryProperties();
if (video_decode_stats_reporter_)
video_decode_stats_reporter_->OnNaturalSizeChanged(rotated_size);
- if (overlay_enabled_ && surface_manager_ &&
- overlay_mode_ == OverlayMode::kUseContentVideoView) {
- surface_manager_->NaturalSizeChanged(rotated_size);
- }
-
client_->SizeChanged();
if (observer_)
@@ -1910,7 +1925,11 @@ void WebMediaPlayerImpl::OnVideoOpacityChange(bool opaque) {
if (video_layer_)
video_layer_->SetContentsOpaque(opaque_);
} else if (bridge_->GetCcLayer()) {
- bridge_->GetCcLayer()->SetContentsOpaque(opaque_);
+ bridge_->SetContentsOpaque(opaque_);
+ vfc_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VideoFrameCompositor::UpdateIsOpaque,
+ base::Unretained(compositor_.get()), opaque_));
}
}
@@ -1926,7 +1945,7 @@ void WebMediaPlayerImpl::OnAudioConfigChange(const AudioDecoderConfig& config) {
observer_->OnMetadataChanged(pipeline_metadata_);
if (codec_change)
- CreateWatchTimeReporter();
+ UpdateSecondaryProperties();
}
void WebMediaPlayerImpl::OnVideoConfigChange(const VideoDecoderConfig& config) {
@@ -1947,7 +1966,7 @@ void WebMediaPlayerImpl::OnVideoConfigChange(const VideoDecoderConfig& config) {
video_decode_stats_reporter_->OnVideoConfigChanged(config);
if (codec_change)
- CreateWatchTimeReporter();
+ UpdateSecondaryProperties();
}
void WebMediaPlayerImpl::OnVideoAverageKeyframeDistanceUpdate() {
@@ -1958,34 +1977,26 @@ void WebMediaPlayerImpl::OnAudioDecoderChange(const std::string& name) {
if (name == audio_decoder_name_)
return;
- const bool is_decoder_change = !audio_decoder_name_.empty();
audio_decoder_name_ = name;
// If there's no current reporter, there's nothing to be done.
if (!watch_time_reporter_)
return;
- if (is_decoder_change)
- CreateWatchTimeReporter();
- else
- watch_time_reporter_->SetAudioDecoderName(name);
+ UpdateSecondaryProperties();
}
void WebMediaPlayerImpl::OnVideoDecoderChange(const std::string& name) {
if (name == video_decoder_name_)
return;
- const bool is_decoder_change = !video_decoder_name_.empty();
video_decoder_name_ = name;
// If there's no current reporter, there's nothing to be done.
if (!watch_time_reporter_)
return;
- if (is_decoder_change)
- CreateWatchTimeReporter();
- else
- watch_time_reporter_->SetVideoDecoderName(name);
+ UpdateSecondaryProperties();
}
void WebMediaPlayerImpl::OnFrameHidden() {
@@ -2070,13 +2081,11 @@ void WebMediaPlayerImpl::OnIdleTimeout() {
}
void WebMediaPlayerImpl::OnPlay() {
- Play();
- client_->PlaybackStateChanged();
+ client_->RequestPlay();
}
void WebMediaPlayerImpl::OnPause() {
- Pause();
- client_->PlaybackStateChanged();
+ client_->RequestPause();
}
void WebMediaPlayerImpl::OnSeekForward(double seconds) {
@@ -2099,14 +2108,20 @@ void WebMediaPlayerImpl::OnBecamePersistentVideo(bool value) {
}
void WebMediaPlayerImpl::OnPictureInPictureModeEnded() {
- // This should never be called if |pip_surface_id_| is invalid. This is either
- // called from the Picture-in-Picture window side by a user gesture to end
- // Picture-in-Picture mode, or in ExitPictureInPicture(), which already checks
- // for validity.
- DCHECK(pip_surface_id_.is_valid());
+ // It is possible for this method to be called when the player is no longer in
+ // Picture-in-Picture mode.
+ if (!client_ || !IsInPictureInPicture())
+ return;
- if (client_)
- client_->PictureInPictureStopped();
+ client_->PictureInPictureStopped();
+}
+
+void WebMediaPlayerImpl::OnPictureInPictureControlClicked(
+ const std::string& control_id) {
+ if (client_ && IsInPictureInPicture()) {
+ client_->PictureInPictureControlClicked(
+ blink::WebString::FromUTF8(control_id));
+ }
}
void WebMediaPlayerImpl::ScheduleRestart() {
@@ -2186,7 +2201,7 @@ void WebMediaPlayerImpl::OnDisconnectedFromRemoteDevice(double t) {
UpdatePlayState();
// We already told the delegate we're paused when remoting started.
- client_->PlaybackStateChanged();
+ client_->RequestPause();
client_->DisconnectedFromRemoteDevice();
}
@@ -2266,12 +2281,6 @@ void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) {
SetReadyState(WebMediaPlayer::kReadyStateHaveEnoughData);
}
-void WebMediaPlayerImpl::OnSurfaceCreated(int surface_id) {
- DCHECK(overlay_mode_ == OverlayMode::kUseContentVideoView);
- overlay_surface_id_ = surface_id;
- MaybeSendOverlayInfoToDecoder();
-}
-
void WebMediaPlayerImpl::OnOverlayRoutingToken(
const base::UnguessableToken& token) {
DCHECK(overlay_mode_ == OverlayMode::kUseAndroidOverlay);
@@ -2285,7 +2294,6 @@ void WebMediaPlayerImpl::OnOverlayInfoRequested(
bool decoder_requires_restart_for_overlay,
const ProvideOverlayInfoCB& provide_overlay_info_cb) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- DCHECK(surface_manager_);
// If we get a non-null cb, a decoder is initializing and requires overlay
// info. If we get a null cb, a previously initialized decoder is
@@ -2338,12 +2346,7 @@ void WebMediaPlayerImpl::MaybeSendOverlayInfoToDecoder() {
// Initialization requires this; AVDA should start with enough info to make an
// overlay, so that (pre-M) the initial codec is created with the right output
// surface; it can't switch later.
- if (overlay_mode_ == OverlayMode::kUseContentVideoView) {
- if (!overlay_surface_id_.has_value())
- return;
-
- overlay_info_.surface_id = *overlay_surface_id_;
- } else if (overlay_mode_ == OverlayMode::kUseAndroidOverlay) {
+ if (overlay_mode_ == OverlayMode::kUseAndroidOverlay) {
if (overlay_routing_token_is_pending_)
return;
@@ -2563,7 +2566,12 @@ void WebMediaPlayerImpl::SetDelegateState(DelegateState new_state,
// TODO(sandersd): WebContentsObserverSanityChecker does not allow sending the
// 'playing' IPC more than once in a row, even if the metadata has changed.
// Figure out whether it should.
- bool has_audio = HasAudio() && !client_->IsAutoplayingMuted();
+ // Pretend that the media has no audio if it never played unmuted. This is to
+ // avoid any action related to audible media such as taking audio focus or
+ // showing a media notification. To preserve a consistent experience, it does
+ // not apply if a media was audible so the system states do not flicker
+ // depending on whether the user muted the player.
+ bool has_audio = HasAudio() && !client_->WasAlwaysMuted();
if (delegate_state_ == new_state &&
(delegate_state_ != DelegateState::PLAYING ||
delegate_has_audio_ == has_audio)) {
@@ -2858,17 +2866,17 @@ void WebMediaPlayerImpl::CreateWatchTimeReporter() {
// Create the watch time reporter and synchronize its initial state.
watch_time_reporter_.reset(new WatchTimeReporter(
- mojom::PlaybackProperties::New(
- pipeline_metadata_.audio_decoder_config.codec(),
- pipeline_metadata_.video_decoder_config.codec(),
- pipeline_metadata_.has_audio, pipeline_metadata_.has_video, false,
- false, !!chunk_demuxer_, is_encrypted_,
- embedded_media_experience_enabled_, pipeline_metadata_.natural_size),
+ mojom::PlaybackProperties::New(pipeline_metadata_.has_audio,
+ pipeline_metadata_.has_video, false, false,
+ !!chunk_demuxer_, is_encrypted_,
+ embedded_media_experience_enabled_),
+ pipeline_metadata_.natural_size,
base::BindRepeating(&WebMediaPlayerImpl::GetCurrentTimeInternal,
base::Unretained(this)),
media_metrics_provider_.get(),
frame_->GetTaskRunner(blink::TaskType::kInternalMedia)));
watch_time_reporter_->OnVolumeChange(volume_);
+ watch_time_reporter_->OnDurationChanged(GetPipelineMediaDuration());
if (delegate_->IsFrameHidden())
watch_time_reporter_->OnHidden();
@@ -2880,11 +2888,6 @@ void WebMediaPlayerImpl::CreateWatchTimeReporter() {
else
watch_time_reporter_->OnNativeControlsDisabled();
- if (!audio_decoder_name_.empty())
- watch_time_reporter_->SetAudioDecoderName(audio_decoder_name_);
- if (!video_decoder_name_.empty())
- watch_time_reporter_->SetVideoDecoderName(video_decoder_name_);
-
switch (client_->DisplayType()) {
case WebMediaPlayer::DisplayType::kInline:
watch_time_reporter_->OnDisplayTypeInline();
@@ -2896,6 +2899,16 @@ void WebMediaPlayerImpl::CreateWatchTimeReporter() {
watch_time_reporter_->OnDisplayTypePictureInPicture();
break;
}
+
+ UpdateSecondaryProperties();
+}
+
+void WebMediaPlayerImpl::UpdateSecondaryProperties() {
+ watch_time_reporter_->UpdateSecondaryProperties(
+ mojom::SecondaryPlaybackProperties::New(
+ pipeline_metadata_.audio_decoder_config.codec(),
+ pipeline_metadata_.video_decoder_config.codec(), audio_decoder_name_,
+ video_decoder_name_, pipeline_metadata_.natural_size));
}
bool WebMediaPlayerImpl::IsHidden() const {
@@ -2972,7 +2985,7 @@ bool WebMediaPlayerImpl::IsBackgroundOptimizationCandidate() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
// Don't optimize Picture-in-Picture players.
- if (client_->DisplayType() == WebMediaPlayer::DisplayType::kPictureInPicture)
+ if (IsInPictureInPicture())
return false;
#if defined(OS_ANDROID) // WMPI_CAST
@@ -3224,4 +3237,10 @@ void WebMediaPlayerImpl::RecordEncryptionScheme(
EncryptionSchemeUMA::kCount);
}
+bool WebMediaPlayerImpl::IsInPictureInPicture() const {
+ DCHECK(client_);
+ return client_->DisplayType() ==
+ WebMediaPlayer::DisplayType::kPictureInPicture;
+}
+
} // namespace media
diff --git a/chromium/media/blink/webmediaplayer_impl.h b/chromium/media/blink/webmediaplayer_impl.h
index 1feaa9a1970..cb72c64ac2b 100644
--- a/chromium/media/blink/webmediaplayer_impl.h
+++ b/chromium/media/blink/webmediaplayer_impl.h
@@ -24,13 +24,13 @@
#include "base/timer/elapsed_timer.h"
#include "base/timer/timer.h"
#include "build/build_config.h"
+#include "cc/layers/surface_layer.h"
#include "components/viz/common/gpu/context_provider.h"
#include "media/base/media_observer.h"
#include "media/base/media_tracks.h"
#include "media/base/overlay_info.h"
#include "media/base/pipeline_impl.h"
#include "media/base/renderer_factory_selector.h"
-#include "media/base/surface_manager.h"
#include "media/base/text_track.h"
#include "media/blink/buffered_data_source_host_impl.h"
#include "media/blink/media_blink_export.h"
@@ -124,9 +124,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void UnregisterContentsLayer(cc::Layer* layer) override;
void OnSurfaceIdUpdated(viz::SurfaceId surface_id) override;
- void Load(LoadType load_type,
- const blink::WebMediaPlayerSource& source,
- CORSMode cors_mode) override;
+ WebMediaPlayer::LoadTiming Load(LoadType load_type,
+ const blink::WebMediaPlayerSource& source,
+ CORSMode cors_mode) override;
// Playback controls.
void Play() override;
@@ -150,7 +150,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// paint() the current video frame into |canvas|. This is used to support
// various APIs and functionalities, including but not limited to: <canvas>,
// WebGL texImage2D, ImageBitmap, printing and capturing capabilities.
- void Paint(blink::WebCanvas* canvas,
+ void Paint(cc::PaintCanvas* canvas,
const blink::WebRect& rect,
cc::PaintFlags& flags,
int already_uploaded_id,
@@ -243,6 +243,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void OnVolumeMultiplierUpdate(double multiplier) override;
void OnBecamePersistentVideo(bool value) override;
void OnPictureInPictureModeEnded() override;
+ void OnPictureInPictureControlClicked(const std::string& control_id) override;
void RequestRemotePlaybackDisabled(bool disabled) override;
#if defined(OS_ANDROID) // WMPI_CAST
@@ -364,9 +365,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// Called when the data source is downloading or paused.
void NotifyDownloading(bool is_downloading);
- // Called by SurfaceManager when a surface is created.
- void OnSurfaceCreated(int surface_id);
-
// Called by RenderFrameImpl with the overlay routing token, if we request it.
void OnOverlayRoutingToken(const base::UnguessableToken& token);
@@ -427,6 +425,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// - is_idle_, must_suspend_,
// - paused_, ended_,
// - pending_suspend_resume_cycle_,
+ // - enter_pip_callback_,
void UpdatePlayState();
// Methods internal to UpdatePlayState().
@@ -462,6 +461,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
bool IsPrerollAttemptNeeded();
void CreateWatchTimeReporter();
+ void UpdateSecondaryProperties();
void CreateVideoDecodeStatsReporter();
@@ -581,6 +581,11 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void RecordEncryptionScheme(const std::string& stream_name,
const EncryptionScheme& encryption_scheme);
+ // Returns whether the player is currently displayed in Picture-in-Picture.
+ // It will return true even if the player is in AutoPIP mode.
+ // The player MUST have a `client_` when this call happen.
+ bool IsInPictureInPicture() const;
+
blink::WebLocalFrame* const frame_;
// The playback state last reported to |delegate_|, to avoid setting duplicate
@@ -752,18 +757,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
std::unique_ptr<RendererFactorySelector> renderer_factory_selector_;
- // For requesting surfaces on behalf of the Android H/W decoder in fullscreen.
- // This will be null everywhere but Android.
- SurfaceManager* const surface_manager_;
-
// For canceling ongoing surface creation requests when exiting fullscreen.
base::CancelableCallback<void(int)> surface_created_cb_;
- // The current overlay surface id. Populated, possibly with kNoSurfaceID if
- // we're not supposed to use an overlay, unless we have an outstanding surface
- // request to the SurfaceManager.
- base::Optional<int> overlay_surface_id_ = SurfaceManager::kNoSurfaceID;
-
// For canceling AndroidOverlay routing token requests.
base::CancelableCallback<void(const base::UnguessableToken&)>
token_available_cb_;
@@ -797,6 +793,11 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// removing |cast_impl_|.
bool using_media_player_renderer_ = false;
+ // Set whenever the demuxer encounters an HLS file.
+ // This flag is distinct from |using_media_player_renderer_|, because on older
+ // devices we might use MediaPlayerRenderer for non HLS playback.
+ bool demuxer_found_hls_ = false;
+
// Called sometime after the media is suspended in a playing state in
// OnFrameHidden(), causing the state to change to paused.
base::OneShotTimer background_pause_timer_;
@@ -876,6 +877,11 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// Whether the use of a surface layer instead of a video layer is enabled.
bool surface_layer_for_video_enabled_ = false;
+ base::OnceCallback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
+ blink::WebSurfaceLayerBridgeObserver*,
+ cc::UpdateSubmissionStateCB)>
+ create_bridge_callback_;
+
base::CancelableOnceCallback<void(base::TimeTicks)> frame_time_report_cb_;
bool initial_video_height_recorded_ = false;
@@ -919,10 +925,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// True if a frame has ever been rendered.
bool has_first_frame_ = false;
- // Keeps track of the SurfaceId for Picture-in-Picture. This is used to
- // route the video to be shown in the Picture-in-Picture window.
- viz::SurfaceId pip_surface_id_;
-
DISALLOW_COPY_AND_ASSIGN(WebMediaPlayerImpl);
};
diff --git a/chromium/media/blink/webmediaplayer_impl_unittest.cc b/chromium/media/blink/webmediaplayer_impl_unittest.cc
index d8d002d4abc..1581ed0d1d4 100644
--- a/chromium/media/blink/webmediaplayer_impl_unittest.cc
+++ b/chromium/media/blink/webmediaplayer_impl_unittest.cc
@@ -53,8 +53,8 @@
#include "third_party/blink/public/platform/web_size.h"
#include "third_party/blink/public/platform/web_surface_layer_bridge.h"
#include "third_party/blink/public/platform/web_url_response.h"
-#include "third_party/blink/public/web/web_frame_client.h"
#include "third_party/blink/public/web/web_local_frame.h"
+#include "third_party/blink/public/web/web_local_frame_client.h"
#include "third_party/blink/public/web/web_scoped_user_gesture.h"
#include "third_party/blink/public/web/web_view.h"
#include "url/gurl.h"
@@ -131,7 +131,6 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
MOCK_METHOD0(Repaint, void());
MOCK_METHOD0(DurationChanged, void());
MOCK_METHOD0(SizeChanged, void());
- MOCK_METHOD0(PlaybackStateChanged, void());
MOCK_METHOD1(SetCcLayer, void(cc::Layer*));
MOCK_METHOD5(AddAudioTrack,
blink::WebMediaPlayer::TrackId(
@@ -162,7 +161,7 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
MOCK_METHOD2(RemotePlaybackCompatibilityChanged,
void(const blink::WebURL&, bool));
MOCK_METHOD1(OnBecamePersistentVideo, void(bool));
- MOCK_METHOD0(IsAutoplayingMuted, bool());
+ MOCK_METHOD0(WasAlwaysMuted, bool());
MOCK_METHOD0(HasSelectedVideoTrack, bool());
MOCK_METHOD0(GetSelectedVideoTrackId, blink::WebMediaPlayer::TrackId());
MOCK_METHOD0(HasNativeControls, bool());
@@ -174,11 +173,14 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
MOCK_METHOD1(MediaRemotingStopped, void(blink::WebLocalizedString::Name));
MOCK_METHOD0(PictureInPictureStarted, void());
MOCK_METHOD0(PictureInPictureStopped, void());
+ MOCK_METHOD1(PictureInPictureControlClicked, void(const blink::WebString&));
MOCK_CONST_METHOD0(CouldPlayIfEnoughData, bool());
+ MOCK_METHOD0(RequestPlay, void());
+ MOCK_METHOD0(RequestPause, void());
- void set_is_autoplaying_muted(bool value) { is_autoplaying_muted_ = value; }
+ void set_was_always_muted(bool value) { was_always_muted_ = value; }
- bool is_autoplaying_muted_ = false;
+ bool was_always_muted_ = false;
private:
DISALLOW_COPY_AND_ASSIGN(MockWebMediaPlayerClient);
@@ -292,7 +294,10 @@ class MockSurfaceLayerBridge : public blink::WebSurfaceLayerBridge {
public:
MOCK_CONST_METHOD0(GetCcLayer, cc::Layer*());
MOCK_CONST_METHOD0(GetFrameSinkId, const viz::FrameSinkId&());
+ MOCK_CONST_METHOD0(GetSurfaceId, const viz::SurfaceId&());
MOCK_METHOD0(ClearSurfaceId, void());
+ MOCK_METHOD1(SetContentsOpaque, void(bool));
+ MOCK_METHOD0(CreateSurfaceLayer, void());
};
class MockVideoFrameCompositor : public VideoFrameCompositor {
@@ -305,9 +310,11 @@ class MockVideoFrameCompositor : public VideoFrameCompositor {
// MOCK_METHOD doesn't like OnceCallback.
void SetOnNewProcessedFrameCallback(OnNewProcessedFrameCB cb) override {}
MOCK_METHOD0(GetCurrentFrameAndUpdateIfStale, scoped_refptr<VideoFrame>());
- MOCK_METHOD3(EnableSubmission,
- void(const viz::FrameSinkId&,
+ MOCK_METHOD5(EnableSubmission,
+ void(const viz::SurfaceId&,
media::VideoRotation,
+ bool,
+ bool,
blink::WebFrameSinkDestroyedCallback));
};
@@ -316,7 +323,8 @@ class WebMediaPlayerImplTest : public testing::Test {
WebMediaPlayerImplTest()
: media_thread_("MediaThreadForTest"),
web_view_(
- blink::WebView::Create(nullptr,
+ blink::WebView::Create(/*client=*/nullptr,
+ /*widget_client=*/nullptr,
blink::mojom::PageVisibilityState::kVisible,
nullptr)),
web_local_frame_(
@@ -352,7 +360,8 @@ class WebMediaPlayerImplTest : public testing::Test {
RendererFactorySelector::FactoryType::DEFAULT);
mojom::MediaMetricsProviderPtr provider;
- MediaMetricsProvider::Create(nullptr, mojo::MakeRequest(&provider));
+ MediaMetricsProvider::Create(VideoDecodePerfHistory::SaveCallback(),
+ mojo::MakeRequest(&provider));
// Initialize provider since none of the tests below actually go through the
// full loading/pipeline initialize phase. If this ever changes the provider
@@ -369,13 +378,12 @@ class WebMediaPlayerImplTest : public testing::Test {
base::ThreadTaskRunnerHandle::Get(), media_thread_.task_runner(),
base::BindRepeating(&WebMediaPlayerImplTest::OnAdjustAllocatedMemory,
base::Unretained(this)),
- nullptr, nullptr, RequestRoutingTokenCallback(), nullptr,
+ nullptr, RequestRoutingTokenCallback(), nullptr,
kMaxKeyframeDistanceToDisableBackgroundVideo,
kMaxKeyframeDistanceToDisableBackgroundVideoMSE, false, false,
std::move(provider),
- base::BindRepeating(
- &WebMediaPlayerImplTest::CreateMockSurfaceLayerBridge,
- base::Unretained(this)),
+ base::BindOnce(&WebMediaPlayerImplTest::CreateMockSurfaceLayerBridge,
+ base::Unretained(this)),
viz::TestContextProvider::Create(),
base::FeatureList::IsEnabled(media::kUseSurfaceLayerForVideo));
@@ -410,7 +418,8 @@ class WebMediaPlayerImplTest : public testing::Test {
protected:
std::unique_ptr<blink::WebSurfaceLayerBridge> CreateMockSurfaceLayerBridge(
- blink::WebSurfaceLayerBridgeObserver*) {
+ blink::WebSurfaceLayerBridgeObserver*,
+ cc::UpdateSubmissionStateCB) {
return std::move(surface_layer_bridge_);
}
@@ -453,8 +462,8 @@ class WebMediaPlayerImplTest : public testing::Test {
EXPECT_CALL(client_, ReadyStateChanged());
wmpi_->SetReadyState(blink::WebMediaPlayer::kReadyStateHaveMetadata);
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillRepeatedly(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillRepeatedly(Return(client_.was_always_muted_));
wmpi_->pipeline_metadata_.has_audio = has_audio;
wmpi_->pipeline_metadata_.has_video = has_video;
@@ -480,32 +489,32 @@ class WebMediaPlayerImplTest : public testing::Test {
}
WebMediaPlayerImpl::PlayState ComputePlayState() {
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillRepeatedly(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillRepeatedly(Return(client_.was_always_muted_));
return wmpi_->UpdatePlayState_ComputePlayState(false, true, false, false);
}
WebMediaPlayerImpl::PlayState ComputePlayState_FrameHidden() {
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillRepeatedly(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillRepeatedly(Return(client_.was_always_muted_));
return wmpi_->UpdatePlayState_ComputePlayState(false, true, false, true);
}
WebMediaPlayerImpl::PlayState ComputePlayState_Suspended() {
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillRepeatedly(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillRepeatedly(Return(client_.was_always_muted_));
return wmpi_->UpdatePlayState_ComputePlayState(false, true, true, false);
}
WebMediaPlayerImpl::PlayState ComputePlayState_Remote() {
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillRepeatedly(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillRepeatedly(Return(client_.was_always_muted_));
return wmpi_->UpdatePlayState_ComputePlayState(true, true, false, false);
}
WebMediaPlayerImpl::PlayState ComputePlayState_BackgroundedStreaming() {
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillRepeatedly(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillRepeatedly(Return(client_.was_always_muted_));
return wmpi_->UpdatePlayState_ComputePlayState(false, false, false, true);
}
@@ -664,7 +673,7 @@ class WebMediaPlayerImplTest : public testing::Test {
base::Thread media_thread_;
// Blink state.
- blink::WebFrameClient web_frame_client_;
+ blink::WebLocalFrameClient web_frame_client_;
blink::WebView* web_view_;
blink::WebLocalFrame* web_local_frame_;
@@ -758,6 +767,16 @@ TEST_F(WebMediaPlayerImplTest, LoadPreloadMetadataSuspendNoVideoMemoryUsage) {
EXPECT_CALL(client_, CouldPlayIfEnoughData()).WillRepeatedly(Return(false));
wmpi_->SetPreload(blink::WebMediaPlayer::kPreloadMetaData);
wmpi_->SetPoster(blink::WebURL(GURL("file://example.com/sample.jpg")));
+
+ if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId())
+ .WillOnce(ReturnRef(surface_id_));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _, false, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
+ }
+
LoadAndWaitForMetadata("bear-320x240-video-only.webm");
testing::Mock::VerifyAndClearExpectations(&client_);
EXPECT_CALL(client_, ReadyStateChanged()).Times(AnyNumber());
@@ -1109,17 +1128,17 @@ TEST_F(WebMediaPlayerImplTest, AutoplayMuted_StartsAndStops) {
SetMetadata(true, true);
SetReadyState(blink::WebMediaPlayer::kReadyStateHaveFutureData);
SetPaused(false);
- client_.set_is_autoplaying_muted(true);
+ client_.set_was_always_muted(true);
EXPECT_CALL(delegate_, DidPlay(_, true, false, _));
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillOnce(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillOnce(Return(client_.was_always_muted_));
SetDelegateState(WebMediaPlayerImpl::DelegateState::PLAYING);
- client_.set_is_autoplaying_muted(false);
+ client_.set_was_always_muted(false);
EXPECT_CALL(delegate_, DidPlay(_, true, true, _));
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillOnce(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillOnce(Return(client_.was_always_muted_));
SetDelegateState(WebMediaPlayerImpl::DelegateState::PLAYING);
}
@@ -1128,16 +1147,16 @@ TEST_F(WebMediaPlayerImplTest, AutoplayMuted_SetVolume) {
SetMetadata(true, true);
SetReadyState(blink::WebMediaPlayer::kReadyStateHaveFutureData);
SetPaused(false);
- client_.set_is_autoplaying_muted(true);
+ client_.set_was_always_muted(true);
EXPECT_CALL(delegate_, DidPlay(_, true, false, _));
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillOnce(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillOnce(Return(client_.was_always_muted_));
SetDelegateState(WebMediaPlayerImpl::DelegateState::PLAYING);
- client_.set_is_autoplaying_muted(false);
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillOnce(Return(client_.is_autoplaying_muted_));
+ client_.set_was_always_muted(false);
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillOnce(Return(client_.was_always_muted_));
EXPECT_CALL(delegate_, DidPlay(_, true, true, _));
wmpi_->SetVolume(1.0);
}
@@ -1149,8 +1168,9 @@ TEST_F(WebMediaPlayerImplTest, NoStreams) {
EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
if (base::FeatureList::IsEnabled(media::kUseSurfaceLayerForVideo)) {
- EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId()).Times(0);
- EXPECT_CALL(*compositor_, EnableSubmission(_, _, _)).Times(0);
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer()).Times(0);
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId()).Times(0);
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _, _, _)).Times(0);
}
// Nothing should happen. In particular, no assertions should fail.
@@ -1165,10 +1185,12 @@ TEST_F(WebMediaPlayerImplTest, NaturalSizeChange) {
metadata.natural_size = gfx::Size(320, 240);
if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
- EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId())
- .WillOnce(ReturnRef(frame_sink_id_));
- EXPECT_CALL(*compositor_, EnableSubmission(_, _, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId())
+ .WillOnce(ReturnRef(surface_id_));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _, false, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
} else {
EXPECT_CALL(client_, SetCcLayer(NotNull()));
}
@@ -1191,9 +1213,11 @@ TEST_F(WebMediaPlayerImplTest, NaturalSizeChange_Rotated) {
if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
- EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId())
- .WillOnce(ReturnRef(frame_sink_id_));
- EXPECT_CALL(*compositor_, EnableSubmission(_, _, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId())
+ .WillOnce(ReturnRef(surface_id_));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _, false, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
} else {
EXPECT_CALL(client_, SetCcLayer(NotNull()));
}
@@ -1217,9 +1241,11 @@ TEST_F(WebMediaPlayerImplTest, VideoLockedWhenPausedWhenHidden) {
if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
- EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId())
- .WillOnce(ReturnRef(frame_sink_id_));
- EXPECT_CALL(*compositor_, EnableSubmission(_, _, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId())
+ .WillOnce(ReturnRef(surface_id_));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _, false, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
} else {
EXPECT_CALL(client_, SetCcLayer(NotNull()));
}
@@ -1291,9 +1317,11 @@ TEST_F(WebMediaPlayerImplTest, InfiniteDuration) {
if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
- EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId())
- .WillOnce(ReturnRef(frame_sink_id_));
- EXPECT_CALL(*compositor_, EnableSubmission(_, _, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId())
+ .WillOnce(ReturnRef(surface_id_));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _, false, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
} else {
EXPECT_CALL(client_, SetCcLayer(NotNull()));
}
@@ -1320,11 +1348,29 @@ TEST_F(WebMediaPlayerImplTest, SetContentsLayerGetsWebLayerFromBridge) {
InitializeWebMediaPlayerImpl();
+ PipelineMetadata metadata;
+ metadata.has_video = true;
+ metadata.video_decoder_config =
+ TestVideoConfig::NormalRotated(VIDEO_ROTATION_90);
+ metadata.natural_size = gfx::Size(320, 240);
+
+ EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId())
+ .WillOnce(ReturnRef(surface_id_));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _, false, _));
+
+ // We only call the callback to create the bridge in OnMetadata, so we need
+ // to call it.
+ OnMetadata(metadata);
+
scoped_refptr<cc::Layer> layer = cc::Layer::Create();
EXPECT_CALL(*surface_layer_bridge_ptr_, GetCcLayer())
.WillRepeatedly(Return(layer.get()));
EXPECT_CALL(client_, SetCcLayer(Eq(layer.get())));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
wmpi_->RegisterContentsLayer(layer.get());
}
@@ -1344,11 +1390,22 @@ TEST_F(WebMediaPlayerImplTest, PlaybackRateChangeMediaLogs) {
}
}
-// Tests when the PipSurfaceInfoCB for |wmpi_| is triggered for
-// Picture-in-Picture.
-TEST_F(WebMediaPlayerImplTest, PictureInPictureTriggerCallback) {
+// Tests delegate methods are called when Picture-in-Picture is triggered.
+// Disabling this test only in the Beta branch where VideoSurfaceLayer is not
+// enabled by defaulty.
+TEST_F(WebMediaPlayerImplTest, DISABLED_PictureInPictureTriggerCallback) {
InitializeWebMediaPlayerImpl();
+ EXPECT_CALL(*surface_layer_bridge_ptr_, CreateSurfaceLayer());
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId())
+ .WillRepeatedly(ReturnRef(surface_id_));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _, false, _));
+ EXPECT_CALL(*surface_layer_bridge_ptr_, SetContentsOpaque(false));
+
+ PipelineMetadata metadata;
+ metadata.has_video = true;
+ OnMetadata(metadata);
+
EXPECT_CALL(client_, DisplayType())
.WillRepeatedly(
Return(blink::WebMediaPlayer::DisplayType::kPictureInPicture));
@@ -1366,9 +1423,9 @@ TEST_F(WebMediaPlayerImplTest, PictureInPictureTriggerCallback) {
wmpi_->EnterPictureInPicture(base::DoNothing());
wmpi_->OnSurfaceIdUpdated(surface_id_);
- // Upon exiting Picture-in-Picture mode, functions to cleanup are expected to
- // be called. ~WMPI calls ExitPictureInPicture().
- EXPECT_CALL(delegate_, DidPictureInPictureModeEnd(delegate_.player_id(), _));
+ // Updating SurfaceId should NOT exit Picture-in-Picture.
+ EXPECT_CALL(delegate_, DidPictureInPictureModeEnd(delegate_.player_id(), _))
+ .Times(0);
}
class WebMediaPlayerImplBackgroundBehaviorTest
@@ -1556,8 +1613,8 @@ TEST_P(WebMediaPlayerImplBackgroundBehaviorTest, AudioVideo) {
// test back into a normal state.
EXPECT_TRUE(IsDisableVideoTrackPending());
- EXPECT_CALL(client_, IsAutoplayingMuted())
- .WillRepeatedly(Return(client_.is_autoplaying_muted_));
+ EXPECT_CALL(client_, WasAlwaysMuted())
+ .WillRepeatedly(Return(client_.was_always_muted_));
ForegroundPlayer();
EXPECT_FALSE(IsVideoTrackDisabled());
EXPECT_FALSE(IsDisableVideoTrackPending());
diff --git a/chromium/media/blink/webmediaplayer_params.cc b/chromium/media/blink/webmediaplayer_params.cc
index 99f28940d35..04368856102 100644
--- a/chromium/media/blink/webmediaplayer_params.cc
+++ b/chromium/media/blink/webmediaplayer_params.cc
@@ -21,7 +21,6 @@ WebMediaPlayerParams::WebMediaPlayerParams(
video_frame_compositor_task_runner,
const AdjustAllocatedMemoryCB& adjust_allocated_memory_cb,
blink::WebContentDecryptionModule* initial_cdm,
- SurfaceManager* surface_manager,
RequestRoutingTokenCallback request_routing_token_cb,
base::WeakPtr<MediaObserver> media_observer,
base::TimeDelta max_keyframe_distance_to_disable_background_video,
@@ -29,8 +28,9 @@ WebMediaPlayerParams::WebMediaPlayerParams(
bool enable_instant_source_buffer_gc,
bool embedded_media_experience_enabled,
mojom::MediaMetricsProviderPtr metrics_provider,
- base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
- blink::WebSurfaceLayerBridgeObserver*)> create_bridge_callback,
+ base::OnceCallback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
+ blink::WebSurfaceLayerBridgeObserver*,
+ cc::UpdateSubmissionStateCB)> create_bridge_callback,
scoped_refptr<viz::ContextProvider> context_provider,
bool use_surface_layer_for_video)
: defer_load_cb_(defer_load_cb),
@@ -42,7 +42,6 @@ WebMediaPlayerParams::WebMediaPlayerParams(
video_frame_compositor_task_runner_(video_frame_compositor_task_runner),
adjust_allocated_memory_cb_(adjust_allocated_memory_cb),
initial_cdm_(initial_cdm),
- surface_manager_(surface_manager),
request_routing_token_cb_(std::move(request_routing_token_cb)),
media_observer_(media_observer),
max_keyframe_distance_to_disable_background_video_(
@@ -52,7 +51,7 @@ WebMediaPlayerParams::WebMediaPlayerParams(
enable_instant_source_buffer_gc_(enable_instant_source_buffer_gc),
embedded_media_experience_enabled_(embedded_media_experience_enabled),
metrics_provider_(std::move(metrics_provider)),
- create_bridge_callback_(create_bridge_callback),
+ create_bridge_callback_(std::move(create_bridge_callback)),
context_provider_(std::move(context_provider)),
use_surface_layer_for_video_(use_surface_layer_for_video) {}
diff --git a/chromium/media/blink/webmediaplayer_params.h b/chromium/media/blink/webmediaplayer_params.h
index 680f433e589..0793f19763e 100644
--- a/chromium/media/blink/webmediaplayer_params.h
+++ b/chromium/media/blink/webmediaplayer_params.h
@@ -13,6 +13,7 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/time/time.h"
+#include "cc/layers/surface_layer.h"
#include "components/viz/common/gpu/context_provider.h"
#include "media/base/media_log.h"
#include "media/base/media_observer.h"
@@ -37,14 +38,15 @@ class WebSurfaceLayerBridgeObserver;
namespace media {
class SwitchableAudioRendererSink;
-class SurfaceManager;
// Holds parameters for constructing WebMediaPlayerImpl without having
// to plumb arguments through various abstraction layers.
class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
public:
- typedef base::Callback<void(const base::Closure&)> DeferLoadCB;
- typedef base::Callback<Context3D()> Context3DCB;
+ // Returns true if load will deferred. False if it will run immediately.
+ using DeferLoadCB = base::RepeatingCallback<bool(base::OnceClosure)>;
+
+ using Context3DCB = base::Callback<Context3D()>;
// Callback to obtain the media ContextProvider.
// Requires being called on the media thread.
@@ -72,7 +74,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
video_frame_compositor_task_runner,
const AdjustAllocatedMemoryCB& adjust_allocated_memory_cb,
blink::WebContentDecryptionModule* initial_cdm,
- SurfaceManager* surface_manager,
RequestRoutingTokenCallback request_routing_token_cb,
base::WeakPtr<MediaObserver> media_observer,
base::TimeDelta max_keyframe_distance_to_disable_background_video,
@@ -80,8 +81,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
bool enable_instant_source_buffer_gc,
bool embedded_media_experience_enabled,
mojom::MediaMetricsProviderPtr metrics_provider,
- base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
- blink::WebSurfaceLayerBridgeObserver*)> bridge_callback,
+ base::OnceCallback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
+ blink::WebSurfaceLayerBridgeObserver*,
+ cc::UpdateSubmissionStateCB)> bridge_callback,
scoped_refptr<viz::ContextProvider> context_provider,
bool use_surface_layer_for_video);
@@ -126,8 +128,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
return adjust_allocated_memory_cb_;
}
- SurfaceManager* surface_manager() const { return surface_manager_; }
-
base::WeakPtr<MediaObserver> media_observer() const {
return media_observer_;
}
@@ -153,9 +153,11 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
return request_routing_token_cb_;
}
- const base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
- blink::WebSurfaceLayerBridgeObserver*)>& create_bridge_callback() const {
- return create_bridge_callback_;
+ base::OnceCallback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
+ blink::WebSurfaceLayerBridgeObserver*,
+ cc::UpdateSubmissionStateCB)>
+ create_bridge_callback() {
+ return std::move(create_bridge_callback_);
}
scoped_refptr<viz::ContextProvider> context_provider() {
@@ -178,7 +180,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
AdjustAllocatedMemoryCB adjust_allocated_memory_cb_;
blink::WebContentDecryptionModule* initial_cdm_;
- SurfaceManager* surface_manager_;
RequestRoutingTokenCallback request_routing_token_cb_;
base::WeakPtr<MediaObserver> media_observer_;
base::TimeDelta max_keyframe_distance_to_disable_background_video_;
@@ -186,8 +187,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
bool enable_instant_source_buffer_gc_;
const bool embedded_media_experience_enabled_;
mojom::MediaMetricsProviderPtr metrics_provider_;
- base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
- blink::WebSurfaceLayerBridgeObserver*)>
+ base::OnceCallback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
+ blink::WebSurfaceLayerBridgeObserver*,
+ cc::UpdateSubmissionStateCB)>
create_bridge_callback_;
scoped_refptr<viz::ContextProvider> context_provider_;
bool use_surface_layer_for_video_;
diff --git a/chromium/media/blink/webmediasource_impl.cc b/chromium/media/blink/webmediasource_impl.cc
index 5fec2d9e5c2..67ef90307ad 100644
--- a/chromium/media/blink/webmediasource_impl.cc
+++ b/chromium/media/blink/webmediasource_impl.cc
@@ -32,13 +32,13 @@ WebMediaSourceImpl::WebMediaSourceImpl(ChunkDemuxer* demuxer)
WebMediaSourceImpl::~WebMediaSourceImpl() = default;
WebMediaSource::AddStatus WebMediaSourceImpl::AddSourceBuffer(
- const blink::WebString& type,
+ const blink::WebString& content_type,
const blink::WebString& codecs,
blink::WebSourceBuffer** source_buffer) {
std::string id = base::GenerateGUID();
WebMediaSource::AddStatus result = static_cast<WebMediaSource::AddStatus>(
- demuxer_->AddId(id, type.Utf8().data(), codecs.Utf8().data()));
+ demuxer_->AddId(id, content_type.Utf8(), codecs.Utf8()));
if (result == WebMediaSource::kAddStatusOk)
*source_buffer = new WebSourceBufferImpl(id, demuxer_);
diff --git a/chromium/media/blink/webmediasource_impl.h b/chromium/media/blink/webmediasource_impl.h
index 5b88f416e9f..2e6d1fd125e 100644
--- a/chromium/media/blink/webmediasource_impl.h
+++ b/chromium/media/blink/webmediasource_impl.h
@@ -21,7 +21,7 @@ class MEDIA_BLINK_EXPORT WebMediaSourceImpl : public blink::WebMediaSource {
~WebMediaSourceImpl() override;
// blink::WebMediaSource implementation.
- AddStatus AddSourceBuffer(const blink::WebString& type,
+ AddStatus AddSourceBuffer(const blink::WebString& content_type,
const blink::WebString& codecs,
blink::WebSourceBuffer** source_buffer) override;
double Duration() override;
diff --git a/chromium/media/blink/websourcebuffer_impl.cc b/chromium/media/blink/websourcebuffer_impl.cc
index 2c6f12a98dd..ac7ae84132f 100644
--- a/chromium/media/blink/websourcebuffer_impl.cc
+++ b/chromium/media/blink/websourcebuffer_impl.cc
@@ -86,6 +86,10 @@ void WebSourceBufferImpl::SetClient(blink::WebSourceBufferClient* client) {
client_ = client;
}
+bool WebSourceBufferImpl::GetGenerateTimestampsFlag() {
+ return demuxer_->GetGenerateTimestampsFlag(id_);
+}
+
bool WebSourceBufferImpl::SetMode(WebSourceBuffer::AppendMode mode) {
if (demuxer_->IsParsingMediaSegment(id_))
return false;
@@ -159,6 +163,19 @@ void WebSourceBufferImpl::Remove(double start, double end) {
demuxer_->Remove(id_, DoubleToTimeDelta(start), DoubleToTimeDelta(end));
}
+bool WebSourceBufferImpl::CanChangeType(const blink::WebString& content_type,
+ const blink::WebString& codecs) {
+ return demuxer_->CanChangeType(id_, content_type.Utf8(), codecs.Utf8());
+}
+
+void WebSourceBufferImpl::ChangeType(const blink::WebString& content_type,
+ const blink::WebString& codecs) {
+ // Caller must first call ResetParserState() to flush any pending frames.
+ DCHECK(!demuxer_->IsParsingMediaSegment(id_));
+
+ demuxer_->ChangeType(id_, content_type.Utf8(), codecs.Utf8());
+}
+
bool WebSourceBufferImpl::SetTimestampOffset(double offset) {
if (demuxer_->IsParsingMediaSegment(id_))
return false;
diff --git a/chromium/media/blink/websourcebuffer_impl.h b/chromium/media/blink/websourcebuffer_impl.h
index f1da35b7e63..808aa486ceb 100644
--- a/chromium/media/blink/websourcebuffer_impl.h
+++ b/chromium/media/blink/websourcebuffer_impl.h
@@ -27,6 +27,7 @@ class WebSourceBufferImpl : public blink::WebSourceBuffer {
// blink::WebSourceBuffer implementation.
void SetClient(blink::WebSourceBufferClient* client) override;
+ bool GetGenerateTimestampsFlag() override;
bool SetMode(AppendMode mode) override;
blink::WebTimeRanges Buffered() override;
double HighestPresentationTimestamp() override;
@@ -37,6 +38,10 @@ class WebSourceBufferImpl : public blink::WebSourceBuffer {
double* timestamp_offset) override;
void ResetParserState() override;
void Remove(double start, double end) override;
+ bool CanChangeType(const blink::WebString& content_type,
+ const blink::WebString& codecs) override;
+ void ChangeType(const blink::WebString& content_type,
+ const blink::WebString& codecs) override;
bool SetTimestampOffset(double offset) override;
void SetAppendWindowStart(double start) override;
void SetAppendWindowEnd(double end) override;
diff --git a/chromium/media/capabilities/BUILD.gn b/chromium/media/capabilities/BUILD.gn
index a39858f3ab3..9dcfb7dcdf6 100644
--- a/chromium/media/capabilities/BUILD.gn
+++ b/chromium/media/capabilities/BUILD.gn
@@ -20,10 +20,14 @@ source_set("capabilities") {
sources = [
"bucket_utility.cc",
"bucket_utility.h",
+ "in_memory_video_decode_stats_db_impl.cc",
+ "in_memory_video_decode_stats_db_impl.h",
"video_decode_stats_db.cc",
"video_decode_stats_db.h",
"video_decode_stats_db_impl.cc",
"video_decode_stats_db_impl.h",
+ "video_decode_stats_db_provider.cc",
+ "video_decode_stats_db_provider.h",
]
public_deps = [
@@ -40,6 +44,7 @@ source_set("capabilities") {
source_set("unit_tests") {
testonly = true
sources = [
+ "in_memory_video_decode_stats_db_unittest.cc",
"video_decode_stats_db_unittest.cc",
]
diff --git a/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc b/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc
new file mode 100644
index 00000000000..e01adfb76f6
--- /dev/null
+++ b/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc
@@ -0,0 +1,211 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capabilities/in_memory_video_decode_stats_db_impl.h"
+
+#include <memory>
+#include <tuple>
+
+#include "base/files/file_path.h"
+#include "base/format_macros.h"
+#include "base/logging.h"
+#include "base/metrics/histogram_macros.h"
+#include "base/sequence_checker.h"
+#include "base/strings/stringprintf.h"
+#include "base/task_scheduler/post_task.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/capabilities/video_decode_stats_db_provider.h"
+
+namespace media {
+
+InMemoryVideoDecodeStatsDBFactory::InMemoryVideoDecodeStatsDBFactory(
+ VideoDecodeStatsDBProvider* seed_db_provider)
+ : seed_db_provider_(seed_db_provider) {
+ DVLOG(2) << __func__ << " has_seed_provider:" << !!seed_db_provider_;
+}
+
+InMemoryVideoDecodeStatsDBFactory::~InMemoryVideoDecodeStatsDBFactory() =
+ default;
+
+std::unique_ptr<VideoDecodeStatsDB>
+InMemoryVideoDecodeStatsDBFactory::CreateDB() {
+ return std::make_unique<InMemoryVideoDecodeStatsDBImpl>(seed_db_provider_);
+}
+
+InMemoryVideoDecodeStatsDBImpl::InMemoryVideoDecodeStatsDBImpl(
+ VideoDecodeStatsDBProvider* seed_db_provider)
+ : seed_db_provider_(seed_db_provider), weak_ptr_factory_(this) {
+ DVLOG(2) << __func__;
+}
+
+InMemoryVideoDecodeStatsDBImpl::~InMemoryVideoDecodeStatsDBImpl() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+}
+
+void InMemoryVideoDecodeStatsDBImpl::Initialize(InitializeCB init_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(init_cb);
+ DCHECK(!db_init_);
+
+ // Fetch an *initialized* seed DB.
+ if (seed_db_provider_) {
+ seed_db_provider_->GetVideoDecodeStatsDB(
+ base::BindOnce(&InMemoryVideoDecodeStatsDBImpl::OnGotSeedDB,
+ weak_ptr_factory_.GetWeakPtr(), std::move(init_cb)));
+ } else {
+ // No seed DB provider (e.g. guest session) means no work to do.
+ DVLOG(2) << __func__ << " NO seed db";
+ db_init_ = true;
+
+ // Bind to avoid reentrancy.
+ std::move(BindToCurrentLoop(std::move(init_cb))).Run(true);
+ }
+}
+
+void InMemoryVideoDecodeStatsDBImpl::OnGotSeedDB(InitializeCB init_cb,
+ VideoDecodeStatsDB* db) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOG(2) << __func__ << (db ? " has" : " null") << " seed db";
+
+ db_init_ = true;
+ seed_db_ = db;
+
+ // Hard coding success = true. There are rare cases (e.g. disk corruption)
+ // where an incognito profile may fail to acquire a reference to the base
+ // profile's DB. But this just means incognito is in the same boat as guest
+ // profiles (never have a seed DB) and is not a show stopper.
+ std::move(init_cb).Run(true);
+}
+
+void InMemoryVideoDecodeStatsDBImpl::AppendDecodeStats(
+ const VideoDescKey& key,
+ const DecodeStatsEntry& entry,
+ AppendDecodeStatsCB append_done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(db_init_);
+
+ DVLOG(3) << __func__ << " Reading key " << key.ToLogString()
+ << " from DB with intent to update with " << entry.ToLogString();
+
+ auto it = in_memory_db_.find(key.Serialize());
+ if (it == in_memory_db_.end()) {
+ if (seed_db_) {
+ // |seed_db_| exists and no in-memory entry is found for this key, means
+ // we haven't checked the |seed_db_| yet. Query |seed_db_| and append new
+ // stats to any seed values.
+ seed_db_->GetDecodeStats(
+ key, base::BindOnce(
+ &InMemoryVideoDecodeStatsDBImpl::CompleteAppendWithSeedData,
+ weak_ptr_factory_.GetWeakPtr(), key, entry,
+ std::move(append_done_cb)));
+ return;
+ }
+
+ // Otherwise, these are the first stats for this key. Add a a copy of
+ // |entry| to the database.
+ in_memory_db_.emplace(key.Serialize(), entry);
+ } else {
+ // We've already asked the |seed_db_| for its data. Just add the new stats
+ // to our local copy via the iterators reference.
+ it->second += entry;
+ }
+
+ // Bind to avoid reentrancy.
+ std::move(BindToCurrentLoop(std::move(append_done_cb))).Run(true);
+}
+
+void InMemoryVideoDecodeStatsDBImpl::GetDecodeStats(
+ const VideoDescKey& key,
+ GetDecodeStatsCB get_stats_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(db_init_);
+
+ DVLOG(3) << __func__ << " " << key.ToLogString();
+
+ auto it = in_memory_db_.find(key.Serialize());
+ if (it == in_memory_db_.end()) {
+ if (seed_db_) {
+ // |seed_db_| exists and no in-memory entry is found for this key, means
+ // we haven't checked the |seed_db_| yet.
+ seed_db_->GetDecodeStats(
+ key, base::BindOnce(&InMemoryVideoDecodeStatsDBImpl::OnGotSeedEntry,
+ weak_ptr_factory_.GetWeakPtr(), key,
+ std::move(get_stats_cb)));
+ } else {
+ // No seed data. Return an empty entry. Bind to avoid reentrancy.
+ std::move(BindToCurrentLoop(std::move(get_stats_cb)))
+ .Run(true, std::make_unique<DecodeStatsEntry>(0, 0, 0));
+ }
+ } else {
+ // Return whatever what we found. Bind to avoid reentrancy.
+ std::move(BindToCurrentLoop(std::move(get_stats_cb)))
+ .Run(true, std::make_unique<DecodeStatsEntry>(it->second));
+ }
+}
+
+void InMemoryVideoDecodeStatsDBImpl::CompleteAppendWithSeedData(
+ const VideoDescKey& key,
+ const DecodeStatsEntry& entry,
+ AppendDecodeStatsCB append_done_cb,
+ bool read_success,
+ std::unique_ptr<DecodeStatsEntry> seed_entry) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(db_init_);
+
+ if (!read_success) {
+ // Not a show stopper. Log it and carry on as if the seed DB were empty.
+ DVLOG(2) << __func__ << " FAILED seed DB read for " << key.ToLogString();
+ DCHECK(!seed_entry);
+ }
+
+ if (!seed_entry)
+ seed_entry = std::make_unique<DecodeStatsEntry>(0, 0, 0);
+
+ // Add new stats to the seed entry and store in memory.
+ *seed_entry += entry;
+ in_memory_db_.emplace(key.Serialize(), *seed_entry);
+
+ DVLOG(3) << __func__ << " Updating " << key.ToLogString() << " with "
+ << entry.ToLogString() << " aggregate:" << seed_entry->ToLogString();
+
+ std::move(append_done_cb).Run(true);
+}
+
+void InMemoryVideoDecodeStatsDBImpl::OnGotSeedEntry(
+ const VideoDescKey& key,
+ GetDecodeStatsCB get_stats_cb,
+ bool success,
+ std::unique_ptr<DecodeStatsEntry> seed_entry) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ // Failure is not a show stopper. Just a debug log...
+ DVLOG(3) << __func__ << " read " << (success ? "succeeded" : "FAILED!")
+ << " entry: " << (seed_entry ? seed_entry->ToLogString() : "null");
+
+ if (!seed_entry)
+ seed_entry = std::make_unique<DecodeStatsEntry>(0, 0, 0);
+
+ // Always write to |in_memory_db_| to avoid querying |seed_db_| for this key
+ // going forward.
+ in_memory_db_.emplace(key.Serialize(), *seed_entry);
+
+ std::move(get_stats_cb).Run(true, std::move(seed_entry));
+}
+
+void InMemoryVideoDecodeStatsDBImpl::DestroyStats(
+ base::OnceClosure destroy_done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOG(2) << __func__;
+
+ // Really, this is not reachable code because user's can't clear the history
+ // for a guest/incognito account. But if that ever changes, the reasonable
+ // thing is to wipe only the |in_memory_db_|. |seed_db_| can be cleared by the
+ // profile that owns it.
+ in_memory_db_.clear();
+
+ // Bind to avoid reentrancy.
+ std::move(BindToCurrentLoop(std::move(destroy_done_cb))).Run();
+}
+
+} // namespace media
diff --git a/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h b/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h
new file mode 100644
index 00000000000..bd93873e869
--- /dev/null
+++ b/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.h
@@ -0,0 +1,126 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPABILITIES_IN_MEMORY_VIDEO_DECODE_STATS_DB_IMPL_H_
+#define MEDIA_CAPABILITIES_IN_MEMORY_VIDEO_DECODE_STATS_DB_IMPL_H_
+
+#include <map>
+#include <memory>
+
+#include "base/files/file_path.h"
+#include "base/memory/weak_ptr.h"
+#include "components/leveldb_proto/proto_database.h"
+#include "media/base/media_export.h"
+#include "media/base/video_codecs.h"
+#include "media/capabilities/video_decode_stats_db.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+class VideoDecodeStatsDBProvider;
+
+// The in-memory database disappears with profile shutdown to preserve the
+// privacy of off-the-record (OTR) browsing profiles (Guest and Incognito). It
+// also allows the MediaCapabilities API to behave the same both on and
+// off-the-record which prevents sites from detecting when users are OTR modes.
+// VideoDecodeStatsDBProvider gives incognito profiles a hook to read the stats
+// of the of the originating profile. Guest profiles are conceptually a blank
+// slate and will not have a "seed" DB.
+class MEDIA_EXPORT InMemoryVideoDecodeStatsDBFactory
+ : public VideoDecodeStatsDBFactory {
+ public:
+ // |seed_db_provider| provides access to a seed (read-only) DB instance.
+ // Callers must ensure the |seed_db_provider| outlives this factory and any
+ // databases it creates via CreateDB(). |seed_db_provider| may be null when no
+ // seed DB is available.
+ explicit InMemoryVideoDecodeStatsDBFactory(
+ VideoDecodeStatsDBProvider* seed_db_provider);
+ ~InMemoryVideoDecodeStatsDBFactory() override;
+
+ // DB is not thread-safe and is bound to the sequence used at construction.
+ std::unique_ptr<VideoDecodeStatsDB> CreateDB() override;
+
+ private:
+ // Provided at construction. Callers must ensure that object outlives this
+ // class.
+ VideoDecodeStatsDBProvider* seed_db_provider_;
+
+ DISALLOW_COPY_AND_ASSIGN(InMemoryVideoDecodeStatsDBFactory);
+};
+
+class MEDIA_EXPORT InMemoryVideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
+ public:
+ // Constructs the database. NOTE: must call Initialize() before using.
+ // |db| injects the level_db database instance for storing capabilities info.
+ // |dir| specifies where to store LevelDB files to disk. LevelDB generates a
+ // handful of files, so its recommended to provide a dedicated directory to
+ // keep them isolated.
+ explicit InMemoryVideoDecodeStatsDBImpl(
+ VideoDecodeStatsDBProvider* seed_db_provider);
+ ~InMemoryVideoDecodeStatsDBImpl() override;
+
+ // Implement VideoDecodeStatsDB.
+ void Initialize(InitializeCB init_cb) override;
+ void AppendDecodeStats(const VideoDescKey& key,
+ const DecodeStatsEntry& entry,
+ AppendDecodeStatsCB append_done_cb) override;
+ void GetDecodeStats(const VideoDescKey& key,
+ GetDecodeStatsCB get_stats_cb) override;
+ void DestroyStats(base::OnceClosure destroy_done_cb) override;
+
+ private:
+ // Called when the |seed_db_provider_| returns an initialized seed DB. Will
+ // run |init_cb|, marking the completion of Initialize().
+ void OnGotSeedDB(base::OnceCallback<void(bool)> init_cb,
+ VideoDecodeStatsDB* seed_db);
+
+ // Passed as the callback for |OnGotDecodeStats| by |AppendDecodeStats| to
+ // update the database once we've read the existing stats entry.
+ void CompleteAppendWithSeedData(const VideoDescKey& key,
+ const DecodeStatsEntry& entry,
+ AppendDecodeStatsCB append_done_cb,
+ bool read_success,
+ std::unique_ptr<DecodeStatsEntry> seed_entry);
+
+ // Called when GetDecodeStats() operation was performed. |get_stats_cb|
+ // will be run with |success| and a |DecodeStatsEntry| created from
+ // |stats_proto| or nullptr if no entry was found for the requested key.
+ void OnGotSeedEntry(const VideoDescKey& key,
+ GetDecodeStatsCB get_stats_cb,
+ bool success,
+ std::unique_ptr<DecodeStatsEntry> seed_entry);
+
+ // Indicates whether initialization is completed.
+ bool db_init_ = false;
+
+ // Lazily provides |seed_db_| from original profile. Owned by original profile
+ // and may be null.
+ VideoDecodeStatsDBProvider* seed_db_provider_ = nullptr;
+
+ // On-disk DB owned by the base profile for the off-the-record session. For
+ // incognito sessions, this will contain the original profile's stats. For
+ // guest sessions, this will be null (no notion of base profile). See
+ // |in_memory_db_|.
+ VideoDecodeStatsDB* seed_db_ = nullptr;
+
+ // In-memory DB, mapping VideoDescKey strings -> DecodeStatsEntries. This is
+ // the primary storage (read and write) for this class. The |seed_db_| is
+ // read-only, and will only be queried when the |in_memory_db_| lacks an
+ // entry for a given key.
+ std::map<std::string, DecodeStatsEntry> in_memory_db_;
+
+ // Ensures all access to class members come on the same sequence. API calls
+ // and callbacks should occur on the same sequence used during construction.
+ // LevelDB operations happen on a separate task runner, but all LevelDB
+ // callbacks to this happen on the checked sequence.
+ SEQUENCE_CHECKER(sequence_checker_);
+
+ base::WeakPtrFactory<InMemoryVideoDecodeStatsDBImpl> weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(InMemoryVideoDecodeStatsDBImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPABILITIES_IN_MEMORY_VIDEO_DECODE_STATS_DB_IMPL_H_
diff --git a/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc b/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
new file mode 100644
index 00000000000..6f6264bf490
--- /dev/null
+++ b/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
@@ -0,0 +1,391 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <memory>
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/logging.h"
+#include "base/memory/ptr_util.h"
+#include "base/test/scoped_task_environment.h"
+#include "media/capabilities/in_memory_video_decode_stats_db_impl.h"
+#include "media/capabilities/video_decode_stats_db_provider.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::_;
+using testing::Eq;
+using testing::Pointee;
+using testing::IsNull;
+
+namespace media {
+
+static VideoDecodeStatsDB::VideoDescKey kTestKey() {
+ return VideoDecodeStatsDB::VideoDescKey::MakeBucketedKey(
+ VP9PROFILE_PROFILE3, gfx::Size(1024, 768), 60);
+}
+
+static VideoDecodeStatsDB::DecodeStatsEntry kEmtpyEntry() {
+ return VideoDecodeStatsDB::DecodeStatsEntry(0, 0, 0);
+}
+
+class MockSeedDB : public VideoDecodeStatsDB {
+ public:
+ MockSeedDB() = default;
+ ~MockSeedDB() override = default;
+
+ MOCK_METHOD1(Initialize, void(InitializeCB init_cb));
+ MOCK_METHOD3(AppendDecodeStats,
+ void(const VideoDescKey& key,
+ const DecodeStatsEntry& entry,
+ AppendDecodeStatsCB append_done_cb));
+ MOCK_METHOD2(GetDecodeStats,
+ void(const VideoDescKey& key, GetDecodeStatsCB get_stats_cb));
+ MOCK_METHOD1(DestroyStats, void(base::OnceClosure destroy_done_cb));
+};
+
+class MockDBProvider : public VideoDecodeStatsDBProvider {
+ public:
+ MockDBProvider() = default;
+ ~MockDBProvider() override = default;
+
+ MOCK_METHOD1(GetVideoDecodeStatsDB, void(GetCB get_db_b));
+};
+
+template <bool WithSeedDB>
+class InMemoryDBTestBase : public testing::Test {
+ public:
+ InMemoryDBTestBase()
+ : seed_db_(WithSeedDB ? new MockSeedDB() : nullptr),
+ db_provider_(WithSeedDB ? new MockDBProvider() : nullptr),
+ in_memory_db_(new InMemoryVideoDecodeStatsDBImpl(db_provider_.get())) {
+ // Setup MockDBProvider to provide the seed DB. No need to initialize the
+ // DB here since it too is a Mock.
+ if (db_provider_) {
+ using GetCB = VideoDecodeStatsDBProvider::GetCB;
+ ON_CALL(*db_provider_, GetVideoDecodeStatsDB(_))
+ .WillByDefault([&](GetCB cb) { std::move(cb).Run(seed_db_.get()); });
+ }
+
+ // The InMemoryDB should NEVER modify the seed DB.
+ if (seed_db_) {
+ EXPECT_CALL(*seed_db_, AppendDecodeStats(_, _, _)).Times(0);
+ EXPECT_CALL(*seed_db_, DestroyStats(_)).Times(0);
+ }
+ }
+
+ void InitializeEmptyDB() {
+ if (seed_db_)
+ EXPECT_CALL(*db_provider_, GetVideoDecodeStatsDB(_));
+
+ EXPECT_CALL(*this, InitializeCB(true));
+
+ in_memory_db_->Initialize(base::BindOnce(&InMemoryDBTestBase::InitializeCB,
+ base::Unretained(this)));
+ scoped_task_environment_.RunUntilIdle();
+ }
+
+ MOCK_METHOD1(InitializeCB, void(bool success));
+ MOCK_METHOD1(AppendDecodeStatsCB, void(bool success));
+ MOCK_METHOD2(
+ GetDecodeStatsCB,
+ void(bool success,
+ std::unique_ptr<VideoDecodeStatsDB::DecodeStatsEntry> entry));
+ MOCK_METHOD0(DestroyStatsCB, void());
+
+ protected:
+ using VideoDescKey = media::VideoDecodeStatsDB::VideoDescKey;
+ using DecodeStatsEntry = media::VideoDecodeStatsDB::DecodeStatsEntry;
+
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
+ std::unique_ptr<MockSeedDB> seed_db_;
+ std::unique_ptr<MockDBProvider> db_provider_;
+ std::unique_ptr<InMemoryVideoDecodeStatsDBImpl> in_memory_db_;
+};
+
+// Specialization for tests that have/lack a seed DB. Some tests only make sense
+// with seed DB, so we separate them.
+class SeededInMemoryDBTest : public InMemoryDBTestBase<true> {};
+class SeedlessInMemoryDBTest : public InMemoryDBTestBase<false> {};
+
+TEST_F(SeedlessInMemoryDBTest, ReadExpectingEmpty) {
+ InitializeEmptyDB();
+
+ // Database is empty, seed DB is empty => expect empty stats entry.
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(kEmtpyEntry()))));
+
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(SeededInMemoryDBTest, ReadExpectingEmpty) {
+ InitializeEmptyDB();
+
+ // Make seed DB return null (empty) for this request.
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _))
+ .WillOnce([](const auto& key, auto get_cb) {
+ std::move(get_cb).Run(true, nullptr);
+ });
+
+ // Database is empty, seed DB is empty => expect empty stats entry.
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(kEmtpyEntry()))));
+
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(SeededInMemoryDBTest, ReadExpectingSeedData) {
+ InitializeEmptyDB();
+
+ // Setup seed DB to return an entry for the test key.
+ DecodeStatsEntry seed_entry(1000, 2, 10);
+
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _))
+ .WillOnce([&](const auto& key, auto get_cb) {
+ std::move(get_cb).Run(true,
+ std::make_unique<DecodeStatsEntry>(seed_entry));
+ });
+
+ // Seed DB has a an entry for the test key. Expect it!
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(seed_entry))));
+
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // Verify a second GetDecodeStats() call with the same key does not trigger a
+ // second call to the seed DB (we cache it).
+ EXPECT_CALL(*seed_db_, GetDecodeStats(_, _)).Times(0);
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(seed_entry))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(SeededInMemoryDBTest, AppendReadAndDestroy) {
+ const DecodeStatsEntry seed_entry(1000, 2, 10);
+ const DecodeStatsEntry double_seed_entry(2000, 4, 20);
+ const DecodeStatsEntry triple_seed_entry(3000, 6, 30);
+
+ InitializeEmptyDB();
+
+ // Setup seed DB to always return an entry for the test key.
+ ON_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _))
+ .WillByDefault([&](const auto& key, auto get_cb) {
+ std::move(get_cb).Run(true,
+ std::make_unique<DecodeStatsEntry>(seed_entry));
+ });
+
+ // First append should trigger a request for the same key from the seed DB.
+ // Simulate a successful read providing seed_entry for that key.
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _));
+
+ // Append the same seed entry, doubling the stats for this key.
+ EXPECT_CALL(*this, AppendDecodeStatsCB(true));
+ in_memory_db_->AppendDecodeStats(
+ kTestKey(), seed_entry,
+ base::BindOnce(&InMemoryDBTestBase::AppendDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // Seed DB should not be queried again for this key.
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _)).Times(0);
+
+ // Now verify that the stats were doubled by the append above.
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(double_seed_entry))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // Append the same seed entry again to triple the stats. Additional appends
+ // should not trigger queries the seed DB for this key.
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _)).Times(0);
+ in_memory_db_->AppendDecodeStats(
+ kTestKey(), seed_entry,
+ base::BindOnce(&InMemoryDBTestBase::AppendDecodeStatsCB,
+ base::Unretained(this)));
+
+ // Verify we have 3x the stats.
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(triple_seed_entry))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ // Now destroy the in-memory stats...
+ EXPECT_CALL(*this, DestroyStatsCB());
+ in_memory_db_->DestroyStats(base::BindOnce(
+ &InMemoryDBTestBase::DestroyStatsCB, base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // With in-memory stats now gone, GetDecodeStats(kTestKey()) should again
+ // trigger a call to the seed DB and return the un-doubled seed stats.
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _));
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(seed_entry))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(SeedlessInMemoryDBTest, AppendReadAndDestroy) {
+ const DecodeStatsEntry entry(50, 1, 5);
+ const DecodeStatsEntry double_entry(100, 2, 10);
+
+ InitializeEmptyDB();
+
+ // Expect successful append to the empty seedless DB.
+ EXPECT_CALL(*this, AppendDecodeStatsCB(true));
+ in_memory_db_->AppendDecodeStats(
+ kTestKey(), entry,
+ base::BindOnce(&InMemoryDBTestBase::AppendDecodeStatsCB,
+ base::Unretained(this)));
+
+ // Verify stats can be read back.
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(entry))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // Append same stats again to test summation.
+ EXPECT_CALL(*this, AppendDecodeStatsCB(true));
+ in_memory_db_->AppendDecodeStats(
+ kTestKey(), entry,
+ base::BindOnce(&InMemoryDBTestBase::AppendDecodeStatsCB,
+ base::Unretained(this)));
+
+ // Verify doubled stats can be read back.
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(double_entry))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // Now destroy the in-memory stats...
+ EXPECT_CALL(*this, DestroyStatsCB());
+ in_memory_db_->DestroyStats(base::BindOnce(
+ &InMemoryDBTestBase::DestroyStatsCB, base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // Verify DB now empty for this key.
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(kEmtpyEntry()))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(SeededInMemoryDBTest, ProvidedNullSeedDB) {
+ // DB provider may provide a null seed DB if it encounters some error.
+ EXPECT_CALL(*db_provider_, GetVideoDecodeStatsDB(_))
+ .WillOnce([](auto get_db_cb) { std::move(get_db_cb).Run(nullptr); });
+
+ // Failing to obtain the seed DB is not a show stopper. The in-memory DB
+ // should simply carry on in a seedless fashion.
+ EXPECT_CALL(*this, InitializeCB(true));
+ in_memory_db_->Initialize(base::BindOnce(&InMemoryDBTestBase::InitializeCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // Writes still succeed.
+ EXPECT_CALL(*this, AppendDecodeStatsCB(true));
+ const DecodeStatsEntry entry(50, 1, 5);
+ in_memory_db_->AppendDecodeStats(
+ kTestKey(), entry,
+ base::BindOnce(&InMemoryDBTestBase::AppendDecodeStatsCB,
+ base::Unretained(this)));
+
+ // Reads should still succeed.
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(entry))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(SeededInMemoryDBTest, SeedReadFailureOnGettingStats) {
+ // Everything seems fine at initialization...
+ InitializeEmptyDB();
+
+ // But seed DB will repeatedly fail to provide stats.
+ ON_CALL(*seed_db_, GetDecodeStats(_, _))
+ .WillByDefault([](const auto& key, auto get_cb) {
+ std::move(get_cb).Run(false, nullptr);
+ });
+
+ // Reading the in-memory will still try to read the seed DB, and the read
+ // callback will simply report that the DB is empty for this key.
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _));
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(kEmtpyEntry()))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(SeededInMemoryDBTest, SeedReadFailureOnAppendingingStats) {
+ // Everything seems fine at initialization...
+ InitializeEmptyDB();
+
+ // But seed DB will repeatedly fail to provide stats.
+ ON_CALL(*seed_db_, GetDecodeStats(_, _))
+ .WillByDefault([](const auto& key, auto get_cb) {
+ std::move(get_cb).Run(false, nullptr);
+ });
+
+ // Appending to the in-memory will still try to read the seed DB, and the
+ // append will proceed successfully as if the seed DB were empty.
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _));
+ EXPECT_CALL(*this, AppendDecodeStatsCB(true));
+ const DecodeStatsEntry entry(50, 1, 5);
+ in_memory_db_->AppendDecodeStats(
+ kTestKey(), entry,
+ base::BindOnce(&InMemoryDBTestBase::AppendDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+ ::testing::Mock::VerifyAndClear(this);
+
+ // Reading the appended data works without issue and does not trigger new
+ // queries to the seed DB.
+ EXPECT_CALL(*seed_db_, GetDecodeStats(Eq(kTestKey()), _)).Times(0);
+ EXPECT_CALL(*this, GetDecodeStatsCB(true, Pointee(Eq(entry))));
+ in_memory_db_->GetDecodeStats(
+ kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+}
+
+} // namespace media
diff --git a/chromium/media/capabilities/video_decode_stats_db.cc b/chromium/media/capabilities/video_decode_stats_db.cc
index a3e5b85832f..2978a28fb7f 100644
--- a/chromium/media/capabilities/video_decode_stats_db.cc
+++ b/chromium/media/capabilities/video_decode_stats_db.cc
@@ -4,6 +4,8 @@
#include "media/capabilities/video_decode_stats_db.h"
+#include "base/format_macros.h"
+#include "base/strings/stringprintf.h"
#include "media/capabilities/bucket_utility.h"
namespace media {
@@ -25,12 +27,72 @@ VideoDecodeStatsDB::VideoDescKey::VideoDescKey(VideoCodecProfile codec_profile,
int frame_rate)
: codec_profile(codec_profile), size(size), frame_rate(frame_rate) {}
+std::string VideoDecodeStatsDB::VideoDescKey::Serialize() const {
+ return base::StringPrintf("%d|%s|%d", static_cast<int>(codec_profile),
+ size.ToString().c_str(), frame_rate);
+}
+
+std::string VideoDecodeStatsDB::VideoDescKey::ToLogString() const {
+ return "Key {" + Serialize() + "}";
+}
+
VideoDecodeStatsDB::DecodeStatsEntry::DecodeStatsEntry(
uint64_t frames_decoded,
uint64_t frames_dropped,
uint64_t frames_decoded_power_efficient)
: frames_decoded(frames_decoded),
frames_dropped(frames_dropped),
- frames_decoded_power_efficient(frames_decoded_power_efficient) {}
+ frames_decoded_power_efficient(frames_decoded_power_efficient) {
+ DCHECK_GE(frames_decoded, 0u);
+ DCHECK_GE(frames_dropped, 0u);
+ DCHECK_GE(frames_decoded_power_efficient, 0u);
+}
+
+VideoDecodeStatsDB::DecodeStatsEntry::DecodeStatsEntry(
+ const DecodeStatsEntry& entry)
+ : frames_decoded(entry.frames_decoded),
+ frames_dropped(entry.frames_dropped),
+ frames_decoded_power_efficient(entry.frames_decoded_power_efficient) {}
+
+std::string VideoDecodeStatsDB::DecodeStatsEntry::ToLogString() const {
+ return base::StringPrintf(
+ "DecodeStatsEntry {frames decoded:%" PRIu64 ", dropped:%" PRIu64
+ ", power efficient decoded:%" PRIu64 "}",
+ frames_decoded, frames_dropped, frames_decoded_power_efficient);
+}
+
+VideoDecodeStatsDB::DecodeStatsEntry& VideoDecodeStatsDB::DecodeStatsEntry::
+operator+=(const DecodeStatsEntry& right) {
+ DCHECK_GE(right.frames_decoded, 0u);
+ DCHECK_GE(right.frames_dropped, 0u);
+ DCHECK_GE(right.frames_decoded_power_efficient, 0u);
+
+ frames_decoded += right.frames_decoded;
+ frames_dropped += right.frames_dropped;
+ frames_decoded_power_efficient += right.frames_decoded_power_efficient;
+ return *this;
+}
+
+bool operator==(const VideoDecodeStatsDB::VideoDescKey& x,
+ const VideoDecodeStatsDB::VideoDescKey& y) {
+ return x.codec_profile == y.codec_profile && x.size == y.size &&
+ x.frame_rate == y.frame_rate;
+}
+bool operator!=(const VideoDecodeStatsDB::VideoDescKey& x,
+ const VideoDecodeStatsDB::VideoDescKey& y) {
+ return !(x == y);
+}
+
+bool operator==(const VideoDecodeStatsDB::DecodeStatsEntry& x,
+ const VideoDecodeStatsDB::DecodeStatsEntry& y) {
+ return x.frames_decoded == y.frames_decoded &&
+ x.frames_dropped == y.frames_dropped &&
+ x.frames_decoded_power_efficient == y.frames_decoded_power_efficient;
+}
+
+bool operator!=(const VideoDecodeStatsDB::DecodeStatsEntry& x,
+ const VideoDecodeStatsDB::DecodeStatsEntry& y) {
+ return !(x == y);
+}
} // namespace media
diff --git a/chromium/media/capabilities/video_decode_stats_db.h b/chromium/media/capabilities/video_decode_stats_db.h
index 83f3d30dd2c..7656cc1350c 100644
--- a/chromium/media/capabilities/video_decode_stats_db.h
+++ b/chromium/media/capabilities/video_decode_stats_db.h
@@ -6,8 +6,10 @@
#define MEDIA_CAPABILITIES_VIDEO_DECODE_STATS_DB_H_
#include <memory>
+#include <string>
-#include "base/callback.h"
+#include "base/callback_forward.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "media/base/media_export.h"
#include "media/base/video_codecs.h"
@@ -26,11 +28,19 @@ class MEDIA_EXPORT VideoDecodeStatsDB {
const gfx::Size& size,
int frame_rate);
+ // Returns a concise string representation of the key for storing in DB.
+ std::string Serialize() const;
+
+ // For debug logging. NOT interchangeable with Serialize().
+ std::string ToLogString() const;
+
+ // Note: operator == and != are defined outside this class.
const VideoCodecProfile codec_profile;
const gfx::Size size;
const int frame_rate;
private:
+ // All key's should be "bucketed" using MakeBucketedKey(...).
VideoDescKey(VideoCodecProfile codec_profile,
const gfx::Size& size,
int frame_rate);
@@ -42,6 +52,15 @@ class MEDIA_EXPORT VideoDecodeStatsDB {
DecodeStatsEntry(uint64_t frames_decoded,
uint64_t frames_dropped,
uint64_t frames_decoded_power_efficient);
+ DecodeStatsEntry(const DecodeStatsEntry& entry);
+
+ // Add stats from |right| to |this| entry.
+ DecodeStatsEntry& operator+=(const DecodeStatsEntry& right);
+
+ // For debug logging.
+ std::string ToLogString() const;
+
+ // Note: operator == and != are defined outside this class.
uint64_t frames_decoded;
uint64_t frames_dropped;
uint64_t frames_decoded_power_efficient;
@@ -53,7 +72,8 @@ class MEDIA_EXPORT VideoDecodeStatsDB {
// before calling other APIs. Initialization must be RE-RUN after calling
// DestroyStats() and receiving its completion callback. |init_cb| must not be
// a null callback.
- virtual void Initialize(base::OnceCallback<void(bool)> init_cb) = 0;
+ using InitializeCB = base::OnceCallback<void(bool)>;
+ virtual void Initialize(InitializeCB init_cb) = 0;
// Appends `stats` to existing entry associated with `key`. Will create a new
// entry if none exists. The operation is asynchronous. The caller should be
@@ -79,6 +99,15 @@ class MEDIA_EXPORT VideoDecodeStatsDB {
virtual void DestroyStats(base::OnceClosure destroy_done_cb) = 0;
};
+MEDIA_EXPORT bool operator==(const VideoDecodeStatsDB::VideoDescKey& x,
+ const VideoDecodeStatsDB::VideoDescKey& y);
+MEDIA_EXPORT bool operator!=(const VideoDecodeStatsDB::VideoDescKey& x,
+ const VideoDecodeStatsDB::VideoDescKey& y);
+MEDIA_EXPORT bool operator==(const VideoDecodeStatsDB::DecodeStatsEntry& x,
+ const VideoDecodeStatsDB::DecodeStatsEntry& y);
+MEDIA_EXPORT bool operator!=(const VideoDecodeStatsDB::DecodeStatsEntry& x,
+ const VideoDecodeStatsDB::DecodeStatsEntry& y);
+
// Factory interface to create a DB instance.
class MEDIA_EXPORT VideoDecodeStatsDBFactory {
public:
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl.cc b/chromium/media/capabilities/video_decode_stats_db_impl.cc
index 0dc22b6f9ca..b184ebc976b 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl.cc
+++ b/chromium/media/capabilities/video_decode_stats_db_impl.cc
@@ -8,10 +8,9 @@
#include <tuple>
#include "base/files/file_path.h"
-#include "base/format_macros.h"
+#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
#include "base/sequence_checker.h"
-#include "base/strings/stringprintf.h"
#include "base/task_scheduler/post_task.h"
#include "components/leveldb_proto/proto_database_impl.h"
#include "media/capabilities/video_decode_stats.pb.h"
@@ -24,26 +23,6 @@ namespace {
// See comments in components/leveldb_proto/leveldb_database.h
const char kDatabaseClientName[] = "VideoDecodeStatsDB";
-// Serialize the |entry| to a string to use as a key in the database.
-std::string SerializeKey(const VideoDecodeStatsDB::VideoDescKey& key) {
- return base::StringPrintf("%d|%s|%d", static_cast<int>(key.codec_profile),
- key.size.ToString().c_str(), key.frame_rate);
-}
-
-// For debug logging.
-std::string KeyToString(const VideoDecodeStatsDB::VideoDescKey& key) {
- return "Key {" + SerializeKey(key) + "}";
-}
-
-// For debug logging.
-std::string EntryToString(const VideoDecodeStatsDB::DecodeStatsEntry& entry) {
- return base::StringPrintf("DecodeStatsEntry {frames decoded:%" PRIu64
- ", dropped:%" PRIu64
- ", power efficient decoded:%" PRIu64 "}",
- entry.frames_decoded, entry.frames_dropped,
- entry.frames_decoded_power_efficient);
-}
-
}; // namespace
VideoDecodeStatsDBImplFactory::VideoDecodeStatsDBImplFactory(
@@ -57,13 +36,13 @@ VideoDecodeStatsDBImplFactory::~VideoDecodeStatsDBImplFactory() = default;
std::unique_ptr<VideoDecodeStatsDB> VideoDecodeStatsDBImplFactory::CreateDB() {
std::unique_ptr<leveldb_proto::ProtoDatabase<DecodeStatsProto>> db_;
- auto inner_db =
+ auto proto_db =
std::make_unique<leveldb_proto::ProtoDatabaseImpl<DecodeStatsProto>>(
base::CreateSequencedTaskRunnerWithTraits(
{base::MayBlock(), base::TaskPriority::BACKGROUND,
base::TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN}));
- return std::make_unique<VideoDecodeStatsDBImpl>(std::move(inner_db), db_dir_);
+ return std::make_unique<VideoDecodeStatsDBImpl>(std::move(proto_db), db_dir_);
}
VideoDecodeStatsDBImpl::VideoDecodeStatsDBImpl(
@@ -78,8 +57,7 @@ VideoDecodeStatsDBImpl::~VideoDecodeStatsDBImpl() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
-void VideoDecodeStatsDBImpl::Initialize(
- base::OnceCallback<void(bool)> init_cb) {
+void VideoDecodeStatsDBImpl::Initialize(InitializeCB init_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(init_cb);
DCHECK(!IsInitialized());
@@ -94,8 +72,7 @@ void VideoDecodeStatsDBImpl::Initialize(
weak_ptr_factory_.GetWeakPtr(), std::move(init_cb)));
}
-void VideoDecodeStatsDBImpl::OnInit(base::OnceCallback<void(bool)> init_cb,
- bool success) {
+void VideoDecodeStatsDBImpl::OnInit(InitializeCB init_cb, bool success) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DVLOG(2) << __func__ << (success ? " succeeded" : " FAILED!");
UMA_HISTOGRAM_BOOLEAN("Media.VideoDecodeStatsDB.OpSuccess.Initialize",
@@ -122,10 +99,10 @@ void VideoDecodeStatsDBImpl::AppendDecodeStats(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(IsInitialized());
- DVLOG(3) << __func__ << " Reading key " << KeyToString(key)
- << " from DB with intent to update with " << EntryToString(entry);
+ DVLOG(3) << __func__ << " Reading key " << key.ToLogString()
+ << " from DB with intent to update with " << entry.ToLogString();
- db_->GetEntry(SerializeKey(key),
+ db_->GetEntry(key.Serialize(),
base::BindOnce(&VideoDecodeStatsDBImpl::WriteUpdatedEntry,
weak_ptr_factory_.GetWeakPtr(), key, entry,
std::move(append_done_cb)));
@@ -136,10 +113,10 @@ void VideoDecodeStatsDBImpl::GetDecodeStats(const VideoDescKey& key,
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(IsInitialized());
- DVLOG(3) << __func__ << " " << KeyToString(key);
+ DVLOG(3) << __func__ << " " << key.ToLogString();
db_->GetEntry(
- SerializeKey(key),
+ key.Serialize(),
base::BindOnce(&VideoDecodeStatsDBImpl::OnGotDecodeStats,
weak_ptr_factory_.GetWeakPtr(), std::move(get_stats_cb)));
}
@@ -158,7 +135,7 @@ void VideoDecodeStatsDBImpl::WriteUpdatedEntry(
read_success);
if (!read_success) {
- DVLOG(2) << __func__ << " FAILED DB read for " << KeyToString(key)
+ DVLOG(2) << __func__ << " FAILED DB read for " << key.ToLogString()
<< "; ignoring update!";
std::move(append_done_cb).Run(false);
return;
@@ -184,17 +161,17 @@ void VideoDecodeStatsDBImpl::WriteUpdatedEntry(
prev_stats_proto->set_frames_decoded_power_efficient(
sum_frames_decoded_power_efficient);
- DVLOG(3) << __func__ << " Updating " << KeyToString(key) << " with "
- << EntryToString(entry) << " aggregate:"
- << EntryToString(
- DecodeStatsEntry(sum_frames_decoded, sum_frames_dropped,
- sum_frames_decoded_power_efficient));
+ DVLOG(3) << __func__ << " Updating " << key.ToLogString() << " with "
+ << entry.ToLogString() << " aggregate:"
+ << DecodeStatsEntry(sum_frames_decoded, sum_frames_dropped,
+ sum_frames_decoded_power_efficient)
+ .ToLogString();
using ProtoDecodeStatsEntry = leveldb_proto::ProtoDatabase<DecodeStatsProto>;
std::unique_ptr<ProtoDecodeStatsEntry::KeyEntryVector> entries =
std::make_unique<ProtoDecodeStatsEntry::KeyEntryVector>();
- entries->emplace_back(SerializeKey(key), *prev_stats_proto);
+ entries->emplace_back(key.Serialize(), *prev_stats_proto);
db_->UpdateEntries(std::move(entries),
std::make_unique<leveldb_proto::KeyVector>(),
@@ -227,7 +204,7 @@ void VideoDecodeStatsDBImpl::OnGotDecodeStats(
}
DVLOG(3) << __func__ << " read " << (success ? "succeeded" : "FAILED!")
- << " entry: " << (entry ? EntryToString(*entry) : "nullptr");
+ << " entry: " << (entry ? entry->ToLogString() : "nullptr");
std::move(get_stats_cb).Run(success, std::move(entry));
}
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl.h b/chromium/media/capabilities/video_decode_stats_db_impl.h
index 43f2e4610db..f83289c1fc8 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl.h
+++ b/chromium/media/capabilities/video_decode_stats_db_impl.h
@@ -27,6 +27,9 @@ class DecodeStatsProto;
class MEDIA_EXPORT VideoDecodeStatsDBImplFactory
: public VideoDecodeStatsDBFactory {
public:
+ // |db_dir| specifies where to store LevelDB files to disk. LevelDB generates
+ // a handful of files, so its recommended to provide a dedicated directory to
+ // keep them isolated.
explicit VideoDecodeStatsDBImplFactory(base::FilePath db_dir);
~VideoDecodeStatsDBImplFactory() override;
std::unique_ptr<VideoDecodeStatsDB> CreateDB() override;
@@ -53,7 +56,7 @@ class MEDIA_EXPORT VideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
~VideoDecodeStatsDBImpl() override;
// Implement VideoDecodeStatsDB.
- void Initialize(base::OnceCallback<void(bool)> init_cb) override;
+ void Initialize(InitializeCB init_cb) override;
void AppendDecodeStats(const VideoDescKey& key,
const DecodeStatsEntry& entry,
AppendDecodeStatsCB append_done_cb) override;
@@ -66,7 +69,7 @@ class MEDIA_EXPORT VideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
// Called when the database has been initialized. Will immediately call
// |init_cb| to forward |success|.
- void OnInit(base::OnceCallback<void(bool)> init_cb, bool success);
+ void OnInit(InitializeCB init_cb, bool success);
// Returns true if the DB is successfully initialized.
bool IsInitialized();
diff --git a/chromium/media/capabilities/video_decode_stats_db_provider.cc b/chromium/media/capabilities/video_decode_stats_db_provider.cc
new file mode 100644
index 00000000000..b58e44c7a26
--- /dev/null
+++ b/chromium/media/capabilities/video_decode_stats_db_provider.cc
@@ -0,0 +1,11 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capabilities/video_decode_stats_db_provider.h"
+
+namespace media {
+
+VideoDecodeStatsDBProvider::~VideoDecodeStatsDBProvider() = default;
+
+} // namespace media
diff --git a/chromium/media/capabilities/video_decode_stats_db_provider.h b/chromium/media/capabilities/video_decode_stats_db_provider.h
new file mode 100644
index 00000000000..6e56d9c6415
--- /dev/null
+++ b/chromium/media/capabilities/video_decode_stats_db_provider.h
@@ -0,0 +1,36 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPABILITIES_VIDEO_DECODE_STATS_DB_PROVIDER_H_
+#define MEDIA_CAPABILITIES_VIDEO_DECODE_STATS_DB_PROVIDER_H_
+
+#include "base/callback_forward.h"
+#include "media/base/media_export.h"
+
+namespace media {
+
+class VideoDecodeStatsDB;
+
+// Interface for extracting a pointer to the DB from its owner. DB lifetime is
+// assumed to match that of the provider. Callers must not use DB after provider
+// has been destroyed. This allows sharing a "seed" DB instance between an
+// Incognito profile and the original profile, which re-uses the in-memory
+// cache for that DB and avoids race conditions of instantiating a second DB
+// that reads the same files.
+class MEDIA_EXPORT VideoDecodeStatsDBProvider {
+ public:
+ // Request a pointer to the *initialized* DB owned by this provider. Call
+ // lazily to avoid triggering unnecessary DB initialization. |db| is null in
+ // the event of an error. Callback may be run immediately if |db| is already
+ // initialized by provider.
+ using GetCB = base::OnceCallback<void(VideoDecodeStatsDB* db)>;
+ virtual void GetVideoDecodeStatsDB(GetCB get_db_b) = 0;
+
+ protected:
+ virtual ~VideoDecodeStatsDBProvider();
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPABILITIES_VIDEO_DECODE_STATS_DB_PROVIDER_H_ \ No newline at end of file
diff --git a/chromium/media/capabilities/video_decode_stats_db_unittest.cc b/chromium/media/capabilities/video_decode_stats_db_unittest.cc
index 33444625082..6bb83107aaf 100644
--- a/chromium/media/capabilities/video_decode_stats_db_unittest.cc
+++ b/chromium/media/capabilities/video_decode_stats_db_unittest.cc
@@ -83,13 +83,6 @@ class VideoDecodeStatsDBImplTest : public ::testing::Test {
DISALLOW_COPY_AND_ASSIGN(VideoDecodeStatsDBImplTest);
};
-MATCHER_P(EntryEq, other_entry, "") {
- return arg.frames_decoded == other_entry.frames_decoded &&
- arg.frames_dropped == other_entry.frames_dropped &&
- arg.frames_decoded_power_efficient ==
- other_entry.frames_decoded_power_efficient;
-}
-
TEST_F(VideoDecodeStatsDBImplTest, ReadExpectingNothing) {
EXPECT_CALL(*this, OnInitialize(true));
fake_db_->InitCallback(true);
@@ -122,7 +115,7 @@ TEST_F(VideoDecodeStatsDBImplTest, WriteReadAndDestroy) {
fake_db_->GetCallback(true);
fake_db_->UpdateCallback(true);
- EXPECT_CALL(*this, MockGetDecodeStatsCb(true, Pointee(EntryEq(entry))));
+ EXPECT_CALL(*this, MockGetDecodeStatsCb(true, Pointee(Eq(entry))));
stats_db_->GetDecodeStats(
key, base::BindOnce(&VideoDecodeStatsDBImplTest::GetDecodeStatsCb,
base::Unretained(this)));
@@ -139,8 +132,7 @@ TEST_F(VideoDecodeStatsDBImplTest, WriteReadAndDestroy) {
// Expect to read what was written (2x the initial entry).
VideoDecodeStatsDB::DecodeStatsEntry aggregate_entry(2000, 4, 20);
- EXPECT_CALL(*this,
- MockGetDecodeStatsCb(true, Pointee(EntryEq(aggregate_entry))));
+ EXPECT_CALL(*this, MockGetDecodeStatsCb(true, Pointee(Eq(aggregate_entry))));
stats_db_->GetDecodeStats(
key, base::BindOnce(&VideoDecodeStatsDBImplTest::GetDecodeStatsCb,
base::Unretained(this)));
diff --git a/chromium/media/capture/BUILD.gn b/chromium/media/capture/BUILD.gn
index be5925b27b6..f9e2ccbb4f1 100644
--- a/chromium/media/capture/BUILD.gn
+++ b/chromium/media/capture/BUILD.gn
@@ -53,12 +53,8 @@ source_set("capture_device_specific") {
"content/animated_content_sampler.h",
"content/capture_resolution_chooser.cc",
"content/capture_resolution_chooser.h",
- "content/screen_capture_device_core.cc",
- "content/screen_capture_device_core.h",
"content/smooth_event_sampler.cc",
"content/smooth_event_sampler.h",
- "content/thread_safe_capture_oracle.cc",
- "content/thread_safe_capture_oracle.h",
"content/video_capture_oracle.cc",
"content/video_capture_oracle.h",
"video/blob_utils.cc",
@@ -97,24 +93,8 @@ source_set("capture_device_specific") {
component("capture_lib") {
defines = [ "CAPTURE_IMPLEMENTATION" ]
sources = [
- "video/linux/camera_config_chromeos.cc",
- "video/linux/camera_config_chromeos.h",
- "video/linux/v4l2_capture_delegate.cc",
- "video/linux/v4l2_capture_delegate.h",
- "video/linux/video_capture_device_chromeos.cc",
- "video/linux/video_capture_device_chromeos.h",
- "video/linux/video_capture_device_factory_linux.cc",
- "video/linux/video_capture_device_factory_linux.h",
- "video/linux/video_capture_device_linux.cc",
- "video/linux/video_capture_device_linux.h",
- "video/mac/video_capture_device_avfoundation_mac.h",
- "video/mac/video_capture_device_avfoundation_mac.mm",
- "video/mac/video_capture_device_decklink_mac.h",
- "video/mac/video_capture_device_decklink_mac.mm",
- "video/mac/video_capture_device_factory_mac.h",
- "video/mac/video_capture_device_factory_mac.mm",
- "video/mac/video_capture_device_mac.h",
- "video/mac/video_capture_device_mac.mm",
+ "video/create_video_capture_device_factory.cc",
+ "video/create_video_capture_device_factory.h",
"video/scoped_buffer_pool_reservation.h",
"video/shared_memory_buffer_tracker.cc",
"video/shared_memory_buffer_tracker.h",
@@ -130,33 +110,14 @@ component("capture_lib") {
"video/video_capture_device_client.cc",
"video/video_capture_device_client.h",
"video/video_capture_jpeg_decoder.h",
+ "video/video_capture_jpeg_decoder_impl.cc",
+ "video/video_capture_jpeg_decoder_impl.h",
"video/video_capture_system.h",
"video/video_capture_system_impl.cc",
"video/video_capture_system_impl.h",
"video/video_frame_receiver.h",
"video/video_frame_receiver_on_task_runner.cc",
"video/video_frame_receiver_on_task_runner.h",
- "video/win/capability_list_win.cc",
- "video/win/capability_list_win.h",
- "video/win/filter_base_win.cc",
- "video/win/filter_base_win.h",
- "video/win/metrics.cc",
- "video/win/metrics.h",
- "video/win/pin_base_win.cc",
- "video/win/pin_base_win.h",
- "video/win/sink_filter_observer_win.h",
- "video/win/sink_filter_win.cc",
- "video/win/sink_filter_win.h",
- "video/win/sink_input_pin_win.cc",
- "video/win/sink_input_pin_win.h",
- "video/win/video_capture_device_factory_win.cc",
- "video/win/video_capture_device_factory_win.h",
- "video/win/video_capture_device_mf_win.cc",
- "video/win/video_capture_device_mf_win.h",
- "video/win/video_capture_device_utils_win.cc",
- "video/win/video_capture_device_utils_win.h",
- "video/win/video_capture_device_win.cc",
- "video/win/video_capture_device_win.h",
"video_capturer_source.cc",
"video_capturer_source.h",
]
@@ -173,6 +134,7 @@ component("capture_lib") {
"//media/capture/mojom:image_capture",
"//media/capture/mojom:image_capture_types",
"//media/capture/mojom:video_capture",
+ "//media/mojo/clients:jpeg_decode_accelerator",
"//media/mojo/interfaces:interfaces",
"//services/service_manager/public/cpp",
"//third_party/libyuv",
@@ -192,6 +154,16 @@ component("capture_lib") {
}
if (is_mac) {
+ sources += [
+ "video/mac/video_capture_device_avfoundation_mac.h",
+ "video/mac/video_capture_device_avfoundation_mac.mm",
+ "video/mac/video_capture_device_decklink_mac.h",
+ "video/mac/video_capture_device_decklink_mac.mm",
+ "video/mac/video_capture_device_factory_mac.h",
+ "video/mac/video_capture_device_factory_mac.mm",
+ "video/mac/video_capture_device_mac.h",
+ "video/mac/video_capture_device_mac.mm",
+ ]
deps += [ "//third_party/decklink" ]
libs = [
"AVFoundation.framework",
@@ -204,6 +176,29 @@ component("capture_lib") {
}
if (is_win) {
+ sources += [
+ "video/win/capability_list_win.cc",
+ "video/win/capability_list_win.h",
+ "video/win/filter_base_win.cc",
+ "video/win/filter_base_win.h",
+ "video/win/metrics.cc",
+ "video/win/metrics.h",
+ "video/win/pin_base_win.cc",
+ "video/win/pin_base_win.h",
+ "video/win/sink_filter_observer_win.h",
+ "video/win/sink_filter_win.cc",
+ "video/win/sink_filter_win.h",
+ "video/win/sink_input_pin_win.cc",
+ "video/win/sink_input_pin_win.h",
+ "video/win/video_capture_device_factory_win.cc",
+ "video/win/video_capture_device_factory_win.h",
+ "video/win/video_capture_device_mf_win.cc",
+ "video/win/video_capture_device_mf_win.h",
+ "video/win/video_capture_device_utils_win.cc",
+ "video/win/video_capture_device_utils_win.h",
+ "video/win/video_capture_device_win.cc",
+ "video/win/video_capture_device_win.h",
+ ]
deps += [ "//media/base/win" ]
libs = [
"mf.lib",
@@ -221,6 +216,25 @@ component("capture_lib") {
configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
}
+ # This includes the case of ChromeOS
+ if (is_linux) {
+ sources += [
+ "video/linux/camera_config_chromeos.cc",
+ "video/linux/camera_config_chromeos.h",
+ "video/linux/v4l2_capture_delegate.cc",
+ "video/linux/v4l2_capture_delegate.h",
+ "video/linux/v4l2_capture_device.h",
+ "video/linux/v4l2_capture_device_impl.cc",
+ "video/linux/v4l2_capture_device_impl.h",
+ "video/linux/video_capture_device_chromeos.cc",
+ "video/linux/video_capture_device_chromeos.h",
+ "video/linux/video_capture_device_factory_linux.cc",
+ "video/linux/video_capture_device_factory_linux.h",
+ "video/linux/video_capture_device_linux.cc",
+ "video/linux/video_capture_device_linux.h",
+ ]
+ }
+
if (is_chromeos) {
sources += [
"video/chromeos/camera_3a_controller.cc",
@@ -248,10 +262,11 @@ component("capture_lib") {
"video/chromeos/video_capture_device_factory_chromeos.cc",
"video/chromeos/video_capture_device_factory_chromeos.h",
]
+ public_deps += [ "//media/capture/video/chromeos/public" ]
deps += [
"//chromeos:chromeos",
"//media/capture/video/chromeos/mojo:cros_camera",
- "//mojo/edk",
+ "//third_party/libdrm",
"//third_party/libsync",
]
}
@@ -286,7 +301,10 @@ test("capture_unittests") {
"video/fake_video_capture_device_unittest.cc",
"video/file_video_capture_device_unittest.cc",
"video/linux/camera_config_chromeos_unittest.cc",
+ "video/linux/fake_v4l2_impl.cc",
+ "video/linux/fake_v4l2_impl.h",
"video/linux/v4l2_capture_delegate_unittest.cc",
+ "video/linux/video_capture_device_factory_linux_unittest.cc",
"video/mac/video_capture_device_factory_mac_unittest.mm",
"video/mock_gpu_memory_buffer_manager.cc",
"video/mock_gpu_memory_buffer_manager.h",
@@ -308,7 +326,7 @@ test("capture_unittests") {
"//media:test_support",
"//media/capture/mojom:image_capture",
"//media/capture/mojom:image_capture_types",
- "//mojo/edk",
+ "//mojo/core/embedder",
"//testing/gmock",
"//testing/gtest",
"//ui/gfx:test_support",
@@ -358,7 +376,7 @@ test("capture_unittests") {
deps += [
"//chromeos:chromeos",
"//media/capture/video/chromeos/mojo:cros_camera",
- "//mojo/edk",
+ "//mojo/core/embedder",
"//third_party/libdrm",
"//third_party/libsync",
"//third_party/minigbm",
diff --git a/chromium/media/capture/DEPS b/chromium/media/capture/DEPS
index d415fec5261..809f1c1109f 100644
--- a/chromium/media/capture/DEPS
+++ b/chromium/media/capture/DEPS
@@ -1,3 +1,3 @@
specific_include_rules = {
- "run_all_unittests.cc": [ "+mojo/edk/embedder" ],
+ "run_all_unittests.cc": [ "+mojo/core/embedder" ],
}
diff --git a/chromium/media/capture/content/android/BUILD.gn b/chromium/media/capture/content/android/BUILD.gn
index 92ff80e2ea3..521a0f0a51f 100644
--- a/chromium/media/capture/content/android/BUILD.gn
+++ b/chromium/media/capture/content/android/BUILD.gn
@@ -14,14 +14,18 @@ source_set("android") {
sources = [
"screen_capture_machine_android.cc",
"screen_capture_machine_android.h",
+ "thread_safe_capture_oracle.cc",
+ "thread_safe_capture_oracle.h",
]
configs += [ "//media:media_config" ]
deps = [
":screen_capture_jni_headers",
+ "//media",
"//media/capture:capture_device_specific",
"//media/capture/mojom:image_capture",
"//third_party/libyuv",
"//ui/gfx:color_space",
+ "//ui/gfx/geometry",
]
}
diff --git a/chromium/media/capture/content/android/screen_capture_machine_android.cc b/chromium/media/capture/content/android/screen_capture_machine_android.cc
index 734048e68ac..be83b1379e5 100644
--- a/chromium/media/capture/content/android/screen_capture_machine_android.cc
+++ b/chromium/media/capture/content/android/screen_capture_machine_android.cc
@@ -4,10 +4,15 @@
#include "media/capture/content/android/screen_capture_machine_android.h"
+#include <utility>
+
#include "base/android/jni_android.h"
#include "base/android/scoped_java_ref.h"
#include "jni/ScreenCapture_jni.h"
+#include "media/base/video_frame.h"
+#include "media/capture/content/android/thread_safe_capture_oracle.h"
#include "media/capture/content/video_capture_oracle.h"
+#include "media/capture/video_capture_types.h"
#include "third_party/libyuv/include/libyuv.h"
using base::android::AttachCurrentThread;
@@ -94,7 +99,7 @@ void ScreenCaptureMachineAndroid::OnRGBAFrameAvailable(
frame->visible_rect().height(), libyuv::kFilterBilinear);
}
- capture_frame_cb.Run(frame, start_time, true);
+ std::move(capture_frame_cb).Run(frame, start_time, true);
lastFrame_ = frame;
}
@@ -177,7 +182,7 @@ void ScreenCaptureMachineAndroid::OnI420FrameAvailable(
frame->visible_rect().height(), libyuv::kFilterBilinear);
}
- capture_frame_cb.Run(frame, start_time, true);
+ std::move(capture_frame_cb).Run(frame, start_time, true);
lastFrame_ = frame;
}
@@ -225,20 +230,18 @@ void ScreenCaptureMachineAndroid::OnOrientationChange(
}
}
-void ScreenCaptureMachineAndroid::Start(
- const scoped_refptr<ThreadSafeCaptureOracle>& oracle_proxy,
- const VideoCaptureParams& params,
- const base::Callback<void(bool)> callback) {
+bool ScreenCaptureMachineAndroid::Start(
+ scoped_refptr<ThreadSafeCaptureOracle> oracle_proxy,
+ const VideoCaptureParams& params) {
DCHECK(oracle_proxy.get());
- oracle_proxy_ = oracle_proxy;
+ oracle_proxy_ = std::move(oracle_proxy);
j_capture_.Reset(
createScreenCaptureMachineAndroid(reinterpret_cast<intptr_t>(this)));
if (j_capture_.obj() == nullptr) {
DLOG(ERROR) << "Failed to createScreenCaptureAndroid";
- callback.Run(false);
- return;
+ return false;
}
DCHECK(params.requested_format.frame_size.GetArea());
@@ -251,24 +254,19 @@ void ScreenCaptureMachineAndroid::Start(
params.requested_format.frame_size.height());
if (!ret) {
DLOG(ERROR) << "Failed to init ScreenCaptureAndroid";
- callback.Run(ret);
- return;
+ return false;
}
ret = Java_ScreenCapture_startPrompt(AttachCurrentThread(), j_capture_);
- // Must wait for user input to start capturing before we can report back
- // device started state. However, if the user-prompt failed to show, report
- // a failed start immediately.
- if (!ret)
- callback.Run(ret);
+ // NOTE: Result of user prompt will be delivered to OnActivityResult(), and
+ // this will report the device started/error state via the |oracle_proxy_|.
+ return !!ret;
}
-void ScreenCaptureMachineAndroid::Stop(const base::Closure& callback) {
+void ScreenCaptureMachineAndroid::Stop() {
if (j_capture_.obj() != nullptr) {
Java_ScreenCapture_stopCapture(AttachCurrentThread(), j_capture_);
}
-
- callback.Run();
}
// ScreenCapture on Android works in a passive way and there are no captured
@@ -306,7 +304,7 @@ void ScreenCaptureMachineAndroid::MaybeCaptureForRefresh() {
frame->stride(VideoFrame::kVPlane), frame->visible_rect().width(),
frame->visible_rect().height(), libyuv::kFilterBilinear);
- capture_frame_cb.Run(frame, start_time, true);
+ std::move(capture_frame_cb).Run(frame, start_time, true);
}
} // namespace media
diff --git a/chromium/media/capture/content/android/screen_capture_machine_android.h b/chromium/media/capture/content/android/screen_capture_machine_android.h
index 775e6d6c00c..4b118341f62 100644
--- a/chromium/media/capture/content/android/screen_capture_machine_android.h
+++ b/chromium/media/capture/content/android/screen_capture_machine_android.h
@@ -9,16 +9,20 @@
#include <memory>
#include "base/android/scoped_java_ref.h"
+#include "base/memory/scoped_refptr.h"
#include "media/capture/capture_export.h"
-#include "media/capture/content/screen_capture_device_core.h"
namespace media {
+class ThreadSafeCaptureOracle;
+struct VideoCaptureParams;
+class VideoFrame;
+
// ScreenCaptureMachineAndroid captures 32bit RGB or YUV420 triplanar.
-class CAPTURE_EXPORT ScreenCaptureMachineAndroid : public VideoCaptureMachine {
+class CAPTURE_EXPORT ScreenCaptureMachineAndroid {
public:
ScreenCaptureMachineAndroid();
- ~ScreenCaptureMachineAndroid() override;
+ virtual ~ScreenCaptureMachineAndroid();
static base::android::ScopedJavaLocalRef<jobject>
createScreenCaptureMachineAndroid(jlong nativeScreenCaptureMachineAndroid);
@@ -58,12 +62,14 @@ class CAPTURE_EXPORT ScreenCaptureMachineAndroid : public VideoCaptureMachine {
const base::android::JavaRef<jobject>& obj,
jint rotation);
- // VideoCaptureMachine overrides.
- void Start(const scoped_refptr<media::ThreadSafeCaptureOracle>& oracle_proxy,
- const media::VideoCaptureParams& params,
- const base::Callback<void(bool)> callback) override;
- void Stop(const base::Closure& callback) override;
- void MaybeCaptureForRefresh() override;
+ // Starts/Stops capturing.
+ bool Start(scoped_refptr<ThreadSafeCaptureOracle> oracle_proxy,
+ const VideoCaptureParams& params);
+ void Stop();
+
+ // If there is a cached frame, and the oracle allows sending another frame
+ // right now, the cached captured frame is redelivered.
+ void MaybeCaptureForRefresh();
private:
// Indicates the orientation of the device.
diff --git a/chromium/media/capture/content/thread_safe_capture_oracle.cc b/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
index 4d07bb7cdf6..ef8732f591b 100644
--- a/chromium/media/capture/content/thread_safe_capture_oracle.cc
+++ b/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/capture/content/thread_safe_capture_oracle.h"
+#include "media/capture/content/android/thread_safe_capture_oracle.h"
#include <stdint.h>
@@ -13,7 +13,6 @@
#include "base/bits.h"
#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
-#include "base/synchronization/lock.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "media/base/video_frame.h"
@@ -44,11 +43,8 @@ struct ThreadSafeCaptureOracle::InFlightFrameCapture {
ThreadSafeCaptureOracle::ThreadSafeCaptureOracle(
std::unique_ptr<VideoCaptureDevice::Client> client,
- const VideoCaptureParams& params,
- bool enable_auto_throttling)
- : client_(std::move(client)),
- oracle_(enable_auto_throttling),
- params_(params) {
+ const VideoCaptureParams& params)
+ : client_(std::move(client)), oracle_(false), params_(params) {
DCHECK_GE(params.requested_format.frame_rate, 1e-6f);
oracle_.SetMinCapturePeriod(base::TimeDelta::FromMicroseconds(
static_cast<int64_t>(1000000.0 / params.requested_format.frame_rate +
@@ -168,8 +164,8 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
return false;
}
- *callback = base::Bind(&ThreadSafeCaptureOracle::DidCaptureFrame, this,
- base::Passed(&capture));
+ *callback = base::BindOnce(&ThreadSafeCaptureOracle::DidCaptureFrame, this,
+ base::Passed(&capture));
return true;
}
@@ -217,9 +213,6 @@ void ThreadSafeCaptureOracle::DidCaptureFrame(
const bool should_deliver_frame =
oracle_.CompleteCapture(capture->frame_number, success, &reference_time);
- // The following is used by
- // chrome/browser/extension/api/cast_streaming/performance_test.cc, in
- // addition to the usual runtime tracing.
TRACE_EVENT_ASYNC_END2("gpu.capture", "Capture", capture->buffer.id,
"success", should_deliver_frame, "timestamp",
(reference_time - base::TimeTicks()).InMicroseconds());
diff --git a/chromium/media/capture/content/thread_safe_capture_oracle.h b/chromium/media/capture/content/android/thread_safe_capture_oracle.h
index cab73733cae..65e62f66044 100644
--- a/chromium/media/capture/content/thread_safe_capture_oracle.h
+++ b/chromium/media/capture/content/android/thread_safe_capture_oracle.h
@@ -2,14 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CAPTURE_CONTENT_THREAD_SAFE_CAPTURE_ORACLE_H_
-#define MEDIA_CAPTURE_CONTENT_THREAD_SAFE_CAPTURE_ORACLE_H_
+#ifndef MEDIA_CAPTURE_CONTENT_ANDROID_THREAD_SAFE_CAPTURE_ORACLE_H_
+#define MEDIA_CAPTURE_CONTENT_ANDROID_THREAD_SAFE_CAPTURE_ORACLE_H_
#include <memory>
#include <string>
#include "base/memory/ref_counted.h"
-#include "media/base/video_frame.h"
+#include "base/synchronization/lock.h"
#include "media/capture/capture_export.h"
#include "media/capture/content/video_capture_oracle.h"
#include "media/capture/video/video_capture_buffer_handle.h"
@@ -23,6 +23,7 @@ namespace media {
struct VideoCaptureParams;
class VideoFrame;
+class VideoFrameMetadata;
// Thread-safe, refcounted proxy to the VideoCaptureOracle. This proxy wraps
// the VideoCaptureOracle, which decides which frames to capture, and a
@@ -32,16 +33,15 @@ class CAPTURE_EXPORT ThreadSafeCaptureOracle
: public base::RefCountedThreadSafe<ThreadSafeCaptureOracle> {
public:
ThreadSafeCaptureOracle(std::unique_ptr<VideoCaptureDevice::Client> client,
- const VideoCaptureParams& params,
- bool enable_auto_throttling);
+ const VideoCaptureParams& params);
// Called when a captured frame is available or an error has occurred.
// If |success| is true then |frame| is valid and |timestamp| indicates when
// the frame was painted.
// If |success| is false, all other parameters are invalid.
- typedef base::Callback<void(scoped_refptr<VideoFrame> frame,
- base::TimeTicks timestamp,
- bool success)>
+ typedef base::OnceCallback<void(scoped_refptr<VideoFrame> frame,
+ base::TimeTicks timestamp,
+ bool success)>
CaptureFrameCallback;
// Record a change |event| along with its |damage_rect| and |event_time|, and
@@ -60,18 +60,6 @@ class CAPTURE_EXPORT ThreadSafeCaptureOracle
scoped_refptr<VideoFrame>* storage,
CaptureFrameCallback* callback);
- base::TimeDelta min_capture_period() const {
- return oracle_.min_capture_period();
- }
-
- base::TimeTicks last_time_animation_was_detected() const {
- return oracle_.last_time_animation_was_detected();
- }
-
- gfx::Size max_frame_size() const {
- return params_.requested_format.frame_size;
- }
-
// Returns the current capture resolution.
gfx::Size GetCaptureSize() const;
@@ -125,4 +113,4 @@ class CAPTURE_EXPORT ThreadSafeCaptureOracle
} // namespace media
-#endif // MEDIA_CAPTURE_CONTENT_THREAD_SAFE_CAPTURE_ORACLE_H_
+#endif // MEDIA_CAPTURE_CONTENT_ANDROID_THREAD_SAFE_CAPTURE_ORACLE_H_
diff --git a/chromium/media/capture/content/screen_capture_device_core.cc b/chromium/media/capture/content/screen_capture_device_core.cc
deleted file mode 100644
index 5ae39399c9e..00000000000
--- a/chromium/media/capture/content/screen_capture_device_core.cc
+++ /dev/null
@@ -1,172 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/content/screen_capture_device_core.h"
-
-#include <memory>
-#include <utility>
-
-#include "base/bind.h"
-#include "base/logging.h"
-#include "base/macros.h"
-#include "base/memory/weak_ptr.h"
-#include "base/strings/string_number_conversions.h"
-#include "base/strings/stringprintf.h"
-#include "base/threading/thread_checker.h"
-
-namespace media {
-
-namespace {
-
-void DeleteCaptureMachine(
- std::unique_ptr<VideoCaptureMachine> capture_machine) {
- capture_machine.reset();
-}
-
-} // namespace
-
-VideoCaptureMachine::VideoCaptureMachine() = default;
-
-VideoCaptureMachine::~VideoCaptureMachine() = default;
-
-bool VideoCaptureMachine::IsAutoThrottlingEnabled() const {
- return false;
-}
-
-void ScreenCaptureDeviceCore::AllocateAndStart(
- const VideoCaptureParams& params,
- std::unique_ptr<VideoCaptureDevice::Client> client) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (state_ != kIdle) {
- DVLOG(1) << "Allocate() invoked when not in state Idle.";
- return;
- }
-
- if (params.requested_format.pixel_format != PIXEL_FORMAT_I420) {
- client->OnError(
- FROM_HERE,
- base::StringPrintf(
- "unsupported format: %s",
- VideoCaptureFormat::ToString(params.requested_format).c_str()));
- return;
- }
-
- oracle_proxy_ = new ThreadSafeCaptureOracle(
- std::move(client), params, capture_machine_->IsAutoThrottlingEnabled());
-
- capture_machine_->Start(
- oracle_proxy_, params,
- base::Bind(&ScreenCaptureDeviceCore::CaptureStarted, AsWeakPtr()));
-
- TransitionStateTo(kCapturing);
-}
-
-void ScreenCaptureDeviceCore::RequestRefreshFrame() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (state_ != kCapturing)
- return;
-
- capture_machine_->MaybeCaptureForRefresh();
-}
-
-void ScreenCaptureDeviceCore::Suspend() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (state_ != kCapturing)
- return;
-
- TransitionStateTo(kSuspended);
-
- capture_machine_->Suspend();
-}
-
-void ScreenCaptureDeviceCore::Resume() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (state_ != kSuspended)
- return;
-
- TransitionStateTo(kCapturing);
-
- capture_machine_->Resume();
-}
-
-void ScreenCaptureDeviceCore::StopAndDeAllocate() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (state_ != kCapturing && state_ != kSuspended)
- return;
-
- oracle_proxy_->Stop();
- oracle_proxy_ = NULL;
-
- TransitionStateTo(kIdle);
-
- capture_machine_->Stop(base::DoNothing());
-}
-
-void ScreenCaptureDeviceCore::OnConsumerReportingUtilization(
- int frame_feedback_id,
- double utilization) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(oracle_proxy_);
- oracle_proxy_->OnConsumerReportingUtilization(frame_feedback_id, utilization);
-}
-
-void ScreenCaptureDeviceCore::CaptureStarted(bool success) {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!success)
- Error(FROM_HERE, "Failed to start capture machine.");
- else if (oracle_proxy_)
- oracle_proxy_->ReportStarted();
-}
-
-ScreenCaptureDeviceCore::ScreenCaptureDeviceCore(
- std::unique_ptr<VideoCaptureMachine> capture_machine)
- : state_(kIdle), capture_machine_(std::move(capture_machine)) {
- DCHECK(capture_machine_.get());
-}
-
-ScreenCaptureDeviceCore::~ScreenCaptureDeviceCore() {
- DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(state_ != kCapturing && state_ != kSuspended);
- if (capture_machine_) {
- capture_machine_->Stop(
- base::Bind(&DeleteCaptureMachine, base::Passed(&capture_machine_)));
- }
- DVLOG(1) << "ScreenCaptureDeviceCore@" << this << " destroying.";
-}
-
-void ScreenCaptureDeviceCore::TransitionStateTo(State next_state) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
-#ifndef NDEBUG
- static const char* kStateNames[] = {"Idle", "Capturing", "Suspended",
- "Error"};
- static_assert(arraysize(kStateNames) == kLastCaptureState,
- "Different number of states and textual descriptions");
- DVLOG(1) << "State change: " << kStateNames[state_] << " --> "
- << kStateNames[next_state];
-#endif
-
- state_ = next_state;
-}
-
-void ScreenCaptureDeviceCore::Error(const base::Location& from_here,
- const std::string& reason) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (state_ == kIdle)
- return;
-
- if (oracle_proxy_)
- oracle_proxy_->ReportError(from_here, reason);
-
- StopAndDeAllocate();
- TransitionStateTo(kError);
-}
-
-} // namespace media
diff --git a/chromium/media/capture/content/screen_capture_device_core.h b/chromium/media/capture/content/screen_capture_device_core.h
deleted file mode 100644
index 5f5bdcbc32f..00000000000
--- a/chromium/media/capture/content/screen_capture_device_core.h
+++ /dev/null
@@ -1,122 +0,0 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_CONTENT_SCREEN_CAPTURE_DEVICE_CORE_H_
-#define MEDIA_CAPTURE_CONTENT_SCREEN_CAPTURE_DEVICE_CORE_H_
-
-#include <memory>
-#include <string>
-
-#include "base/macros.h"
-#include "base/memory/weak_ptr.h"
-#include "base/threading/thread_checker.h"
-#include "media/capture/capture_export.h"
-#include "media/capture/content/thread_safe_capture_oracle.h"
-#include "media/capture/video/video_capture_device.h"
-
-namespace base {
-class Location;
-} // namespace base
-
-namespace media {
-
-struct VideoCaptureParams;
-
-class ThreadSafeCaptureOracle;
-
-// Keeps track of the video capture source frames and executes copying.
-class CAPTURE_EXPORT VideoCaptureMachine {
- public:
- VideoCaptureMachine();
- virtual ~VideoCaptureMachine();
-
- // Starts capturing.
- // |callback| is invoked with true if succeeded. Otherwise, with false.
- virtual void Start(const scoped_refptr<ThreadSafeCaptureOracle>& oracle_proxy,
- const VideoCaptureParams& params,
- const base::Callback<void(bool)> callback) = 0;
-
- // Suspend/Resume frame delivery. Implementations of these are optional.
- virtual void Suspend() {}
- virtual void Resume() {}
-
- // Stops capturing.
- // |callback| is invoked after the capturing has stopped.
- virtual void Stop(const base::Closure& callback) = 0;
-
- // Returns true if the video capture is configured to monitor end-to-end
- // system utilization, and alter frame sizes and/or frame rates to mitigate
- // overloading or under-utilization.
- virtual bool IsAutoThrottlingEnabled() const;
-
- // The implementation of this method should consult the oracle, using the
- // kRefreshRequest event type, to decide whether to initiate a new frame
- // capture, and then do so if the oracle agrees.
- virtual void MaybeCaptureForRefresh() = 0;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(VideoCaptureMachine);
-};
-
-// The "meat" of a content video capturer.
-//
-// Separating this from the "shell classes" WebContentsVideoCaptureDevice and
-// DesktopCaptureDeviceAura allows safe destruction without needing to block any
-// threads, as well as code sharing.
-//
-// ScreenCaptureDeviceCore manages a simple state machine and the pipeline
-// (see notes at top of this file). It times the start of successive captures
-// and facilitates the processing of each through the stages of the
-// pipeline.
-class CAPTURE_EXPORT ScreenCaptureDeviceCore
- : public base::SupportsWeakPtr<ScreenCaptureDeviceCore> {
- public:
- ScreenCaptureDeviceCore(std::unique_ptr<VideoCaptureMachine> capture_machine);
- virtual ~ScreenCaptureDeviceCore();
-
- // Asynchronous requests to change ScreenCaptureDeviceCore state.
- void AllocateAndStart(const VideoCaptureParams& params,
- std::unique_ptr<VideoCaptureDevice::Client> client);
- void RequestRefreshFrame();
- void Suspend();
- void Resume();
- void StopAndDeAllocate();
- void OnConsumerReportingUtilization(int frame_feedback_id,
- double utilization);
-
- private:
- // Flag indicating current state.
- enum State { kIdle, kCapturing, kSuspended, kError, kLastCaptureState };
-
- void TransitionStateTo(State next_state);
-
- // Called back in response to StartCaptureMachine(). |success| is true if
- // capture machine succeeded to start.
- void CaptureStarted(bool success);
-
- // Stops capturing and notifies client_ of an error state.
- void Error(const base::Location& from_here, const std::string& reason);
-
- // Tracks that all activity occurs on the media stream manager's thread.
- base::ThreadChecker thread_checker_;
-
- // Current lifecycle state.
- State state_;
-
- // Tracks the CaptureMachine that's doing work on our behalf
- // on the device thread or UI thread.
- // This value should never be dereferenced by this class.
- std::unique_ptr<VideoCaptureMachine> capture_machine_;
-
- // Our thread-safe capture oracle which serves as the gateway to the video
- // capture pipeline. Besides the VideoCaptureDevice itself, it is the only
- // component of the system with direct access to |client_|.
- scoped_refptr<ThreadSafeCaptureOracle> oracle_proxy_;
-
- DISALLOW_COPY_AND_ASSIGN(ScreenCaptureDeviceCore);
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_CONTENT_SCREEN_CAPTURE_DEVICE_CORE_H_
diff --git a/chromium/media/capture/mojom/video_capture_types.mojom b/chromium/media/capture/mojom/video_capture_types.mojom
index 414b5ca327f..b59c87abfcb 100644
--- a/chromium/media/capture/mojom/video_capture_types.mojom
+++ b/chromium/media/capture/mojom/video_capture_types.mojom
@@ -50,6 +50,12 @@ enum PowerLineFrequency {
HZ_60
};
+enum VideoFacingMode {
+ NONE,
+ USER,
+ ENVIRONMENT
+};
+
enum VideoCaptureApi {
LINUX_V4L2_SINGLE_PLANE,
WIN_MEDIA_FOUNDATION,
@@ -61,6 +67,7 @@ enum VideoCaptureApi {
ANDROID_API2_LEGACY,
ANDROID_API2_FULL,
ANDROID_API2_LIMITED,
+ VIRTUAL_DEVICE,
UNKNOWN
};
@@ -70,6 +77,18 @@ enum VideoCaptureTransportType {
OTHER_TRANSPORT
};
+enum VideoCaptureBufferType {
+ kSharedMemory,
+
+ // Warning: This case is a workaround for compatibility with an older version
+ // of Mojo only and will be deleted as soon as the Mojo version of ChromiumOS
+ // becomes compatible with the |kSharedMemory|.
+ // TODO(chfremer): Remove this when https://crbug.com/857537 is resolved.
+ kSharedMemoryViaRawFileDescriptor,
+
+ kMailboxHolder
+};
+
struct VideoCaptureFormat {
gfx.mojom.Size frame_size;
float frame_rate;
@@ -78,6 +97,7 @@ struct VideoCaptureFormat {
struct VideoCaptureParams {
VideoCaptureFormat requested_format;
+ VideoCaptureBufferType buffer_type;
ResolutionChangePolicy resolution_change_policy;
PowerLineFrequency power_line_frequency;
};
@@ -101,6 +121,7 @@ struct VideoCaptureDeviceDescriptor {
string display_name;
string device_id;
string model_id;
+ VideoFacingMode facing_mode;
VideoCaptureApi capture_api;
VideoCaptureTransportType transport_type;
VideoCaptureDeviceDescriptorCameraCalibration? camera_calibration;
@@ -116,8 +137,14 @@ struct MailboxBufferHandleSet {
array<gpu.mojom.MailboxHolder, 4> mailbox_holder;
};
+struct SharedMemoryViaRawFileDescriptor {
+ handle file_descriptor_handle;
+ uint32 shared_memory_size_in_bytes;
+};
+
union VideoBufferHandle {
handle<shared_buffer> shared_buffer_handle;
+ SharedMemoryViaRawFileDescriptor shared_memory_via_raw_file_descriptor;
MailboxBufferHandleSet mailbox_handles;
};
diff --git a/chromium/media/capture/mojom/video_capture_types.typemap b/chromium/media/capture/mojom/video_capture_types.typemap
index 27b85993df2..cec8543cd92 100644
--- a/chromium/media/capture/mojom/video_capture_types.typemap
+++ b/chromium/media/capture/mojom/video_capture_types.typemap
@@ -33,6 +33,7 @@ type_mappings = [
"media.mojom.ResolutionChangePolicy=media::ResolutionChangePolicy",
"media.mojom.PowerLineFrequency=media::PowerLineFrequency",
"media.mojom.VideoCapturePixelFormat=media::VideoPixelFormat",
+ "media.mojom.VideoCaptureBufferType=media::VideoCaptureBufferType",
"media.mojom.VideoCaptureFormat=media::VideoCaptureFormat",
"media.mojom.VideoCaptureParams=media::VideoCaptureParams",
"media.mojom.VideoCaptureDeviceDescriptorCameraCalibration=media::VideoCaptureDeviceDescriptor::CameraCalibration",
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
index 396b0139456..f02c0ac60c3 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
@@ -234,6 +234,84 @@ bool EnumTraits<media::mojom::VideoCapturePixelFormat,
}
// static
+media::mojom::VideoCaptureBufferType
+EnumTraits<media::mojom::VideoCaptureBufferType,
+ media::VideoCaptureBufferType>::ToMojom(media::VideoCaptureBufferType
+ input) {
+ switch (input) {
+ case media::VideoCaptureBufferType::kSharedMemory:
+ return media::mojom::VideoCaptureBufferType::kSharedMemory;
+ case media::VideoCaptureBufferType::kSharedMemoryViaRawFileDescriptor:
+ return media::mojom::VideoCaptureBufferType::
+ kSharedMemoryViaRawFileDescriptor;
+ case media::VideoCaptureBufferType::kMailboxHolder:
+ return media::mojom::VideoCaptureBufferType::kMailboxHolder;
+ }
+ NOTREACHED();
+ return media::mojom::VideoCaptureBufferType::kSharedMemory;
+}
+
+// static
+bool EnumTraits<media::mojom::VideoCaptureBufferType,
+ media::VideoCaptureBufferType>::
+ FromMojom(media::mojom::VideoCaptureBufferType input,
+ media::VideoCaptureBufferType* output) {
+ switch (input) {
+ case media::mojom::VideoCaptureBufferType::kSharedMemory:
+ *output = media::VideoCaptureBufferType::kSharedMemory;
+ return true;
+ case media::mojom::VideoCaptureBufferType::
+ kSharedMemoryViaRawFileDescriptor:
+ *output =
+ media::VideoCaptureBufferType::kSharedMemoryViaRawFileDescriptor;
+ return true;
+ case media::mojom::VideoCaptureBufferType::kMailboxHolder:
+ *output = media::VideoCaptureBufferType::kMailboxHolder;
+ return true;
+ }
+ NOTREACHED();
+ return false;
+}
+
+// static
+media::mojom::VideoFacingMode
+EnumTraits<media::mojom::VideoFacingMode, media::VideoFacingMode>::ToMojom(
+ media::VideoFacingMode input) {
+ switch (input) {
+ case media::VideoFacingMode::MEDIA_VIDEO_FACING_NONE:
+ return media::mojom::VideoFacingMode::NONE;
+ case media::VideoFacingMode::MEDIA_VIDEO_FACING_USER:
+ return media::mojom::VideoFacingMode::USER;
+ case media::VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT:
+ return media::mojom::VideoFacingMode::ENVIRONMENT;
+ case media::VideoFacingMode::NUM_MEDIA_VIDEO_FACING_MODES:
+ NOTREACHED();
+ return media::mojom::VideoFacingMode::NONE;
+ }
+ NOTREACHED();
+ return media::mojom::VideoFacingMode::NONE;
+}
+
+// static
+bool EnumTraits<media::mojom::VideoFacingMode, media::VideoFacingMode>::
+ FromMojom(media::mojom::VideoFacingMode input,
+ media::VideoFacingMode* output) {
+ switch (input) {
+ case media::mojom::VideoFacingMode::NONE:
+ *output = media::VideoFacingMode::MEDIA_VIDEO_FACING_NONE;
+ return true;
+ case media::mojom::VideoFacingMode::USER:
+ *output = media::VideoFacingMode::MEDIA_VIDEO_FACING_USER;
+ return true;
+ case media::mojom::VideoFacingMode::ENVIRONMENT:
+ *output = media::VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT;
+ return true;
+ }
+ NOTREACHED();
+ return false;
+}
+
+// static
media::mojom::VideoCaptureApi
EnumTraits<media::mojom::VideoCaptureApi, media::VideoCaptureApi>::ToMojom(
media::VideoCaptureApi input) {
@@ -258,6 +336,8 @@ EnumTraits<media::mojom::VideoCaptureApi, media::VideoCaptureApi>::ToMojom(
return media::mojom::VideoCaptureApi::ANDROID_API2_FULL;
case media::VideoCaptureApi::ANDROID_API2_LIMITED:
return media::mojom::VideoCaptureApi::ANDROID_API2_LIMITED;
+ case media::VideoCaptureApi::VIRTUAL_DEVICE:
+ return media::mojom::VideoCaptureApi::VIRTUAL_DEVICE;
case media::VideoCaptureApi::UNKNOWN:
return media::mojom::VideoCaptureApi::UNKNOWN;
}
@@ -300,6 +380,9 @@ bool EnumTraits<media::mojom::VideoCaptureApi, media::VideoCaptureApi>::
case media::mojom::VideoCaptureApi::ANDROID_API2_LIMITED:
*output = media::VideoCaptureApi::ANDROID_API2_LIMITED;
return true;
+ case media::mojom::VideoCaptureApi::VIRTUAL_DEVICE:
+ *output = media::VideoCaptureApi::VIRTUAL_DEVICE;
+ return true;
case media::mojom::VideoCaptureApi::UNKNOWN:
*output = media::VideoCaptureApi::UNKNOWN;
return true;
@@ -360,6 +443,8 @@ bool StructTraits<media::mojom::VideoCaptureParamsDataView,
media::VideoCaptureParams* out) {
if (!data.ReadRequestedFormat(&out->requested_format))
return false;
+ if (!data.ReadBufferType(&out->buffer_type))
+ return false;
if (!data.ReadResolutionChangePolicy(&out->resolution_change_policy))
return false;
if (!data.ReadPowerLineFrequency(&out->power_line_frequency))
@@ -394,6 +479,8 @@ bool StructTraits<media::mojom::VideoCaptureDeviceDescriptorDataView,
return false;
if (!data.ReadModelId(&(output->model_id)))
return false;
+ if (!data.ReadFacingMode(&(output->facing)))
+ return false;
if (!data.ReadCaptureApi(&(output->capture_api)))
return false;
if (!data.ReadTransportType(&(output->transport_type)))
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.h b/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
index 4c9283c48da..1b7649a2c4a 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
@@ -5,6 +5,7 @@
#ifndef MEDIA_CAPTURE_MOJOM_VIDEO_CAPTURE_TYPES_MOJOM_TRAITS_H_
#define MEDIA_CAPTURE_MOJOM_VIDEO_CAPTURE_TYPES_MOJOM_TRAITS_H_
+#include "media/base/video_facing.h"
#include "media/capture/mojom/video_capture_types.mojom.h"
#include "media/capture/video/video_capture_device_descriptor.h"
#include "media/capture/video/video_capture_device_info.h"
@@ -41,6 +42,23 @@ struct EnumTraits<media::mojom::VideoCapturePixelFormat,
};
template <>
+struct EnumTraits<media::mojom::VideoCaptureBufferType,
+ media::VideoCaptureBufferType> {
+ static media::mojom::VideoCaptureBufferType ToMojom(
+ media::VideoCaptureBufferType buffer_type);
+
+ static bool FromMojom(media::mojom::VideoCaptureBufferType input,
+ media::VideoCaptureBufferType* out);
+};
+
+template <>
+struct EnumTraits<media::mojom::VideoFacingMode, media::VideoFacingMode> {
+ static media::mojom::VideoFacingMode ToMojom(media::VideoFacingMode input);
+ static bool FromMojom(media::mojom::VideoFacingMode input,
+ media::VideoFacingMode* output);
+};
+
+template <>
struct EnumTraits<media::mojom::VideoCaptureApi, media::VideoCaptureApi> {
static media::mojom::VideoCaptureApi ToMojom(media::VideoCaptureApi input);
static bool FromMojom(media::mojom::VideoCaptureApi input,
@@ -84,6 +102,11 @@ struct StructTraits<media::mojom::VideoCaptureParamsDataView,
return params.requested_format;
}
+ static media::VideoCaptureBufferType buffer_type(
+ const media::VideoCaptureParams& params) {
+ return params.buffer_type;
+ }
+
static media::ResolutionChangePolicy resolution_change_policy(
const media::VideoCaptureParams& params) {
return params.resolution_change_policy;
@@ -145,6 +168,11 @@ struct StructTraits<media::mojom::VideoCaptureDeviceDescriptorDataView,
return input.model_id;
}
+ static media::VideoFacingMode facing_mode(
+ const media::VideoCaptureDeviceDescriptor& input) {
+ return input.facing;
+ }
+
static media::VideoCaptureApi capture_api(
const media::VideoCaptureDeviceDescriptor& input) {
return input.capture_api;
diff --git a/chromium/media/capture/run_all_unittests.cc b/chromium/media/capture/run_all_unittests.cc
index d196bb67efc..9c4b2770c53 100644
--- a/chromium/media/capture/run_all_unittests.cc
+++ b/chromium/media/capture/run_all_unittests.cc
@@ -9,8 +9,8 @@
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/test_suite.h"
#include "base/threading/thread.h"
-#include "mojo/edk/embedder/embedder.h"
-#include "mojo/edk/embedder/scoped_ipc_support.h"
+#include "mojo/core/embedder/embedder.h"
+#include "mojo/core/embedder/scoped_ipc_support.h"
#include "testing/gtest/include/gtest/gtest.h"
class MojoEnabledTestEnvironment final : public testing::Environment {
@@ -20,12 +20,12 @@ class MojoEnabledTestEnvironment final : public testing::Environment {
~MojoEnabledTestEnvironment() final = default;
void SetUp() final {
- mojo::edk::Init();
+ mojo::core::Init();
mojo_ipc_thread_.StartWithOptions(
base::Thread::Options(base::MessageLoop::TYPE_IO, 0));
- mojo_ipc_support_.reset(new mojo::edk::ScopedIPCSupport(
+ mojo_ipc_support_.reset(new mojo::core::ScopedIPCSupport(
mojo_ipc_thread_.task_runner(),
- mojo::edk::ScopedIPCSupport::ShutdownPolicy::FAST));
+ mojo::core::ScopedIPCSupport::ShutdownPolicy::FAST));
VLOG(1) << "Mojo initialized";
}
@@ -36,7 +36,7 @@ class MojoEnabledTestEnvironment final : public testing::Environment {
private:
base::Thread mojo_ipc_thread_;
- std::unique_ptr<mojo::edk::ScopedIPCSupport> mojo_ipc_support_;
+ std::unique_ptr<mojo::core::ScopedIPCSupport> mojo_ipc_support_;
};
int main(int argc, char* argv[]) {
diff --git a/chromium/media/capture/video/android/video_capture_device_factory_android.cc b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
index e1a0cba1908..ae4e875731b 100644
--- a/chromium/media/capture/video/android/video_capture_device_factory_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
@@ -154,14 +154,4 @@ bool VideoCaptureDeviceFactoryAndroid::IsLegacyOrDeprecatedDevice(
AttachCurrentThread(), id));
}
-// static
-VideoCaptureDeviceFactory*
-VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
- return new VideoCaptureDeviceFactoryAndroid();
-}
-
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/DEPS b/chromium/media/capture/video/chromeos/DEPS
index fe2df0dba85..f1958045109 100644
--- a/chromium/media/capture/video/chromeos/DEPS
+++ b/chromium/media/capture/video/chromeos/DEPS
@@ -1,5 +1,4 @@
include_rules = [
"+chromeos/dbus",
- "+mojo/edk/embedder",
"+third_party/libsync",
]
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index 8505fcf230b..ad245ae399b 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -9,6 +9,7 @@
#include <utility>
#include <vector>
+#include "base/posix/safe_strerror.h"
#include "media/base/bind_to_current_loop.h"
#include "media/capture/mojom/image_capture_types.h"
#include "media/capture/video/blob_utils.h"
@@ -18,8 +19,6 @@
#include "media/capture/video/chromeos/camera_hal_delegate.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "media/capture/video/chromeos/stream_buffer_manager.h"
-#include "mojo/edk/embedder/embedder.h"
-#include "mojo/edk/embedder/scoped_platform_handle.h"
namespace media {
@@ -288,7 +287,7 @@ void CameraDeviceDelegate::OnClosed(int32_t result) {
device_context_->SetState(CameraDeviceContext::State::kStopped);
if (result) {
device_context_->LogToClient(std::string("Failed to close device: ") +
- std::string(strerror(result)));
+ base::safe_strerror(-result));
}
ResetMojoInterface();
device_context_ = nullptr;
@@ -397,8 +396,8 @@ void CameraDeviceDelegate::OnInitialized(int32_t result) {
}
if (result) {
device_context_->SetErrorState(
- FROM_HERE, std::string("Failed to initialize camera device") +
- std::string(strerror(result)));
+ FROM_HERE, std::string("Failed to initialize camera device: ") +
+ base::safe_strerror(-result));
return;
}
device_context_->SetState(CameraDeviceContext::State::kInitialized);
@@ -471,7 +470,7 @@ void CameraDeviceDelegate::OnConfiguredStreams(
if (result) {
device_context_->SetErrorState(
FROM_HERE, std::string("Failed to configure streams: ") +
- std::string(strerror(result)));
+ base::safe_strerror(-result));
return;
}
if (!updated_config ||
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index aa0f3c91fcc..aad0c9bac8b 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -128,7 +128,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
hal_delegate_thread_("HalDelegateThread") {}
void SetUp() override {
- VideoCaptureDeviceFactoryChromeOS::SetBufferManagerForTesting(
+ VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
&mock_gpu_memory_buffer_manager_);
hal_delegate_thread_.Start();
camera_hal_delegate_ =
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index 3a33d9b8f89..a47b75b4332 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -12,6 +12,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "base/posix/safe_strerror.h"
#include "base/strings/string_piece.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
@@ -85,6 +86,7 @@ std::unique_ptr<VideoCaptureDevice> CameraHalDelegate::CreateDevice(
if (!UpdateBuiltInCameraInfo()) {
return capture_device;
}
+ base::AutoLock lock(camera_info_lock_);
if (camera_info_.find(device_descriptor.device_id) == camera_info_.end()) {
LOG(ERROR) << "Invalid camera device: " << device_descriptor.device_id;
return capture_device;
@@ -103,6 +105,7 @@ void CameraHalDelegate::GetSupportedFormats(
return;
}
std::string camera_id = device_descriptor.device_id;
+ base::AutoLock lock(camera_info_lock_);
if (camera_info_.find(camera_id) == camera_info_.end() ||
camera_info_[camera_id].is_null()) {
LOG(ERROR) << "Invalid camera_id: " << camera_id;
@@ -168,13 +171,14 @@ void CameraHalDelegate::GetDeviceDescriptors(
if (!UpdateBuiltInCameraInfo()) {
return;
}
- for (size_t id = 0; id < num_builtin_cameras_; ++id) {
- VideoCaptureDeviceDescriptor desc;
- std::string camera_id = std::to_string(id);
- const cros::mojom::CameraInfoPtr& camera_info = camera_info_[camera_id];
+ base::AutoLock lock(camera_info_lock_);
+ for (const auto& it : camera_info_) {
+ const std::string& camera_id = it.first;
+ const cros::mojom::CameraInfoPtr& camera_info = it.second;
if (!camera_info) {
continue;
}
+ VideoCaptureDeviceDescriptor desc;
desc.device_id = camera_id;
desc.capture_api = VideoCaptureApi::ANDROID_API2_LIMITED;
desc.transport_type = VideoCaptureTransportType::OTHER_TRANSPORT;
@@ -196,6 +200,7 @@ void CameraHalDelegate::GetDeviceDescriptors(
}
device_descriptors->push_back(desc);
}
+ // TODO(shik): Report external camera first when lid is closed.
// TODO(jcliang): Remove this after JS API supports query camera facing
// (http://crbug.com/543997).
std::sort(device_descriptors->begin(), device_descriptors->end());
@@ -248,6 +253,9 @@ void CameraHalDelegate::ResetMojoInterfaceOnIpcThread() {
}
builtin_camera_info_updated_.Reset();
camera_module_has_been_set_.Reset();
+
+ // Clear all cached camera info, especially external cameras.
+ camera_info_.clear();
}
bool CameraHalDelegate::UpdateBuiltInCameraInfo() {
@@ -279,7 +287,7 @@ void CameraHalDelegate::UpdateBuiltInCameraInfoOnIpcThread() {
void CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- if (num_cameras <= 0) {
+ if (num_cameras < 0) {
builtin_camera_info_updated_.Signal();
LOG(ERROR) << "Failed to get number of cameras: " << num_cameras;
return;
@@ -303,9 +311,16 @@ void CameraHalDelegate::OnSetCallbacksOnIpcThread(int32_t result) {
if (result) {
num_builtin_cameras_ = 0;
builtin_camera_info_updated_.Signal();
- LOG(ERROR) << "Failed to set camera module callbacks: " << strerror(result);
+ LOG(ERROR) << "Failed to set camera module callbacks: "
+ << base::safe_strerror(-result);
+ return;
+ }
+
+ if (num_builtin_cameras_ == 0) {
+ builtin_camera_info_updated_.Signal();
return;
}
+
for (size_t camera_id = 0; camera_id < num_builtin_cameras_; ++camera_id) {
GetCameraInfoOnIpcThread(
camera_id,
@@ -332,9 +347,25 @@ void CameraHalDelegate::OnGotCameraInfoOnIpcThread(
}
// In case of error |camera_info| is empty.
SortCameraMetadata(&camera_info->static_camera_characteristics);
+
+ base::AutoLock lock(camera_info_lock_);
camera_info_[std::to_string(camera_id)] = std::move(camera_info);
- if (camera_info_.size() == num_builtin_cameras_) {
- builtin_camera_info_updated_.Signal();
+
+ if (camera_id < base::checked_cast<int32_t>(num_builtin_cameras_)) {
+ // |camera_info_| might contain some entries for external cameras as well,
+ // we should check all built-in cameras explicitly.
+ bool all_updated = [&]() {
+ for (size_t i = 0; i < num_builtin_cameras_; i++) {
+ if (camera_info_.find(std::to_string(i)) == camera_info_.end()) {
+ return false;
+ }
+ }
+ return true;
+ }();
+
+ if (all_updated) {
+ builtin_camera_info_updated_.Signal();
+ }
}
}
@@ -352,8 +383,30 @@ void CameraHalDelegate::CameraDeviceStatusChange(
int32_t camera_id,
cros::mojom::CameraDeviceStatus new_status) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- // TODO(jcliang): Handle status change for external cameras.
- NOTIMPLEMENTED() << "CameraDeviceStatusChange is not implemented";
+ VLOG(1) << "camera_id = " << camera_id << ", new_status = " << new_status;
+ base::AutoLock lock(camera_info_lock_);
+ auto it = camera_info_.find(std::to_string(camera_id));
+ switch (new_status) {
+ case cros::mojom::CameraDeviceStatus::CAMERA_DEVICE_STATUS_PRESENT:
+ if (it == camera_info_.end()) {
+ GetCameraInfoOnIpcThread(
+ camera_id,
+ base::BindOnce(&CameraHalDelegate::OnGotCameraInfoOnIpcThread, this,
+ camera_id));
+ } else {
+ LOG(WARNING) << "Ignore duplicated camera_id = " << camera_id;
+ }
+ break;
+ case cros::mojom::CameraDeviceStatus::CAMERA_DEVICE_STATUS_NOT_PRESENT:
+ if (it != camera_info_.end()) {
+ camera_info_.erase(it);
+ } else {
+ LOG(WARNING) << "Ignore nonexistent camera_id = " << camera_id;
+ }
+ break;
+ default:
+ NOTREACHED() << "Unexpected new status " << new_status;
+ }
}
void CameraHalDelegate::TorchModeStatusChange(
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.h b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
index b8c0745e063..19050a1d91b 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
@@ -12,6 +12,7 @@
#include "base/macros.h"
#include "base/sequence_checker.h"
#include "base/single_thread_task_runner.h"
+#include "base/synchronization/lock.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread.h"
#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
@@ -129,9 +130,11 @@ class CAPTURE_EXPORT CameraHalDelegate final
// reported by the camera HAL, and |camera_info_| stores the camera info of
// each camera device. They are modified only on |ipc_task_runner_|. They
// are also read in GetSupportedFormats and GetDeviceDescriptors, in which the
- // access is sequenced through UpdateBuiltInCameraInfo and
- // |builtin_camera_info_updated_| to avoid race conditions.
+ // access is protected by |camera_info_lock_| and sequenced through
+ // UpdateBuiltInCameraInfo and |builtin_camera_info_updated_| to avoid race
+ // conditions.
size_t num_builtin_cameras_;
+ base::Lock camera_info_lock_;
std::unordered_map<std::string, cros::mojom::CameraInfoPtr> camera_info_;
SEQUENCE_CHECKER(sequence_checker_);
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
index 33349860f1c..4f1f101aa1a 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
@@ -31,7 +31,7 @@ class CameraHalDelegateTest : public ::testing::Test {
hal_delegate_thread_("HalDelegateThread") {}
void SetUp() override {
- VideoCaptureDeviceFactoryChromeOS::SetBufferManagerForTesting(
+ VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
&mock_gpu_memory_buffer_manager_);
hal_delegate_thread_.Start();
camera_hal_delegate_ =
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
index 50eb9cb16a8..ade9c3e55ce 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
@@ -14,15 +14,14 @@
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/posix/eintr_wrapper.h"
+#include "base/rand_util.h"
#include "base/single_thread_task_runner.h"
+#include "base/strings/string_number_conversions.h"
#include "base/synchronization/waitable_event.h"
-#include "mojo/edk/embedder/embedder.h"
-#include "mojo/edk/embedder/named_platform_handle.h"
-#include "mojo/edk/embedder/named_platform_handle_utils.h"
-#include "mojo/edk/embedder/outgoing_broker_client_invitation.h"
-#include "mojo/edk/embedder/platform_channel_pair.h"
-#include "mojo/edk/embedder/platform_channel_utils_posix.h"
-#include "mojo/edk/embedder/scoped_platform_handle.h"
+#include "mojo/public/cpp/platform/named_platform_channel.h"
+#include "mojo/public/cpp/platform/platform_channel.h"
+#include "mojo/public/cpp/platform/socket_utils_posix.h"
+#include "mojo/public/cpp/system/invitation.h"
namespace media {
@@ -32,6 +31,12 @@ const base::FilePath::CharType kArcCamera3SocketPath[] =
"/var/run/camera/camera3.sock";
const char kArcCameraGroup[] = "arc-camera";
+std::string GenerateRandomToken() {
+ char random_bytes[16];
+ base::RandBytes(random_bytes, 16);
+ return base::HexEncode(random_bytes, 16);
+}
+
// Creates a pipe. Returns true on success, otherwise false.
// On success, |read_fd| will be set to the fd of the read side, and
// |write_fd| will be set to the one of write side.
@@ -211,10 +216,10 @@ void CameraHalDispatcherImpl::CreateSocket(base::WaitableEvent* started) {
DCHECK(blocking_io_task_runner_->BelongsToCurrentThread());
base::FilePath socket_path(kArcCamera3SocketPath);
- mojo::edk::ScopedInternalPlatformHandle socket_fd =
- mojo::edk::CreateServerHandle(
- mojo::edk::NamedPlatformHandle(socket_path.value()));
- if (!socket_fd.is_valid()) {
+ mojo::NamedPlatformChannel::Options options;
+ options.server_name = socket_path.value();
+ mojo::NamedPlatformChannel channel(options);
+ if (!channel.server_endpoint().is_valid()) {
LOG(ERROR) << "Failed to create the socket file: " << kArcCamera3SocketPath;
started->Signal();
return;
@@ -253,13 +258,13 @@ void CameraHalDispatcherImpl::CreateSocket(base::WaitableEvent* started) {
blocking_io_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&CameraHalDispatcherImpl::StartServiceLoop,
- base::Unretained(this), base::Passed(&socket_fd),
+ base::Unretained(this),
+ channel.TakeServerEndpoint().TakePlatformHandle().TakeFD(),
base::Unretained(started)));
}
-void CameraHalDispatcherImpl::StartServiceLoop(
- mojo::edk::ScopedInternalPlatformHandle socket_fd,
- base::WaitableEvent* started) {
+void CameraHalDispatcherImpl::StartServiceLoop(base::ScopedFD socket_fd,
+ base::WaitableEvent* started) {
DCHECK(blocking_io_task_runner_->BelongsToCurrentThread());
DCHECK(!proxy_fd_.is_valid());
DCHECK(!cancel_pipe_.is_valid());
@@ -277,34 +282,35 @@ void CameraHalDispatcherImpl::StartServiceLoop(
VLOG(1) << "CameraHalDispatcherImpl started; waiting for incoming connection";
while (true) {
- if (!WaitForSocketReadable(proxy_fd_.get().handle, cancel_fd.get())) {
+ if (!WaitForSocketReadable(proxy_fd_.get(), cancel_fd.get())) {
VLOG(1) << "Quit CameraHalDispatcherImpl IO thread";
return;
}
- mojo::edk::ScopedInternalPlatformHandle accepted_fd;
- if (mojo::edk::ServerAcceptConnection(proxy_fd_, &accepted_fd, false) &&
+ base::ScopedFD accepted_fd;
+ if (mojo::AcceptSocketConnection(proxy_fd_.get(), &accepted_fd, false) &&
accepted_fd.is_valid()) {
VLOG(1) << "Accepted a connection";
// Hardcode pid 0 since it is unused in mojo.
const base::ProcessHandle kUnusedChildProcessHandle = 0;
- mojo::edk::PlatformChannelPair channel_pair;
- mojo::edk::OutgoingBrokerClientInvitation invitation;
+ mojo::PlatformChannel channel;
+ mojo::OutgoingInvitation invitation;
- std::string token = mojo::edk::GenerateRandomToken();
+ // Generate an arbitrary 32-byte string, as we use this length as a
+ // protocol version identifier.
+ std::string token = GenerateRandomToken();
mojo::ScopedMessagePipeHandle pipe = invitation.AttachMessagePipe(token);
+ mojo::OutgoingInvitation::Send(std::move(invitation),
+ kUnusedChildProcessHandle,
+ channel.TakeLocalEndpoint());
- invitation.Send(
- kUnusedChildProcessHandle,
- mojo::edk::ConnectionParams(mojo::edk::TransportProtocol::kLegacy,
- channel_pair.PassServerHandle()));
-
- std::vector<mojo::edk::ScopedInternalPlatformHandle> handles;
- handles.emplace_back(channel_pair.PassClientHandle());
+ auto remote_endpoint = channel.TakeRemoteEndpoint();
+ std::vector<base::ScopedFD> fds;
+ fds.emplace_back(remote_endpoint.TakePlatformHandle().TakeFD());
struct iovec iov = {const_cast<char*>(token.c_str()), token.length()};
- ssize_t result = mojo::edk::PlatformChannelSendmsgWithHandles(
- accepted_fd, &iov, 1, handles);
+ ssize_t result =
+ mojo::SendmsgWithHandles(accepted_fd.get(), &iov, 1, fds);
if (result == -1) {
PLOG(ERROR) << "sendmsg()";
} else {
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
index 7470f777b4b..1eecadcc781 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
@@ -8,14 +8,15 @@
#include <memory>
#include <set>
+#include "base/files/scoped_file.h"
#include "base/memory/singleton.h"
#include "base/threading/thread.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/chromeos/mojo/cros_camera_service.mojom.h"
#include "media/capture/video/video_capture_device_factory.h"
-#include "mojo/edk/embedder/scoped_platform_handle.h"
#include "mojo/public/cpp/bindings/binding_set.h"
#include "mojo/public/cpp/bindings/interface_ptr_set.h"
+#include "mojo/public/cpp/platform/platform_channel_server_endpoint.h"
namespace base {
@@ -78,8 +79,7 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
// Waits for incoming connections (from HAL process or from client processes).
// Runs on |blocking_io_thread_|.
- void StartServiceLoop(mojo::edk::ScopedInternalPlatformHandle socket_fd,
- base::WaitableEvent* started);
+ void StartServiceLoop(base::ScopedFD socket_fd, base::WaitableEvent* started);
void AddClientObserverOnProxyThread(
std::unique_ptr<CameraClientObserver> observer);
@@ -95,7 +95,7 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
void StopOnProxyThread();
- mojo::edk::ScopedInternalPlatformHandle proxy_fd_;
+ base::ScopedFD proxy_fd_;
base::ScopedFD cancel_pipe_;
base::Thread proxy_thread_;
diff --git a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
index 020ca137016..eab11a8e646 100644
--- a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
@@ -150,7 +150,9 @@ class GpuMemoryBufferImplGbm : public gfx::GpuMemoryBuffer {
gfx::GpuMemoryBufferId GetId() const override { return handle_.id; }
- gfx::GpuMemoryBufferHandle GetHandle() const override { return handle_; }
+ gfx::GpuMemoryBufferHandle GetHandle() const override {
+ return gfx::CloneHandleForIPC(handle_);
+ }
ClientBuffer AsClientBuffer() override {
return reinterpret_cast<ClientBuffer>(this);
diff --git a/chromium/media/capture/video/chromeos/mojo/camera_metadata_tags.mojom b/chromium/media/capture/video/chromeos/mojo/camera_metadata_tags.mojom
index d4543e7bb73..4fcfad8dcd7 100644
--- a/chromium/media/capture/video/chromeos/mojo/camera_metadata_tags.mojom
+++ b/chromium/media/capture/video/chromeos/mojo/camera_metadata_tags.mojom
@@ -1,5 +1,5 @@
/*
- * Copyright 2017 The Android Open Source Project
+ * Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -55,7 +55,9 @@ enum CameraMetadataSection {
ANDROID_SYNC = 0x17,
ANDROID_REPROCESS = 0x18,
ANDROID_DEPTH = 0x19,
- ANDROID_SECTION_COUNT = 0x1A,
+ ANDROID_LOGICAL_MULTI_CAMERA = 0x1A,
+ ANDROID_DISTORTION_CORRECTION = 0x1B,
+ ANDROID_SECTION_COUNT = 0x1C,
VENDOR_SECTION = 0x8000
};
@@ -92,6 +94,10 @@ enum CameraMetadataSectionStart {
ANDROID_SYNC_START = 0x170000,
ANDROID_REPROCESS_START = 0x180000,
ANDROID_DEPTH_START = 0x190000,
+ ANDROID_LOGICAL_MULTI_CAMERA_START
+ = 0x1A0000,
+ ANDROID_DISTORTION_CORRECTION_START
+ = 0x1B0000,
// Mojom maps enum to int32_t in C++. This causes problem on the VENDOR_SECTION_START
// below as 0x80000000 would generate -Wc++11-narrowing warnings while compiling the
@@ -155,6 +161,7 @@ enum CameraMetadataTag {
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
ANDROID_CONTROL_ENABLE_ZSL,
+ ANDROID_CONTROL_AF_SCENE_CHANGE,
ANDROID_CONTROL_END,
ANDROID_DEMOSAIC_MODE = 0x20000, // ANDROID_DEMOSAIC_START,
ANDROID_DEMOSAIC_END,
@@ -198,6 +205,8 @@ enum CameraMetadataTag {
ANDROID_LENS_STATE,
ANDROID_LENS_INTRINSIC_CALIBRATION,
ANDROID_LENS_RADIAL_DISTORTION,
+ ANDROID_LENS_POSE_REFERENCE,
+ ANDROID_LENS_DISTORTION,
ANDROID_LENS_END,
ANDROID_LENS_INFO_AVAILABLE_APERTURES = 0x90000, // ANDROID_LENS_INFO_START,
ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
@@ -234,6 +243,8 @@ enum CameraMetadataTag {
ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+ ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
+ ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
ANDROID_REQUEST_END,
ANDROID_SCALER_CROP_REGION = 0xD0000, // ANDROID_SCALER_START,
ANDROID_SCALER_AVAILABLE_FORMATS,
@@ -315,6 +326,10 @@ enum CameraMetadataTag {
ANDROID_STATISTICS_SCENE_FLICKER,
ANDROID_STATISTICS_HOT_PIXEL_MAP,
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+ ANDROID_STATISTICS_OIS_DATA_MODE,
+ ANDROID_STATISTICS_OIS_TIMESTAMPS,
+ ANDROID_STATISTICS_OIS_X_SHIFTS,
+ ANDROID_STATISTICS_OIS_Y_SHIFTS,
ANDROID_STATISTICS_END,
ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES =
0x120000, // ANDROID_STATISTICS_INFO_START,
@@ -325,6 +340,7 @@ enum CameraMetadataTag {
ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+ ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES,
ANDROID_STATISTICS_INFO_END,
ANDROID_TONEMAP_CURVE_BLUE = 0x130000, // ANDROID_TONEMAP_START,
ANDROID_TONEMAP_CURVE_GREEN,
@@ -339,6 +355,7 @@ enum CameraMetadataTag {
ANDROID_LED_AVAILABLE_LEDS,
ANDROID_LED_END,
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL = 0x150000, // ANDROID_INFO_START,
+ ANDROID_INFO_VERSION,
ANDROID_INFO_END,
ANDROID_BLACK_LEVEL_LOCK = 0x160000, // ANDROID_BLACK_LEVEL_START,
ANDROID_BLACK_LEVEL_END,
@@ -354,6 +371,12 @@ enum CameraMetadataTag {
ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
ANDROID_DEPTH_END,
+ ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS = 0x1A0000, // ANDROID_LOGICAL_MULTI_CAMERA_START,
+ ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE,
+ ANDROID_LOGICAL_MULTI_CAMERA_END,
+ ANDROID_DISTORTION_CORRECTION_MODE = 0x1B0000, // ANDROID_DISTORTION_CORRECTION_START,
+ ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES,
+ ANDROID_DISTORTION_CORRECTION_END,
};
/**
@@ -395,6 +418,7 @@ enum AndroidControlAeMode {
ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,
ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE,
+ ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH,
};
// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER
@@ -449,6 +473,7 @@ enum AndroidControlCaptureIntent {
ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT,
ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG,
ANDROID_CONTROL_CAPTURE_INTENT_MANUAL,
+ ANDROID_CONTROL_CAPTURE_INTENT_MOTION_TRACKING,
};
// ANDROID_CONTROL_EFFECT_MODE
@@ -551,6 +576,12 @@ enum AndroidControlEnableZsl {
ANDROID_CONTROL_ENABLE_ZSL_TRUE,
};
+// ANDROID_CONTROL_AF_SCENE_CHANGE
+enum AndroidControlAfSceneChange {
+ ANDROID_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED,
+ ANDROID_CONTROL_AF_SCENE_CHANGE_DETECTED,
+};
+
// ANDROID_DEMOSAIC_MODE
enum AndroidDemosaicMode {
ANDROID_DEMOSAIC_MODE_FAST,
@@ -613,6 +644,12 @@ enum AndroidLensState {
ANDROID_LENS_STATE_MOVING,
};
+// ANDROID_LENS_POSE_REFERENCE
+enum AndroidLensPoseReference {
+ ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA,
+ ANDROID_LENS_POSE_REFERENCE_GYROSCOPE,
+};
+
// ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
enum AndroidLensInfoFocusDistanceCalibration {
ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
@@ -659,6 +696,9 @@ enum AndroidRequestAvailableCapabilities {
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME,
};
// ANDROID_SCALER_AVAILABLE_FORMATS
@@ -783,6 +823,12 @@ enum AndroidStatisticsLensShadingMapMode {
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON,
};
+// ANDROID_STATISTICS_OIS_DATA_MODE
+enum AndroidStatisticsOisDataMode {
+ ANDROID_STATISTICS_OIS_DATA_MODE_OFF,
+ ANDROID_STATISTICS_OIS_DATA_MODE_ON,
+};
+
// ANDROID_TONEMAP_MODE
enum AndroidTonemapMode {
ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
@@ -815,6 +861,7 @@ enum AndroidInfoSupportedHardwareLevel {
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3,
+ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL,
};
// ANDROID_BLACK_LEVEL_LOCK
@@ -846,3 +893,16 @@ enum AndroidDepthDepthIsExclusive {
ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE,
ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE,
};
+
+// ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
+enum AndroidLogicalMultiCameraSensorSyncType {
+ ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE,
+ ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED,
+};
+
+// ANDROID_DISTORTION_CORRECTION_MODE
+enum AndroidDistortionCorrectionMode {
+ ANDROID_DISTORTION_CORRECTION_MODE_OFF,
+ ANDROID_DISTORTION_CORRECTION_MODE_FAST,
+ ANDROID_DISTORTION_CORRECTION_MODE_HIGH_QUALITY,
+};
diff --git a/chromium/media/capture/video/chromeos/pixel_format_utils.cc b/chromium/media/capture/video/chromeos/pixel_format_utils.cc
index 6f9d4e3645c..a4197a81921 100644
--- a/chromium/media/capture/video/chromeos/pixel_format_utils.cc
+++ b/chromium/media/capture/video/chromeos/pixel_format_utils.cc
@@ -4,7 +4,7 @@
#include "media/capture/video/chromeos/pixel_format_utils.h"
-#include <libdrm/drm_fourcc.h>
+#include <drm_fourcc.h>
namespace media {
diff --git a/chromium/media/capture/video/chromeos/public/BUILD.gn b/chromium/media/capture/video/chromeos/public/BUILD.gn
new file mode 100644
index 00000000000..092ba2c6fc2
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/public/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+source_set("public") {
+ sources = [
+ "cros_features.cc",
+ "cros_features.h",
+ ]
+
+ public_deps = [
+ "//base",
+ ]
+}
diff --git a/chromium/media/capture/video/chromeos/public/cros_features.cc b/chromium/media/capture/video/chromeos/public/cros_features.cc
new file mode 100644
index 00000000000..4658f93134c
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/public/cros_features.cc
@@ -0,0 +1,19 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/public/cros_features.h"
+
+#include "base/files/file_util.h"
+
+namespace media {
+
+bool ShouldUseCrosCameraService() {
+ // Checks whether the Chrome OS binary which provides the HAL v3 camera
+ // service is installed on the device. If the binary exists we assume the
+ // device is using the new camera HAL v3 stack.
+ const base::FilePath kCrosCameraService("/usr/bin/cros_camera_service");
+ return base::PathExists(kCrosCameraService);
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/public/cros_features.h b/chromium/media/capture/video/chromeos/public/cros_features.h
new file mode 100644
index 00000000000..94797d4d8a5
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/public/cros_features.h
@@ -0,0 +1,16 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_PUBLIC_CROS_FEATURES_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_PUBLIC_CROS_FEATURES_H_
+
+namespace media {
+
+// A run-time check for whether or not we should use the OS-level camera
+// service on ChromeOS for video capture.
+bool ShouldUseCrosCameraService();
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_PUBLIC_CROS_FEATURES_H_
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
index 2cd54bb1114..ad4af81973f 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
@@ -7,11 +7,12 @@
#include <sync/sync.h>
#include <memory>
+#include "base/posix/safe_strerror.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
-#include "mojo/edk/embedder/embedder.h"
-#include "mojo/edk/embedder/scoped_platform_handle.h"
+#include "mojo/public/cpp/platform/platform_handle.h"
+#include "mojo/public/cpp/system/platform_handle.h"
namespace media {
@@ -318,24 +319,19 @@ void StreamBufferManager::RegisterBuffer(StreamType stream_type) {
size_t num_planes = buffer_handle.planes.size();
std::vector<StreamCaptureInterface::Plane> planes(num_planes);
for (size_t i = 0; i < num_planes; ++i) {
- // Wrap the platform handle.
- MojoHandle wrapped_handle;
// There is only one fd.
int dup_fd = dup(buffer_handle.fds[0].fd);
if (dup_fd == -1) {
device_context_->SetErrorState(FROM_HERE, "Failed to dup fd");
return;
}
- MojoResult result = mojo::edk::CreateInternalPlatformHandleWrapper(
- mojo::edk::ScopedInternalPlatformHandle(
- mojo::edk::InternalPlatformHandle(dup_fd)),
- &wrapped_handle);
- if (result != MOJO_RESULT_OK) {
+ planes[i].fd =
+ mojo::WrapPlatformHandle(mojo::PlatformHandle(base::ScopedFD(dup_fd)));
+ if (!planes[i].fd.is_valid()) {
device_context_->SetErrorState(FROM_HERE,
"Failed to wrap gpu memory handle");
return;
}
- planes[i].fd.reset(mojo::Handle(wrapped_handle));
planes[i].stride = buffer_handle.planes[i].stride;
planes[i].offset = buffer_handle.planes[i].offset;
}
@@ -367,7 +363,7 @@ void StreamBufferManager::OnRegisteredBuffer(StreamType stream_type,
if (result) {
device_context_->SetErrorState(FROM_HERE,
std::string("Failed to register buffer: ") +
- std::string(strerror(result)));
+ base::safe_strerror(-result));
return;
}
stream_context_[stream_type]->registered_buffers.push(buffer_id);
@@ -439,8 +435,8 @@ void StreamBufferManager::OnProcessedCaptureRequest(int32_t result) {
}
if (result) {
device_context_->SetErrorState(
- FROM_HERE, std::string("Process capture request failed") +
- std::string(strerror(result)));
+ FROM_HERE, std::string("Process capture request failed: ") +
+ base::safe_strerror(-result));
return;
}
// Keeps the preview stream going.
@@ -702,15 +698,14 @@ void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number,
// Wait on release fence before delivering the result buffer to client.
if (stream_buffer->release_fence.is_valid()) {
const int kSyncWaitTimeoutMs = 1000;
- mojo::edk::ScopedInternalPlatformHandle fence;
- MojoResult result = mojo::edk::PassWrappedInternalPlatformHandle(
- stream_buffer->release_fence.release().value(), &fence);
- if (result != MOJO_RESULT_OK) {
+ mojo::PlatformHandle fence =
+ mojo::UnwrapPlatformHandle(std::move(stream_buffer->release_fence));
+ if (!fence.is_valid()) {
device_context_->SetErrorState(FROM_HERE,
"Failed to unwrap release fence fd");
return;
}
- if (!sync_wait(fence.get().handle, kSyncWaitTimeoutMs)) {
+ if (!sync_wait(fence.GetFD().get(), kSyncWaitTimeoutMs)) {
device_context_->SetErrorState(FROM_HERE,
"Sync wait on release fence timed out");
return;
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
index dbd982e71b5..6fb9ccac642 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
@@ -4,10 +4,8 @@
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
-#include "base/files/file_util.h"
#include "base/memory/ptr_util.h"
#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
-#include "media/capture/video/linux/video_capture_device_factory_linux.h"
namespace media {
@@ -18,20 +16,14 @@ gpu::GpuMemoryBufferManager* g_gpu_buffer_manager = nullptr;
} // namespace
VideoCaptureDeviceFactoryChromeOS::VideoCaptureDeviceFactoryChromeOS(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory)
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer)
: task_runner_for_screen_observer_(task_runner_for_screen_observer),
camera_hal_ipc_thread_("CameraHalIpcThread"),
- initialized_(Init(std::move(jda_factory), std::move(jea_factory))) {
- g_gpu_buffer_manager = gpu_buffer_manager;
-}
+ initialized_(Init()) {}
VideoCaptureDeviceFactoryChromeOS::~VideoCaptureDeviceFactoryChromeOS() {
camera_hal_delegate_->Reset();
camera_hal_ipc_thread_.Stop();
- g_gpu_buffer_manager = nullptr;
}
std::unique_ptr<VideoCaptureDevice>
@@ -49,9 +41,6 @@ void VideoCaptureDeviceFactoryChromeOS::GetSupportedFormats(
const VideoCaptureDeviceDescriptor& device_descriptor,
VideoCaptureFormats* supported_formats) {
DCHECK(thread_checker_.CalledOnValidThread());
- if (!initialized_) {
- return;
- }
camera_hal_delegate_->GetSupportedFormats(device_descriptor,
supported_formats);
}
@@ -66,38 +55,25 @@ void VideoCaptureDeviceFactoryChromeOS::GetDeviceDescriptors(
}
// static
-bool VideoCaptureDeviceFactoryChromeOS::ShouldEnable() {
- // Checks whether the Chrome OS binary which provides the HAL v3 camera
- // service is installed on the device. If the binary exists we assume the
- // device is using the new camera HAL v3 stack.
- const base::FilePath kCrosCameraService("/usr/bin/cros_camera_service");
- return base::PathExists(kCrosCameraService);
-}
-
-// static
gpu::GpuMemoryBufferManager*
VideoCaptureDeviceFactoryChromeOS::GetBufferManager() {
return g_gpu_buffer_manager;
}
// static
-void VideoCaptureDeviceFactoryChromeOS::SetBufferManagerForTesting(
+void VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
gpu::GpuMemoryBufferManager* buffer_manager) {
g_gpu_buffer_manager = buffer_manager;
}
-bool VideoCaptureDeviceFactoryChromeOS::Init(
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
+bool VideoCaptureDeviceFactoryChromeOS::Init() {
if (!camera_hal_ipc_thread_.Start()) {
LOG(ERROR) << "Module thread failed to start";
return false;
}
- if (!CameraHalDispatcherImpl::GetInstance()->IsStarted() &&
- !CameraHalDispatcherImpl::GetInstance()->Start(std::move(jda_factory),
- std::move(jea_factory))) {
- LOG(ERROR) << "Failed to start CameraHalDispatcherImpl";
+ if (!CameraHalDispatcherImpl::GetInstance()->IsStarted()) {
+ LOG(ERROR) << "CameraHalDispatcherImpl is not started";
return false;
}
@@ -107,31 +83,4 @@ bool VideoCaptureDeviceFactoryChromeOS::Init(
return true;
}
-#if defined(OS_CHROMEOS)
-// static
-VideoCaptureDeviceFactory*
-VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
- // On Chrome OS we have to support two use cases:
- //
- // 1. For devices that have the camera HAL v3 service running on Chrome OS,
- // we use the HAL v3 capture device which VideoCaptureDeviceFactoryChromeOS
- // provides.
- // 2. Existing devices that use UVC cameras need to use the V4L2 capture
- // device which VideoCaptureDeviceFacotoryLinux provides; there are also
- // some special devices that may never be able to implement a camera HAL
- // v3.
- if (VideoCaptureDeviceFactoryChromeOS::ShouldEnable()) {
- return new VideoCaptureDeviceFactoryChromeOS(
- task_runner_for_screen_observer, gpu_buffer_manager,
- std::move(jda_factory), std::move(jea_factory));
- } else {
- return new VideoCaptureDeviceFactoryLinux(task_runner_for_screen_observer);
- }
-}
-#endif
-
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
index 4507469bb03..bc452c30b03 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
@@ -19,10 +19,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
public:
explicit VideoCaptureDeviceFactoryChromeOS(
scoped_refptr<base::SingleThreadTaskRunner>
- task_runner_for_screen_observer,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory);
+ task_runner_for_screen_observer);
~VideoCaptureDeviceFactoryChromeOS() override;
@@ -35,21 +32,13 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
void GetDeviceDescriptors(
VideoCaptureDeviceDescriptors* device_descriptors) final;
- // A run-time check for whether we should enable
- // VideoCaptureDeviceFactoryChromeOS on the device.
- static bool ShouldEnable();
-
static gpu::GpuMemoryBufferManager* GetBufferManager();
-
- // For testing purpose only.
- static void SetBufferManagerForTesting(
- gpu::GpuMemoryBufferManager* buffer_manager);
+ static void SetGpuBufferManager(gpu::GpuMemoryBufferManager* buffer_manager);
private:
// Initializes the factory. The factory is functional only after this call
// succeeds.
- bool Init(MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory);
+ bool Init();
const scoped_refptr<base::SingleThreadTaskRunner>
task_runner_for_screen_observer_;
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.cc b/chromium/media/capture/video/create_video_capture_device_factory.cc
new file mode 100644
index 00000000000..9ca6f8c3e1a
--- /dev/null
+++ b/chromium/media/capture/video/create_video_capture_device_factory.cc
@@ -0,0 +1,91 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/create_video_capture_device_factory.h"
+
+#include "base/command_line.h"
+#include "build/build_config.h"
+#include "media/base/media_switches.h"
+#include "media/capture/video/fake_video_capture_device_factory.h"
+#include "media/capture/video/file_video_capture_device_factory.h"
+
+#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
+#include "media/capture/video/linux/video_capture_device_factory_linux.h"
+#elif defined(OS_CHROMEOS)
+#include "media/capture/video/chromeos/public/cros_features.h"
+#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
+#include "media/capture/video/linux/video_capture_device_factory_linux.h"
+#elif defined(OS_WIN)
+#include "media/capture/video/win/video_capture_device_factory_win.h"
+#elif defined(OS_MACOSX)
+#include "media/capture/video/mac/video_capture_device_factory_mac.h"
+#elif defined(OS_ANDROID)
+#include "media/capture/video/android/video_capture_device_factory_android.h"
+#endif
+
+namespace media {
+
+namespace {
+
+std::unique_ptr<VideoCaptureDeviceFactory>
+CreatePlatformSpecificVideoCaptureDeviceFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
+#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
+ return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
+#elif defined(OS_CHROMEOS)
+ // On Chrome OS we have to support two use cases:
+ //
+ // 1. For devices that have the camera HAL v3 service running on Chrome OS,
+ // we use the HAL v3 capture device which VideoCaptureDeviceFactoryChromeOS
+ // provides.
+ // 2. Existing devices that use UVC cameras need to use the V4L2 capture
+ // device which VideoCaptureDeviceFacotoryLinux provides; there are also
+ // some special devices that may never be able to implement a camera HAL
+ // v3.
+ if (ShouldUseCrosCameraService()) {
+ return std::make_unique<VideoCaptureDeviceFactoryChromeOS>(ui_task_runner);
+ } else {
+ return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
+ }
+#elif defined(OS_WIN)
+ return std::make_unique<VideoCaptureDeviceFactoryWin>();
+#elif defined(OS_MACOSX)
+ return std::make_unique<VideoCaptureDeviceFactoryMac>();
+#elif defined(OS_ANDROID)
+ return std::make_unique<VideoCaptureDeviceFactoryAndroid>();
+#else
+ NOTIMPLEMENTED();
+ return nullptr;
+#endif
+}
+
+} // anonymous namespace
+
+std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
+ const base::CommandLine* command_line =
+ base::CommandLine::ForCurrentProcess();
+ // Use a Fake or File Video Device Factory if the command line flags are
+ // present, otherwise use the normal, platform-dependent, device factory.
+ if (command_line->HasSwitch(switches::kUseFakeDeviceForMediaStream)) {
+ if (command_line->HasSwitch(switches::kUseFileForFakeVideoCapture)) {
+ return std::make_unique<FileVideoCaptureDeviceFactory>();
+ } else {
+ std::vector<FakeVideoCaptureDeviceSettings> config;
+ FakeVideoCaptureDeviceFactory::ParseFakeDevicesConfigFromOptionsString(
+ command_line->GetSwitchValueASCII(
+ switches::kUseFakeDeviceForMediaStream),
+ &config);
+ auto result = std::make_unique<FakeVideoCaptureDeviceFactory>();
+ result->SetToCustomDevicesConfig(config);
+ return std::move(result);
+ }
+ } else {
+ // |ui_task_runner| is needed for the Linux ChromeOS factory to retrieve
+ // screen rotations.
+ return CreatePlatformSpecificVideoCaptureDeviceFactory(ui_task_runner);
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.h b/chromium/media/capture/video/create_video_capture_device_factory.h
new file mode 100644
index 00000000000..7199bf800e3
--- /dev/null
+++ b/chromium/media/capture/video/create_video_capture_device_factory.h
@@ -0,0 +1,22 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CREATE_VIDEO_CAPTURE_DEVICE_FACTORY_H_
+#define MEDIA_CAPTURE_VIDEO_CREATE_VIDEO_CAPTURE_DEVICE_FACTORY_H_
+
+#include <memory>
+
+#include "base/single_thread_task_runner.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/video_capture_device_factory.h"
+
+namespace media {
+
+std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
+CreateVideoCaptureDeviceFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CREATE_VIDEO_CAPTURE_DEVICE_FACTORY_H_
diff --git a/chromium/media/capture/video/linux/camera_config_chromeos.h b/chromium/media/capture/video/linux/camera_config_chromeos.h
index 3b3bb395995..df4556e40a2 100644
--- a/chromium/media/capture/video/linux/camera_config_chromeos.h
+++ b/chromium/media/capture/video/linux/camera_config_chromeos.h
@@ -61,16 +61,6 @@ class CAPTURE_EXPORT CameraConfigChromeOS {
CAPTURE_EXPORT VideoFacingMode
GetCameraFacing(const std::string& device_id,
const std::string& model_id) const;
- // Get the orientation of the camera. The value is the angle that the camera
- // image needs to be rotated clockwise so it shows correctly on the display in
- // its natural orientation. It should be 0, 90, 180, or 270.
- //
- // For example, suppose a device has a naturally tall screen. The back-facing
- // camera sensor is mounted in landscape. You are looking at the screen. If
- // the top side of the camera sensor is aligned with the right edge of the
- // screen in natural orientation, the value should be 90. If the top side of a
- // front-facing camera sensor is aligned with the right of the screen, the
- // value should be 270.
int GetOrientation(const std::string& device_id,
const std::string& model_id) const;
diff --git a/chromium/media/capture/video/linux/fake_v4l2_impl.cc b/chromium/media/capture/video/linux/fake_v4l2_impl.cc
new file mode 100644
index 00000000000..2c33b40820a
--- /dev/null
+++ b/chromium/media/capture/video/linux/fake_v4l2_impl.cc
@@ -0,0 +1,198 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/linux/fake_v4l2_impl.h"
+
+#include <linux/videodev2.h>
+#include <string.h>
+
+#include "base/stl_util.h"
+
+#define KERNEL_VERSION(a, b, c) (((a) << 16) + ((b) << 8) + (c))
+
+namespace media {
+
+static const int kInvalidId = -1;
+static const int kSuccessReturnValue = 0;
+static const int kErrorReturnValue = -1;
+
+class FakeV4L2Impl::OpenedDevice {
+ public:
+ explicit OpenedDevice(const std::string& device_id, int open_flags)
+ : device_id_(device_id), open_flags_(open_flags) {}
+
+ const std::string& device_id() const { return device_id_; }
+ int open_flags() const { return open_flags_; }
+
+ private:
+ const std::string device_id_;
+ const int open_flags_;
+};
+
+FakeV4L2Impl::FakeV4L2Impl() : next_id_to_return_from_open_(1) {}
+
+FakeV4L2Impl::~FakeV4L2Impl() = default;
+
+void FakeV4L2Impl::AddDevice(const std::string& device_name,
+ const FakeV4L2DeviceConfig& config) {
+ device_configs_.emplace(device_name, config);
+}
+
+int FakeV4L2Impl::open(const char* device_name, int flags) {
+ std::string device_name_as_string(device_name);
+ if (!base::ContainsKey(device_configs_, device_name_as_string))
+ return kInvalidId;
+
+ auto id_iter = device_name_to_open_id_map_.find(device_name_as_string);
+ if (id_iter != device_name_to_open_id_map_.end()) {
+ // Device is already open
+ return kInvalidId;
+ }
+
+ auto device_id = next_id_to_return_from_open_++;
+ device_name_to_open_id_map_.emplace(device_name_as_string, device_id);
+ opened_devices_.emplace(
+ device_id, std::make_unique<OpenedDevice>(device_name_as_string, flags));
+ return device_id;
+}
+
+int FakeV4L2Impl::close(int fd) {
+ auto device_iter = opened_devices_.find(fd);
+ if (device_iter == opened_devices_.end())
+ return kErrorReturnValue;
+ device_name_to_open_id_map_.erase(device_iter->second->device_id());
+ opened_devices_.erase(device_iter->first);
+ return kSuccessReturnValue;
+}
+
+int FakeV4L2Impl::ioctl(int fd, int request, void* argp) {
+ auto device_iter = opened_devices_.find(fd);
+ if (device_iter == opened_devices_.end())
+ return EBADF;
+ auto& opened_device = device_iter->second;
+ auto& device_config = device_configs_.at(opened_device->device_id());
+
+ switch (request) {
+ case VIDIOC_ENUM_FMT: {
+ auto* fmtdesc = reinterpret_cast<v4l2_fmtdesc*>(argp);
+ if (fmtdesc->index > 0u) {
+ // We only support a single format for now.
+ return EINVAL;
+ }
+ if (fmtdesc->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) {
+ // We only support video capture.
+ return EINVAL;
+ }
+ fmtdesc->flags = 0u;
+ strcpy(reinterpret_cast<char*>(fmtdesc->description), "YUV420");
+ fmtdesc->pixelformat = V4L2_PIX_FMT_YUV420;
+ memset(fmtdesc->reserved, 0, 4);
+ return kSuccessReturnValue;
+ }
+ case VIDIOC_QUERYCAP: {
+ auto* cap = reinterpret_cast<v4l2_capability*>(argp);
+ strcpy(reinterpret_cast<char*>(cap->driver), "FakeV4L2");
+ CHECK(device_config.descriptor.display_name().size() < 31);
+ strcpy(reinterpret_cast<char*>(cap->driver),
+ device_config.descriptor.display_name().c_str());
+ cap->bus_info[0] = 0;
+ // Provide arbitrary version info
+ cap->version = KERNEL_VERSION(1, 0, 0);
+ cap->capabilities = V4L2_CAP_VIDEO_CAPTURE;
+ memset(cap->reserved, 0, 4);
+ return kSuccessReturnValue;
+ }
+ case VIDIOC_STREAMON:
+ case VIDIOC_STREAMOFF:
+ NOTIMPLEMENTED();
+ return kSuccessReturnValue;
+ case VIDIOC_CROPCAP:
+ case VIDIOC_DBG_G_REGISTER:
+ case VIDIOC_DBG_S_REGISTER:
+ case VIDIOC_ENCODER_CMD:
+ case VIDIOC_TRY_ENCODER_CMD:
+ case VIDIOC_ENUMAUDIO:
+ case VIDIOC_ENUMAUDOUT:
+ case VIDIOC_ENUM_FRAMESIZES:
+ case VIDIOC_ENUM_FRAMEINTERVALS:
+ case VIDIOC_ENUMINPUT:
+ case VIDIOC_ENUMOUTPUT:
+ case VIDIOC_ENUMSTD:
+ case VIDIOC_G_AUDIO:
+ case VIDIOC_S_AUDIO:
+ case VIDIOC_G_AUDOUT:
+ case VIDIOC_S_AUDOUT:
+ case VIDIOC_G_CROP:
+ case VIDIOC_S_CROP:
+ case VIDIOC_G_CTRL:
+ case VIDIOC_S_CTRL:
+ case VIDIOC_G_ENC_INDEX:
+ case VIDIOC_G_EXT_CTRLS:
+ case VIDIOC_S_EXT_CTRLS:
+ case VIDIOC_TRY_EXT_CTRLS:
+ case VIDIOC_G_FBUF:
+ case VIDIOC_S_FBUF:
+ case VIDIOC_G_FMT:
+ case VIDIOC_S_FMT:
+ case VIDIOC_TRY_FMT:
+ case VIDIOC_G_FREQUENCY:
+ case VIDIOC_S_FREQUENCY:
+ case VIDIOC_G_INPUT:
+ case VIDIOC_S_INPUT:
+ case VIDIOC_G_JPEGCOMP:
+ case VIDIOC_S_JPEGCOMP:
+ case VIDIOC_G_MODULATOR:
+ case VIDIOC_S_MODULATOR:
+ case VIDIOC_G_OUTPUT:
+ case VIDIOC_S_OUTPUT:
+ case VIDIOC_G_PARM:
+ case VIDIOC_S_PARM:
+ case VIDIOC_G_PRIORITY:
+ case VIDIOC_S_PRIORITY:
+ case VIDIOC_G_SLICED_VBI_CAP:
+ case VIDIOC_G_STD:
+ case VIDIOC_S_STD:
+ case VIDIOC_G_TUNER:
+ case VIDIOC_S_TUNER:
+ case VIDIOC_LOG_STATUS:
+ case VIDIOC_OVERLAY:
+ case VIDIOC_QBUF:
+ case VIDIOC_DQBUF:
+ case VIDIOC_QUERYBUF:
+ case VIDIOC_QUERYCTRL:
+ case VIDIOC_QUERYMENU:
+ case VIDIOC_QUERYSTD:
+ case VIDIOC_REQBUFS:
+ case VIDIOC_S_HW_FREQ_SEEK:
+ // Unsupported |request| code.
+ NOTREACHED() << "Unsupported request code " << request;
+ return kErrorReturnValue;
+ }
+
+ // Invalid |request|.
+ NOTREACHED();
+ return kErrorReturnValue;
+}
+
+void* FakeV4L2Impl::mmap(void* start,
+ size_t length,
+ int prot,
+ int flags,
+ int fd,
+ off_t offset) {
+ NOTREACHED();
+ return nullptr;
+}
+
+int FakeV4L2Impl::munmap(void* start, size_t length) {
+ NOTREACHED();
+ return kErrorReturnValue;
+}
+
+int FakeV4L2Impl::poll(struct pollfd* ufds, unsigned int nfds, int timeout) {
+ NOTREACHED();
+ return kErrorReturnValue;
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/linux/fake_v4l2_impl.h b/chromium/media/capture/video/linux/fake_v4l2_impl.h
new file mode 100644
index 00000000000..1dd4b6c1c0c
--- /dev/null
+++ b/chromium/media/capture/video/linux/fake_v4l2_impl.h
@@ -0,0 +1,62 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_LINUX_FAKE_V4L2_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_LINUX_FAKE_V4L2_IMPL_H_
+
+#include <map>
+#include <string>
+
+#include "media/capture/capture_export.h"
+#include "media/capture/video/linux/v4l2_capture_device.h"
+#include "media/capture/video/video_capture_device_descriptor.h"
+
+namespace media {
+
+struct FakeV4L2DeviceConfig {
+ FakeV4L2DeviceConfig(const VideoCaptureDeviceDescriptor& descriptor)
+ : descriptor(descriptor) {}
+
+ const VideoCaptureDeviceDescriptor descriptor;
+};
+
+// Implementation of V4L2CaptureDevice interface that allows configuring fake
+// devices useful for testing.
+class CAPTURE_EXPORT FakeV4L2Impl : public V4L2CaptureDevice {
+ public:
+ FakeV4L2Impl();
+
+ void AddDevice(const std::string& device_name,
+ const FakeV4L2DeviceConfig& config);
+
+ // Implementation of V4L2CaptureDevice interface:
+ int open(const char* device_name, int flags) override;
+ int close(int fd) override;
+ int ioctl(int fd, int request, void* argp) override;
+ void* mmap(void* start,
+ size_t length,
+ int prot,
+ int flags,
+ int fd,
+ off_t offset) override;
+
+ int munmap(void* start, size_t length) override;
+ int poll(struct pollfd* ufds, unsigned int nfds, int timeout) override;
+
+ protected:
+ ~FakeV4L2Impl() override;
+
+ private:
+ class OpenedDevice;
+
+ int next_id_to_return_from_open_;
+ std::map<std::string, FakeV4L2DeviceConfig> device_configs_;
+ std::map<std::string, int> device_name_to_open_id_map_;
+ std::map<int /*value returned by open()*/, std::unique_ptr<OpenedDevice>>
+ opened_devices_;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_LINUX_FAKE_V4L2_IMPL_H_
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
index ce969d7e40a..c152e8c2822 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
@@ -55,8 +55,9 @@ const int kMjpegHeight = 480;
// Typical framerate, in fps
const int kTypicalFramerate = 30;
-// V4L2 color formats supported by V4L2CaptureDelegate derived classes.
-// This list is ordered by precedence of use -- but see caveats for MJPEG.
+// V4L2CaptureDevice color formats supported by V4L2CaptureDelegate derived
+// classes. This list is ordered by precedence of use -- but see caveats for
+// MJPEG.
static struct {
uint32_t fourcc;
VideoPixelFormat pixel_format;
@@ -124,44 +125,6 @@ static std::string FourccToString(uint32_t fourcc) {
(fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF);
}
-// Running ioctl() on some devices, especially shortly after (re)opening the
-// device file descriptor or (re)starting streaming, can fail but works after
-// retrying (https://crbug.com/670262).
-// Returns false if the |request| ioctl fails too many times.
-static bool RunIoctl(int fd, int request, void* argp) {
- int num_retries = 0;
- for (; HANDLE_EINTR(ioctl(fd, request, argp)) < 0 &&
- num_retries < kMaxIOCtrlRetries;
- ++num_retries) {
- DPLOG(WARNING) << "ioctl";
- }
- DPLOG_IF(ERROR, num_retries != kMaxIOCtrlRetries);
- return num_retries != kMaxIOCtrlRetries;
-}
-
-// Creates a mojom::RangePtr with the (min, max, current, step) values of the
-// control associated with |control_id|. Returns an empty Range otherwise.
-static mojom::RangePtr RetrieveUserControlRange(int device_fd, int control_id) {
- mojom::RangePtr capability = mojom::Range::New();
-
- v4l2_queryctrl range = {};
- range.id = control_id;
- range.type = V4L2_CTRL_TYPE_INTEGER;
- if (!RunIoctl(device_fd, VIDIOC_QUERYCTRL, &range))
- return mojom::Range::New();
- capability->max = range.maximum;
- capability->min = range.minimum;
- capability->step = range.step;
-
- v4l2_control current = {};
- current.id = control_id;
- if (!RunIoctl(device_fd, VIDIOC_G_CTRL, &current))
- return mojom::Range::New();
- capability->current = current.value;
-
- return capability;
-}
-
// Determines if |control_id| is special, i.e. controls another one's state.
static bool IsSpecialControl(int control_id) {
switch (control_id) {
@@ -203,114 +166,12 @@ static bool IsBlacklistedControl(int control_id) {
return false;
}
-// Sets all user control to their default. Some controls are enabled by another
-// flag, usually having the word "auto" in the name, see IsSpecialControl().
-// These flags are preset beforehand, then set to their defaults individually
-// afterwards.
-static void ResetUserAndCameraControlsToDefault(int device_fd) {
- // Set V4L2_CID_AUTO_WHITE_BALANCE to false first.
- v4l2_control auto_white_balance = {};
- auto_white_balance.id = V4L2_CID_AUTO_WHITE_BALANCE;
- auto_white_balance.value = false;
- if (!RunIoctl(device_fd, VIDIOC_S_CTRL, &auto_white_balance))
- return;
-
- std::vector<struct v4l2_ext_control> special_camera_controls;
- // Set V4L2_CID_EXPOSURE_AUTO to V4L2_EXPOSURE_MANUAL.
- v4l2_ext_control auto_exposure = {};
- auto_exposure.id = V4L2_CID_EXPOSURE_AUTO;
- auto_exposure.value = V4L2_EXPOSURE_MANUAL;
- special_camera_controls.push_back(auto_exposure);
- // Set V4L2_CID_EXPOSURE_AUTO_PRIORITY to false.
- v4l2_ext_control priority_auto_exposure = {};
- priority_auto_exposure.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
- priority_auto_exposure.value = false;
- special_camera_controls.push_back(priority_auto_exposure);
- // Set V4L2_CID_FOCUS_AUTO to false.
- v4l2_ext_control auto_focus = {};
- auto_focus.id = V4L2_CID_FOCUS_AUTO;
- auto_focus.value = false;
- special_camera_controls.push_back(auto_focus);
-
- struct v4l2_ext_controls ext_controls = {};
- ext_controls.ctrl_class = V4L2_CID_CAMERA_CLASS;
- ext_controls.count = special_camera_controls.size();
- ext_controls.controls = special_camera_controls.data();
- if (HANDLE_EINTR(ioctl(device_fd, VIDIOC_S_EXT_CTRLS, &ext_controls)) < 0)
- DPLOG(ERROR) << "VIDIOC_S_EXT_CTRLS";
-
- std::vector<struct v4l2_ext_control> camera_controls;
- for (const auto& control : kControls) {
- std::vector<struct v4l2_ext_control> camera_controls;
-
- v4l2_queryctrl range = {};
- range.id = control.control_base | V4L2_CTRL_FLAG_NEXT_CTRL;
- while (0 == HANDLE_EINTR(ioctl(device_fd, VIDIOC_QUERYCTRL, &range))) {
- if (V4L2_CTRL_ID2CLASS(range.id) != V4L2_CTRL_ID2CLASS(control.class_id))
- break;
- range.id |= V4L2_CTRL_FLAG_NEXT_CTRL;
-
- if (IsSpecialControl(range.id & ~V4L2_CTRL_FLAG_NEXT_CTRL))
- continue;
- if (IsBlacklistedControl(range.id & ~V4L2_CTRL_FLAG_NEXT_CTRL))
- continue;
-
- struct v4l2_ext_control ext_control = {};
- ext_control.id = range.id & ~V4L2_CTRL_FLAG_NEXT_CTRL;
- ext_control.value = range.default_value;
- camera_controls.push_back(ext_control);
- }
-
- if (!camera_controls.empty()) {
- struct v4l2_ext_controls ext_controls = {};
- ext_controls.ctrl_class = control.class_id;
- ext_controls.count = camera_controls.size();
- ext_controls.controls = camera_controls.data();
- if (HANDLE_EINTR(ioctl(device_fd, VIDIOC_S_EXT_CTRLS, &ext_controls)) < 0)
- DPLOG(ERROR) << "VIDIOC_S_EXT_CTRLS";
- }
- }
-
- // Now set the special flags to the default values
- v4l2_queryctrl range = {};
- range.id = V4L2_CID_AUTO_WHITE_BALANCE;
- HANDLE_EINTR(ioctl(device_fd, VIDIOC_QUERYCTRL, &range));
- auto_white_balance.value = range.default_value;
- HANDLE_EINTR(ioctl(device_fd, VIDIOC_S_CTRL, &auto_white_balance));
-
- special_camera_controls.clear();
- memset(&range, 0, sizeof(struct v4l2_queryctrl));
- range.id = V4L2_CID_EXPOSURE_AUTO;
- HANDLE_EINTR(ioctl(device_fd, VIDIOC_QUERYCTRL, &range));
- auto_exposure.value = range.default_value;
- special_camera_controls.push_back(auto_exposure);
-
- memset(&range, 0, sizeof(struct v4l2_queryctrl));
- range.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
- HANDLE_EINTR(ioctl(device_fd, VIDIOC_QUERYCTRL, &range));
- priority_auto_exposure.value = range.default_value;
- special_camera_controls.push_back(priority_auto_exposure);
-
- memset(&range, 0, sizeof(struct v4l2_queryctrl));
- range.id = V4L2_CID_FOCUS_AUTO;
- HANDLE_EINTR(ioctl(device_fd, VIDIOC_QUERYCTRL, &range));
- auto_focus.value = range.default_value;
- special_camera_controls.push_back(auto_focus);
-
- memset(&ext_controls, 0, sizeof(struct v4l2_ext_controls));
- ext_controls.ctrl_class = V4L2_CID_CAMERA_CLASS;
- ext_controls.count = special_camera_controls.size();
- ext_controls.controls = special_camera_controls.data();
- if (HANDLE_EINTR(ioctl(device_fd, VIDIOC_S_EXT_CTRLS, &ext_controls)) < 0)
- DPLOG(ERROR) << "VIDIOC_S_EXT_CTRLS";
-}
-
-// Class keeping track of a SPLANE V4L2 buffer, mmap()ed on construction and
-// munmap()ed on destruction.
+// Class keeping track of a SPLANE V4L2CaptureDevice buffer, mmap()ed on
+// construction and munmap()ed on destruction.
class V4L2CaptureDelegate::BufferTracker
: public base::RefCounted<BufferTracker> {
public:
- BufferTracker();
+ BufferTracker(V4L2CaptureDevice* v4l2);
// Abstract method to mmap() given |fd| according to |buffer|.
bool Init(int fd, const v4l2_buffer& buffer);
@@ -325,6 +186,7 @@ class V4L2CaptureDelegate::BufferTracker
friend class base::RefCounted<BufferTracker>;
virtual ~BufferTracker();
+ V4L2CaptureDevice* const v4l2_;
uint8_t* start_;
size_t length_;
size_t payload_size_;
@@ -368,13 +230,39 @@ std::list<uint32_t> V4L2CaptureDelegate::GetListOfUsableFourCcs(
return supported_formats;
}
+V4L2CaptureDelegate::ScopedV4L2DeviceFD::ScopedV4L2DeviceFD(
+ V4L2CaptureDevice* v4l2)
+ : device_fd_(kInvalidId), v4l2_(v4l2) {}
+
+V4L2CaptureDelegate::ScopedV4L2DeviceFD::~ScopedV4L2DeviceFD() {
+ if (is_valid())
+ reset();
+}
+
+int V4L2CaptureDelegate::ScopedV4L2DeviceFD::get() {
+ return device_fd_;
+}
+
+void V4L2CaptureDelegate::ScopedV4L2DeviceFD::reset(int fd /*= kInvalidId*/) {
+ if (is_valid())
+ v4l2_->close(device_fd_);
+ device_fd_ = fd;
+}
+
+bool V4L2CaptureDelegate::ScopedV4L2DeviceFD::is_valid() {
+ return device_fd_ != kInvalidId;
+}
+
V4L2CaptureDelegate::V4L2CaptureDelegate(
+ V4L2CaptureDevice* v4l2,
const VideoCaptureDeviceDescriptor& device_descriptor,
const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
int power_line_frequency)
- : v4l2_task_runner_(v4l2_task_runner),
+ : v4l2_(v4l2),
+ v4l2_task_runner_(v4l2_task_runner),
device_descriptor_(device_descriptor),
power_line_frequency_(power_line_frequency),
+ device_fd_(v4l2),
is_capturing_(false),
timeout_count_(0),
rotation_(0),
@@ -391,20 +279,23 @@ void V4L2CaptureDelegate::AllocateAndStart(
// Need to open camera with O_RDWR after Linux kernel 3.3.
device_fd_.reset(
- HANDLE_EINTR(open(device_descriptor_.device_id.c_str(), O_RDWR)));
+ HANDLE_EINTR(v4l2_->open(device_descriptor_.device_id.c_str(), O_RDWR)));
if (!device_fd_.is_valid()) {
- SetErrorState(FROM_HERE, "Failed to open V4L2 device driver file.");
+ SetErrorState(FROM_HERE,
+ "Failed to open V4L2CaptureDevice device driver file.");
return;
}
ResetUserAndCameraControlsToDefault(device_fd_.get());
v4l2_capability cap = {};
- if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
+ if (!((HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) ==
+ 0) &&
((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
!(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)))) {
device_fd_.reset();
- SetErrorState(FROM_HERE, "This is not a V4L2 video capture device");
+ SetErrorState(FROM_HERE,
+ "This is not a V4L2CaptureDevice video capture device");
return;
}
@@ -416,7 +307,8 @@ void V4L2CaptureDelegate::AllocateAndStart(
v4l2_fmtdesc fmtdesc = {};
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
+ for (; HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
++fmtdesc.index) {
best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat);
}
@@ -428,7 +320,8 @@ void V4L2CaptureDelegate::AllocateAndStart(
DVLOG(1) << "Chosen pixel format is " << FourccToString(*best);
FillV4L2Format(&video_fmt_, width, height, *best);
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt_)) < 0) {
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt_)) <
+ 0) {
SetErrorState(FROM_HERE, "Failed to set video capture format");
return;
}
@@ -443,7 +336,8 @@ void V4L2CaptureDelegate::AllocateAndStart(
v4l2_streamparm streamparm = {};
streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
// The following line checks that the driver knows about framerate get/set.
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
+ if (HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
// Now check if the device is able to accept a capture framerate set.
if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
// |frame_rate| is float, approximate by a fraction.
@@ -452,8 +346,8 @@ void V4L2CaptureDelegate::AllocateAndStart(
(frame_rate) ? (frame_rate * kFrameRatePrecision)
: (kTypicalFramerate * kFrameRatePrecision);
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) <
- 0) {
+ if (HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) < 0) {
SetErrorState(FROM_HERE, "Failed to set camera framerate");
return;
}
@@ -474,7 +368,7 @@ void V4L2CaptureDelegate::AllocateAndStart(
control.id = V4L2_CID_POWER_LINE_FREQUENCY;
control.value = power_line_frequency_;
const int retval =
- HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control));
+ HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control));
if (retval != 0)
DVLOG(1) << "Error setting power line frequency removal";
}
@@ -486,8 +380,10 @@ void V4L2CaptureDelegate::AllocateAndStart(
v4l2_requestbuffers r_buffer;
FillV4L2RequestBuffer(&r_buffer, kNumVideoBuffers);
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) {
- SetErrorState(FROM_HERE, "Error requesting MMAP buffers from V4L2");
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) <
+ 0) {
+ SetErrorState(FROM_HERE,
+ "Error requesting MMAP buffers from V4L2CaptureDevice");
return;
}
for (unsigned int i = 0; i < r_buffer.count; ++i) {
@@ -498,8 +394,8 @@ void V4L2CaptureDelegate::AllocateAndStart(
}
v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type)) <
- 0) {
+ if (HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type)) < 0) {
SetErrorState(FROM_HERE, "VIDIOC_STREAMON failed");
return;
}
@@ -517,8 +413,8 @@ void V4L2CaptureDelegate::StopAndDeAllocate() {
// The order is important: stop streaming, clear |buffer_pool_|,
// thus munmap()ing the v4l2_buffers, and then return them to the OS.
v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type)) <
- 0) {
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_STREAMOFF,
+ &capture_type)) < 0) {
SetErrorState(FROM_HERE, "VIDIOC_STREAMOFF failed");
return;
}
@@ -527,7 +423,8 @@ void V4L2CaptureDelegate::StopAndDeAllocate() {
v4l2_requestbuffers r_buffer;
FillV4L2RequestBuffer(&r_buffer, 0);
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0)
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) <
+ 0)
SetErrorState(FROM_HERE, "Failed to VIDIOC_REQBUFS with count = 0");
// At this point we can close the device.
@@ -569,8 +466,8 @@ void V4L2CaptureDelegate::GetPhotoState(
photo_capabilities->current_focus_mode = MeteringMode::NONE;
v4l2_control auto_focus_current = {};
auto_focus_current.id = V4L2_CID_FOCUS_AUTO;
- if (HANDLE_EINTR(
- ioctl(device_fd_.get(), VIDIOC_G_CTRL, &auto_focus_current)) >= 0) {
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_G_CTRL,
+ &auto_focus_current)) >= 0) {
photo_capabilities->current_focus_mode = auto_focus_current.value
? MeteringMode::CONTINUOUS
: MeteringMode::MANUAL;
@@ -588,8 +485,8 @@ void V4L2CaptureDelegate::GetPhotoState(
photo_capabilities->current_exposure_mode = MeteringMode::NONE;
v4l2_control exposure_current = {};
exposure_current.id = V4L2_CID_EXPOSURE_AUTO;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_CTRL, &exposure_current)) >=
- 0) {
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_G_CTRL,
+ &exposure_current)) >= 0) {
photo_capabilities->current_exposure_mode =
exposure_current.value == V4L2_EXPOSURE_MANUAL
? MeteringMode::MANUAL
@@ -616,8 +513,8 @@ void V4L2CaptureDelegate::GetPhotoState(
photo_capabilities->current_white_balance_mode = MeteringMode::NONE;
v4l2_control white_balance_current = {};
white_balance_current.id = V4L2_CID_AUTO_WHITE_BALANCE;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_CTRL,
- &white_balance_current)) >= 0) {
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_G_CTRL,
+ &white_balance_current)) >= 0) {
photo_capabilities->current_white_balance_mode =
white_balance_current.value ? MeteringMode::CONTINUOUS
: MeteringMode::MANUAL;
@@ -656,7 +553,8 @@ void V4L2CaptureDelegate::SetPhotoOptions(
v4l2_control zoom_current = {};
zoom_current.id = V4L2_CID_ZOOM_ABSOLUTE;
zoom_current.value = settings->zoom;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &zoom_current)) < 0)
+ if (HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &zoom_current)) < 0)
DPLOG(ERROR) << "setting zoom to " << settings->zoom;
}
@@ -667,20 +565,22 @@ void V4L2CaptureDelegate::SetPhotoOptions(
white_balance_set.id = V4L2_CID_AUTO_WHITE_BALANCE;
white_balance_set.value =
settings->white_balance_mode == mojom::MeteringMode::CONTINUOUS;
- HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &white_balance_set));
+ HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &white_balance_set));
}
if (settings->has_color_temperature) {
v4l2_control auto_white_balance_current = {};
auto_white_balance_current.id = V4L2_CID_AUTO_WHITE_BALANCE;
- const int result = HANDLE_EINTR(
- ioctl(device_fd_.get(), VIDIOC_G_CTRL, &auto_white_balance_current));
+ const int result = HANDLE_EINTR(v4l2_->ioctl(
+ device_fd_.get(), VIDIOC_G_CTRL, &auto_white_balance_current));
// Color temperature can only be applied if Auto White Balance is off.
if (result >= 0 && !auto_white_balance_current.value) {
v4l2_control set_temperature = {};
set_temperature.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
set_temperature.value = settings->color_temperature;
- HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &set_temperature));
+ HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &set_temperature));
}
}
@@ -693,20 +593,22 @@ void V4L2CaptureDelegate::SetPhotoOptions(
settings->exposure_mode == mojom::MeteringMode::CONTINUOUS
? V4L2_EXPOSURE_APERTURE_PRIORITY
: V4L2_EXPOSURE_MANUAL;
- HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &exposure_mode_set));
+ HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &exposure_mode_set));
}
if (settings->has_exposure_compensation) {
v4l2_control auto_exposure_current = {};
auto_exposure_current.id = V4L2_CID_EXPOSURE_AUTO;
const int result = HANDLE_EINTR(
- ioctl(device_fd_.get(), VIDIOC_G_CTRL, &auto_exposure_current));
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_G_CTRL, &auto_exposure_current));
// Exposure Compensation can only be applied if Auto Exposure is off.
if (result >= 0 && auto_exposure_current.value == V4L2_EXPOSURE_MANUAL) {
v4l2_control set_exposure = {};
set_exposure.id = V4L2_CID_EXPOSURE_ABSOLUTE;
set_exposure.value = settings->exposure_compensation;
- HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &set_exposure));
+ HANDLE_EINTR(
+ v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &set_exposure));
}
}
@@ -714,28 +616,32 @@ void V4L2CaptureDelegate::SetPhotoOptions(
v4l2_control current = {};
current.id = V4L2_CID_BRIGHTNESS;
current.value = settings->brightness;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &current)) < 0)
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &current)) <
+ 0)
DPLOG(ERROR) << "setting brightness to " << settings->brightness;
}
if (settings->has_contrast) {
v4l2_control current = {};
current.id = V4L2_CID_CONTRAST;
current.value = settings->contrast;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &current)) < 0)
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &current)) <
+ 0)
DPLOG(ERROR) << "setting contrast to " << settings->contrast;
}
if (settings->has_saturation) {
v4l2_control current = {};
current.id = V4L2_CID_SATURATION;
current.value = settings->saturation;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &current)) < 0)
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &current)) <
+ 0)
DPLOG(ERROR) << "setting saturation to " << settings->saturation;
}
if (settings->has_sharpness) {
v4l2_control current = {};
current.id = V4L2_CID_SHARPNESS;
current.value = settings->sharpness;
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &current)) < 0)
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_S_CTRL, &current)) <
+ 0)
DPLOG(ERROR) << "setting sharpness to " << settings->sharpness;
}
@@ -754,16 +660,162 @@ base::WeakPtr<V4L2CaptureDelegate> V4L2CaptureDelegate::GetWeakPtr() {
V4L2CaptureDelegate::~V4L2CaptureDelegate() = default;
+// Running v4l2_->ioctl() on some devices, especially shortly after (re)opening
+// the device file descriptor or (re)starting streaming, can fail but works
+// after retrying (https://crbug.com/670262). Returns false if the |request|
+// ioctl fails too many times.
+bool V4L2CaptureDelegate::RunIoctl(int fd, int request, void* argp) {
+ int num_retries = 0;
+ for (; HANDLE_EINTR(v4l2_->ioctl(fd, request, argp)) < 0 &&
+ num_retries < kMaxIOCtrlRetries;
+ ++num_retries) {
+ DPLOG(WARNING) << "ioctl";
+ }
+ DPLOG_IF(ERROR, num_retries != kMaxIOCtrlRetries);
+ return num_retries != kMaxIOCtrlRetries;
+}
+
+// Creates a mojom::RangePtr with the (min, max, current, step) values of the
+// control associated with |control_id|. Returns an empty Range otherwise.
+mojom::RangePtr V4L2CaptureDelegate::RetrieveUserControlRange(int device_fd,
+ int control_id) {
+ mojom::RangePtr capability = mojom::Range::New();
+
+ v4l2_queryctrl range = {};
+ range.id = control_id;
+ range.type = V4L2_CTRL_TYPE_INTEGER;
+ if (!RunIoctl(device_fd, VIDIOC_QUERYCTRL, &range))
+ return mojom::Range::New();
+ capability->max = range.maximum;
+ capability->min = range.minimum;
+ capability->step = range.step;
+
+ v4l2_control current = {};
+ current.id = control_id;
+ if (!RunIoctl(device_fd, VIDIOC_G_CTRL, &current))
+ return mojom::Range::New();
+ capability->current = current.value;
+
+ return capability;
+}
+
+// Sets all user control to their default. Some controls are enabled by another
+// flag, usually having the word "auto" in the name, see IsSpecialControl().
+// These flags are preset beforehand, then set to their defaults individually
+// afterwards.
+void V4L2CaptureDelegate::ResetUserAndCameraControlsToDefault(int device_fd) {
+ // Set V4L2_CID_AUTO_WHITE_BALANCE to false first.
+ v4l2_control auto_white_balance = {};
+ auto_white_balance.id = V4L2_CID_AUTO_WHITE_BALANCE;
+ auto_white_balance.value = false;
+ if (!RunIoctl(device_fd, VIDIOC_S_CTRL, &auto_white_balance))
+ return;
+
+ std::vector<struct v4l2_ext_control> special_camera_controls;
+ // Set V4L2_CID_EXPOSURE_AUTO to V4L2_EXPOSURE_MANUAL.
+ v4l2_ext_control auto_exposure = {};
+ auto_exposure.id = V4L2_CID_EXPOSURE_AUTO;
+ auto_exposure.value = V4L2_EXPOSURE_MANUAL;
+ special_camera_controls.push_back(auto_exposure);
+ // Set V4L2_CID_EXPOSURE_AUTO_PRIORITY to false.
+ v4l2_ext_control priority_auto_exposure = {};
+ priority_auto_exposure.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
+ priority_auto_exposure.value = false;
+ special_camera_controls.push_back(priority_auto_exposure);
+ // Set V4L2_CID_FOCUS_AUTO to false.
+ v4l2_ext_control auto_focus = {};
+ auto_focus.id = V4L2_CID_FOCUS_AUTO;
+ auto_focus.value = false;
+ special_camera_controls.push_back(auto_focus);
+
+ struct v4l2_ext_controls ext_controls = {};
+ ext_controls.ctrl_class = V4L2_CID_CAMERA_CLASS;
+ ext_controls.count = special_camera_controls.size();
+ ext_controls.controls = special_camera_controls.data();
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd, VIDIOC_S_EXT_CTRLS, &ext_controls)) <
+ 0)
+ DPLOG(ERROR) << "VIDIOC_S_EXT_CTRLS";
+
+ std::vector<struct v4l2_ext_control> camera_controls;
+ for (const auto& control : kControls) {
+ std::vector<struct v4l2_ext_control> camera_controls;
+
+ v4l2_queryctrl range = {};
+ range.id = control.control_base | V4L2_CTRL_FLAG_NEXT_CTRL;
+ while (0 ==
+ HANDLE_EINTR(v4l2_->ioctl(device_fd, VIDIOC_QUERYCTRL, &range))) {
+ if (V4L2_CTRL_ID2CLASS(range.id) != V4L2_CTRL_ID2CLASS(control.class_id))
+ break;
+ range.id |= V4L2_CTRL_FLAG_NEXT_CTRL;
+
+ if (IsSpecialControl(range.id & ~V4L2_CTRL_FLAG_NEXT_CTRL))
+ continue;
+ if (IsBlacklistedControl(range.id & ~V4L2_CTRL_FLAG_NEXT_CTRL))
+ continue;
+
+ struct v4l2_ext_control ext_control = {};
+ ext_control.id = range.id & ~V4L2_CTRL_FLAG_NEXT_CTRL;
+ ext_control.value = range.default_value;
+ camera_controls.push_back(ext_control);
+ }
+
+ if (!camera_controls.empty()) {
+ struct v4l2_ext_controls ext_controls = {};
+ ext_controls.ctrl_class = control.class_id;
+ ext_controls.count = camera_controls.size();
+ ext_controls.controls = camera_controls.data();
+ if (HANDLE_EINTR(
+ v4l2_->ioctl(device_fd, VIDIOC_S_EXT_CTRLS, &ext_controls)) < 0)
+ DPLOG(ERROR) << "VIDIOC_S_EXT_CTRLS";
+ }
+ }
+
+ // Now set the special flags to the default values
+ v4l2_queryctrl range = {};
+ range.id = V4L2_CID_AUTO_WHITE_BALANCE;
+ HANDLE_EINTR(v4l2_->ioctl(device_fd, VIDIOC_QUERYCTRL, &range));
+ auto_white_balance.value = range.default_value;
+ HANDLE_EINTR(v4l2_->ioctl(device_fd, VIDIOC_S_CTRL, &auto_white_balance));
+
+ special_camera_controls.clear();
+ memset(&range, 0, sizeof(struct v4l2_queryctrl));
+ range.id = V4L2_CID_EXPOSURE_AUTO;
+ HANDLE_EINTR(v4l2_->ioctl(device_fd, VIDIOC_QUERYCTRL, &range));
+ auto_exposure.value = range.default_value;
+ special_camera_controls.push_back(auto_exposure);
+
+ memset(&range, 0, sizeof(struct v4l2_queryctrl));
+ range.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
+ HANDLE_EINTR(v4l2_->ioctl(device_fd, VIDIOC_QUERYCTRL, &range));
+ priority_auto_exposure.value = range.default_value;
+ special_camera_controls.push_back(priority_auto_exposure);
+
+ memset(&range, 0, sizeof(struct v4l2_queryctrl));
+ range.id = V4L2_CID_FOCUS_AUTO;
+ HANDLE_EINTR(v4l2_->ioctl(device_fd, VIDIOC_QUERYCTRL, &range));
+ auto_focus.value = range.default_value;
+ special_camera_controls.push_back(auto_focus);
+
+ memset(&ext_controls, 0, sizeof(struct v4l2_ext_controls));
+ ext_controls.ctrl_class = V4L2_CID_CAMERA_CLASS;
+ ext_controls.count = special_camera_controls.size();
+ ext_controls.controls = special_camera_controls.data();
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd, VIDIOC_S_EXT_CTRLS, &ext_controls)) <
+ 0)
+ DPLOG(ERROR) << "VIDIOC_S_EXT_CTRLS";
+}
+
bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) {
v4l2_buffer buffer;
FillV4L2Buffer(&buffer, index);
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
- DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer";
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) <
+ 0) {
+ DLOG(ERROR) << "Error querying status of a MMAP V4L2CaptureDevice buffer";
return false;
}
- const scoped_refptr<BufferTracker> buffer_tracker(new BufferTracker());
+ const scoped_refptr<BufferTracker> buffer_tracker(new BufferTracker(v4l2_));
if (!buffer_tracker->Init(device_fd_.get(), buffer)) {
DLOG(ERROR) << "Error creating BufferTracker";
return false;
@@ -771,8 +823,9 @@ bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) {
buffer_tracker_pool_.push_back(buffer_tracker);
// Enqueue the buffer in the drivers incoming queue.
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
- DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver";
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
+ DLOG(ERROR)
+ << "Error enqueuing a V4L2CaptureDevice buffer back into the driver";
return false;
}
return true;
@@ -786,7 +839,8 @@ void V4L2CaptureDelegate::DoCapture() {
pollfd device_pfd = {};
device_pfd.fd = device_fd_.get();
device_pfd.events = POLLIN;
- const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs));
+ const int result =
+ HANDLE_EINTR(v4l2_->poll(&device_pfd, 1, kCaptureTimeoutMs));
if (result < 0) {
SetErrorState(FROM_HERE, "Poll failed");
return;
@@ -810,7 +864,8 @@ void V4L2CaptureDelegate::DoCapture() {
v4l2_buffer buffer;
FillV4L2Buffer(&buffer, 0);
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) {
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) <
+ 0) {
SetErrorState(FROM_HERE, "Failed to dequeue capture buffer");
return;
}
@@ -856,7 +911,8 @@ void V4L2CaptureDelegate::DoCapture() {
std::move(cb).Run(std::move(blob));
}
- if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
+ if (HANDLE_EINTR(v4l2_->ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) <
+ 0) {
SetErrorState(FROM_HERE, "Failed to enqueue capture buffer");
return;
}
@@ -873,23 +929,24 @@ void V4L2CaptureDelegate::SetErrorState(const base::Location& from_here,
client_->OnError(from_here, reason);
}
-V4L2CaptureDelegate::BufferTracker::BufferTracker() = default;
+V4L2CaptureDelegate::BufferTracker::BufferTracker(V4L2CaptureDevice* v4l2)
+ : v4l2_(v4l2) {}
V4L2CaptureDelegate::BufferTracker::~BufferTracker() {
if (start_ == nullptr)
return;
- const int result = munmap(start_, length_);
- PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer";
+ const int result = v4l2_->munmap(start_, length_);
+ PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2CaptureDevice buffer";
}
bool V4L2CaptureDelegate::BufferTracker::Init(int fd,
const v4l2_buffer& buffer) {
// Some devices require mmap() to be called with both READ and WRITE.
// See http://crbug.com/178582.
- void* const start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
- MAP_SHARED, fd, buffer.m.offset);
+ void* const start = v4l2_->mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
+ MAP_SHARED, fd, buffer.m.offset);
if (start == MAP_FAILED) {
- DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace";
+ DLOG(ERROR) << "Error mmap()ing a V4L2CaptureDevice buffer into userspace";
return false;
}
start_ = static_cast<uint8_t*>(start);
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate.h b/chromium/media/capture/video/linux/v4l2_capture_delegate.h
index 37f1964b088..5a246a3b7d0 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate.h
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate.h
@@ -12,6 +12,7 @@
#include "base/files/scoped_file.h"
#include "base/macros.h"
#include "build/build_config.h"
+#include "media/capture/video/linux/v4l2_capture_device_impl.h"
#include "media/capture/video/video_capture_device.h"
#if defined(OS_OPENBSD)
@@ -26,9 +27,10 @@ class Location;
namespace media {
-// Class doing the actual Linux capture using V4L2 API. V4L2 SPLANE/MPLANE
-// capture specifics are implemented in derived classes. Created on the owner's
-// thread, otherwise living, operating and destroyed on |v4l2_task_runner_|.
+// Class doing the actual Linux capture using V4L2CaptureDevice API.
+// V4L2CaptureDevice SPLANE/MPLANE capture specifics are implemented in derived
+// classes. Created on the owner's thread, otherwise living, operating and
+// destroyed on |v4l2_task_runner_|.
class CAPTURE_EXPORT V4L2CaptureDelegate final {
public:
// Retrieves the #planes for a given |fourcc|, or 0 if unknown.
@@ -42,6 +44,7 @@ class CAPTURE_EXPORT V4L2CaptureDelegate final {
static std::list<uint32_t> GetListOfUsableFourCcs(bool prefer_mjpeg);
V4L2CaptureDelegate(
+ V4L2CaptureDevice* v4l2,
const VideoCaptureDeviceDescriptor& device_descriptor,
const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner,
int power_line_frequency);
@@ -68,9 +71,28 @@ class CAPTURE_EXPORT V4L2CaptureDelegate final {
friend class V4L2CaptureDelegateTest;
class BufferTracker;
-
- // VIDIOC_QUERYBUFs a buffer from V4L2, creates a BufferTracker for it and
- // enqueues it (VIDIOC_QBUF) back into V4L2.
+ class ScopedV4L2DeviceFD {
+ public:
+ static constexpr int kInvalidId = -1;
+ ScopedV4L2DeviceFD(V4L2CaptureDevice* v4l2);
+ ~ScopedV4L2DeviceFD();
+ int get();
+ void reset(int fd = kInvalidId);
+ bool is_valid();
+
+ private:
+ int device_fd_;
+ V4L2CaptureDevice* const v4l2_;
+ };
+
+ bool RunIoctl(int fd, int request, void* argp);
+ mojom::RangePtr RetrieveUserControlRange(int device_fd, int control_id);
+ void ResetUserAndCameraControlsToDefault(int device_fd);
+
+ // void CloseDevice();
+
+ // VIDIOC_QUERYBUFs a buffer from V4L2CaptureDevice, creates a BufferTracker
+ // for it and enqueues it (VIDIOC_QBUF) back into V4L2CaptureDevice.
bool MapAndQueueBuffer(int index);
void DoCapture();
@@ -78,6 +100,7 @@ class CAPTURE_EXPORT V4L2CaptureDelegate final {
void SetErrorState(const base::Location& from_here,
const std::string& reason);
+ V4L2CaptureDevice* const v4l2_;
const scoped_refptr<base::SingleThreadTaskRunner> v4l2_task_runner_;
const VideoCaptureDeviceDescriptor device_descriptor_;
const int power_line_frequency_;
@@ -86,7 +109,7 @@ class CAPTURE_EXPORT V4L2CaptureDelegate final {
VideoCaptureFormat capture_format_;
v4l2_format video_fmt_;
std::unique_ptr<VideoCaptureDevice::Client> client_;
- base::ScopedFD device_fd_;
+ ScopedV4L2DeviceFD device_fd_;
base::queue<VideoCaptureDevice::TakePhotoCallback> take_photo_callbacks_;
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc b/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
index bbbd9b72b03..446aab373a7 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
@@ -221,7 +221,9 @@ class V4L2CaptureDelegateTest : public ::testing::Test {
public:
V4L2CaptureDelegateTest()
: device_descriptor_("Device 0", "/dev/video0"),
+ v4l2_(new V4L2CaptureDeviceImpl()),
delegate_(std::make_unique<V4L2CaptureDelegate>(
+ v4l2_.get(),
device_descriptor_,
base::ThreadTaskRunnerHandle::Get(),
50)) {}
@@ -229,6 +231,7 @@ class V4L2CaptureDelegateTest : public ::testing::Test {
base::test::ScopedTaskEnvironment scoped_task_environment_;
VideoCaptureDeviceDescriptor device_descriptor_;
+ scoped_refptr<V4L2CaptureDevice> v4l2_;
std::unique_ptr<V4L2CaptureDelegate> delegate_;
};
diff --git a/chromium/media/capture/video/linux/v4l2_capture_device.h b/chromium/media/capture/video/linux/v4l2_capture_device.h
new file mode 100644
index 00000000000..2c95357c9a2
--- /dev/null
+++ b/chromium/media/capture/video/linux/v4l2_capture_device.h
@@ -0,0 +1,44 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_LINUX_V4L2_CAPTURE_DEVICE_H_
+#define MEDIA_CAPTURE_VIDEO_LINUX_V4L2_CAPTURE_DEVICE_H_
+
+#include <poll.h>
+#include <sys/fcntl.h>
+
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_refptr.h"
+#include "media/capture/capture_export.h"
+
+namespace media {
+
+// Interface for abstracting out the V4L2 API. This allows using a mock or fake
+// implementation in testing.
+class CAPTURE_EXPORT V4L2CaptureDevice
+ : public base::RefCounted<V4L2CaptureDevice> {
+ public:
+ virtual int open(const char* device_name, int flags) = 0;
+ virtual int close(int fd) = 0;
+ virtual int ioctl(int fd, int request, void* argp) = 0;
+ virtual void* mmap(void* start,
+ size_t length,
+ int prot,
+ int flags,
+ int fd,
+ off_t offset) = 0;
+
+ virtual int munmap(void* start, size_t length) = 0;
+ virtual int poll(struct pollfd* ufds, unsigned int nfds, int timeout) = 0;
+
+ protected:
+ virtual ~V4L2CaptureDevice() {}
+
+ private:
+ friend class base::RefCounted<V4L2CaptureDevice>;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_LINUX_V4L2_CAPTURE_DEVICE_H_
diff --git a/chromium/media/capture/video/linux/v4l2_capture_device_impl.cc b/chromium/media/capture/video/linux/v4l2_capture_device_impl.cc
new file mode 100644
index 00000000000..c9040f5dee6
--- /dev/null
+++ b/chromium/media/capture/video/linux/v4l2_capture_device_impl.cc
@@ -0,0 +1,48 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/linux/v4l2_capture_device_impl.h"
+
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/poll.h>
+#include <unistd.h>
+
+namespace media {
+
+V4L2CaptureDeviceImpl::~V4L2CaptureDeviceImpl() = default;
+
+int V4L2CaptureDeviceImpl::open(const char* device_name, int flags) {
+ return ::open(device_name, flags);
+}
+
+int V4L2CaptureDeviceImpl::close(int fd) {
+ return ::close(fd);
+}
+
+int V4L2CaptureDeviceImpl::ioctl(int fd, int request, void* argp) {
+ return ::ioctl(fd, request, argp);
+}
+
+void* V4L2CaptureDeviceImpl::mmap(void* start,
+ size_t length,
+ int prot,
+ int flags,
+ int fd,
+ off_t offset) {
+ return ::mmap(start, length, prot, flags, fd, offset);
+}
+
+int V4L2CaptureDeviceImpl::munmap(void* start, size_t length) {
+ return ::munmap(start, length);
+}
+
+int V4L2CaptureDeviceImpl::poll(struct pollfd* ufds,
+ unsigned int nfds,
+ int timeout) {
+ return ::poll(ufds, nfds, timeout);
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/linux/v4l2_capture_device_impl.h b/chromium/media/capture/video/linux/v4l2_capture_device_impl.h
new file mode 100644
index 00000000000..936c8b0938b
--- /dev/null
+++ b/chromium/media/capture/video/linux/v4l2_capture_device_impl.h
@@ -0,0 +1,39 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_LINUX_V4L2_CAPTURE_DEVICE_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_LINUX_V4L2_CAPTURE_DEVICE_IMPL_H_
+
+#include <poll.h>
+#include <sys/fcntl.h>
+
+#include "media/capture/capture_export.h"
+#include "media/capture/video/linux/v4l2_capture_device.h"
+
+namespace media {
+
+// Implementation of V4L2CaptureDevice interface that delegates to the actual
+// V4L2 APIs.
+class CAPTURE_EXPORT V4L2CaptureDeviceImpl : public V4L2CaptureDevice {
+ public:
+ int open(const char* device_name, int flags) override;
+ int close(int fd) override;
+ int ioctl(int fd, int request, void* argp) override;
+ void* mmap(void* start,
+ size_t length,
+ int prot,
+ int flags,
+ int fd,
+ off_t offset) override;
+
+ int munmap(void* start, size_t length) override;
+ int poll(struct pollfd* ufds, unsigned int nfds, int timeout) override;
+
+ private:
+ ~V4L2CaptureDeviceImpl() override;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_LINUX_V4L2_CAPTURE_DEVICE_IMPL_H_
diff --git a/chromium/media/capture/video/linux/video_capture_device_chromeos.cc b/chromium/media/capture/video/linux/video_capture_device_chromeos.cc
index 329939be1f9..8d4ba37da13 100644
--- a/chromium/media/capture/video/linux/video_capture_device_chromeos.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_chromeos.cc
@@ -17,36 +17,25 @@
namespace media {
-static CameraConfigChromeOS* GetCameraConfig() {
- static CameraConfigChromeOS* config = new CameraConfigChromeOS();
- return config;
-}
-
VideoCaptureDeviceChromeOS::VideoCaptureDeviceChromeOS(
+ const ChromeOSDeviceCameraConfig& camera_config,
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ scoped_refptr<V4L2CaptureDevice> v4l2,
const VideoCaptureDeviceDescriptor& device_descriptor)
- : VideoCaptureDeviceLinux(device_descriptor),
+ : VideoCaptureDeviceLinux(std::move(v4l2), device_descriptor),
+ camera_config_(camera_config),
screen_observer_delegate_(
- new ScreenObserverDelegate(this, ui_task_runner)),
- lens_facing_(
- GetCameraConfig()->GetCameraFacing(device_descriptor.device_id,
- device_descriptor.model_id)),
- camera_orientation_(
- GetCameraConfig()->GetOrientation(device_descriptor.device_id,
- device_descriptor.model_id)),
- // External cameras have lens_facing as MEDIA_VIDEO_FACING_NONE.
- // We don't want to rotate the frame even if the device rotates.
- rotates_with_device_(lens_facing_ !=
- VideoFacingMode::MEDIA_VIDEO_FACING_NONE) {}
+ new ScreenObserverDelegate(this, ui_task_runner)) {}
VideoCaptureDeviceChromeOS::~VideoCaptureDeviceChromeOS() {
screen_observer_delegate_->RemoveObserver();
}
void VideoCaptureDeviceChromeOS::SetRotation(int rotation) {
- if (!rotates_with_device_) {
+ if (!camera_config_.rotates_with_device) {
rotation = 0;
- } else if (lens_facing_ == VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT) {
+ } else if (camera_config_.lens_facing ==
+ VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT) {
// Original frame when |rotation| = 0
// -----------------------
// | * |
@@ -82,7 +71,7 @@ void VideoCaptureDeviceChromeOS::SetRotation(int rotation) {
}
// Take into account camera orientation w.r.t. the display. External cameras
// would have camera_orientation_ as 0.
- rotation = (rotation + camera_orientation_) % 360;
+ rotation = (rotation + camera_config_.camera_orientation) % 360;
VideoCaptureDeviceLinux::SetRotation(rotation);
}
diff --git a/chromium/media/capture/video/linux/video_capture_device_chromeos.h b/chromium/media/capture/video/linux/video_capture_device_chromeos.h
index d9e738ed2f2..ffb6a377bc7 100644
--- a/chromium/media/capture/video/linux/video_capture_device_chromeos.h
+++ b/chromium/media/capture/video/linux/video_capture_device_chromeos.h
@@ -8,7 +8,6 @@
#include "base/macros.h"
#include "base/single_thread_task_runner.h"
#include "media/capture/video/chromeos/display_rotation_observer.h"
-#include "media/capture/video/linux/camera_config_chromeos.h"
#include "media/capture/video/linux/video_capture_device_linux.h"
namespace display {
@@ -17,14 +16,32 @@ class Display;
namespace media {
+struct ChromeOSDeviceCameraConfig {
+ ChromeOSDeviceCameraConfig(VideoFacingMode lens_facing,
+ int camera_orientation)
+ : lens_facing(lens_facing),
+ camera_orientation(camera_orientation),
+ // External cameras have lens_facing as MEDIA_VIDEO_FACING_NONE.
+ // We don't want to rotate the frame even if the device rotates.
+ rotates_with_device(lens_facing !=
+ VideoFacingMode::MEDIA_VIDEO_FACING_NONE) {}
+
+ const VideoFacingMode lens_facing;
+ const int camera_orientation;
+ // Whether the incoming frames should rotate when the device rotates.
+ const bool rotates_with_device;
+};
+
// This class is functionally the same as VideoCaptureDeviceLinux, with the
// exception that it is aware of the orientation of the internal Display. When
// the internal Display is rotated, the frames captured are rotated to match.
class VideoCaptureDeviceChromeOS : public VideoCaptureDeviceLinux,
public DisplayRotationObserver {
public:
- explicit VideoCaptureDeviceChromeOS(
+ VideoCaptureDeviceChromeOS(
+ const ChromeOSDeviceCameraConfig& camera_config,
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ scoped_refptr<V4L2CaptureDevice> v4l2,
const VideoCaptureDeviceDescriptor& device_descriptor);
~VideoCaptureDeviceChromeOS() override;
@@ -34,11 +51,8 @@ class VideoCaptureDeviceChromeOS : public VideoCaptureDeviceLinux,
private:
// DisplayRotationObserver implementation.
void SetDisplayRotation(const display::Display& display) override;
+ const ChromeOSDeviceCameraConfig camera_config_;
scoped_refptr<ScreenObserverDelegate> screen_observer_delegate_;
- const VideoFacingMode lens_facing_;
- const int camera_orientation_;
- // Whether the incoming frames should rotate when the device rotates.
- const bool rotates_with_device_;
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceChromeOS);
};
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
index 3699cb953ec..99c9912010e 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
@@ -44,6 +44,13 @@ const char kPidPathTemplate[] = "/sys/class/video4linux/%s/device/../idProduct";
const char kInterfacePathTemplate[] =
"/sys/class/video4linux/%s/device/interface";
+#if defined(OS_CHROMEOS)
+static CameraConfigChromeOS* GetCameraConfig() {
+ static CameraConfigChromeOS* config = new CameraConfigChromeOS();
+ return config;
+}
+#endif
+
bool ReadIdFile(const std::string& path, std::string* id) {
char id_buf[kVidPidSize];
FILE* file = fopen(path.c_str(), "rb");
@@ -57,155 +64,110 @@ bool ReadIdFile(const std::string& path, std::string* id) {
return true;
}
-bool HasUsableFormats(int fd, uint32_t capabilities) {
- if (!(capabilities & V4L2_CAP_VIDEO_CAPTURE))
- return false;
-
- const std::list<uint32_t>& usable_fourccs =
- VideoCaptureDeviceLinux::GetListOfUsableFourCCs(false);
- v4l2_fmtdesc fmtdesc = {};
- fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- for (; HANDLE_EINTR(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
- ++fmtdesc.index) {
- if (std::find(usable_fourccs.begin(), usable_fourccs.end(),
- fmtdesc.pixelformat) != usable_fourccs.end()) {
- return true;
- }
- }
-
- DLOG(ERROR) << "No usable formats found";
- return false;
+std::string ExtractFileNameFromDeviceId(const std::string& device_id) {
+ // |unique_id| is of the form "/dev/video2". |file_name| is "video2".
+ const char kDevDir[] = "/dev/";
+ DCHECK(base::StartsWith(device_id, kDevDir, base::CompareCase::SENSITIVE));
+ return device_id.substr(strlen(kDevDir), device_id.length());
}
-std::list<float> GetFrameRateList(int fd,
- uint32_t fourcc,
- uint32_t width,
- uint32_t height) {
- std::list<float> frame_rates;
-
- v4l2_frmivalenum frame_interval = {};
- frame_interval.pixel_format = fourcc;
- frame_interval.width = width;
- frame_interval.height = height;
- for (; HANDLE_EINTR(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frame_interval)) ==
- 0;
- ++frame_interval.index) {
- if (frame_interval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
- if (frame_interval.discrete.numerator != 0) {
- frame_rates.push_back(
- frame_interval.discrete.denominator /
- static_cast<float>(frame_interval.discrete.numerator));
- }
- } else if (frame_interval.type == V4L2_FRMIVAL_TYPE_CONTINUOUS ||
- frame_interval.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
- // TODO(mcasas): see http://crbug.com/249953, support these devices.
- NOTIMPLEMENTED_LOG_ONCE();
- break;
+class DevVideoFilePathsDeviceProvider
+ : public VideoCaptureDeviceFactoryLinux::DeviceProvider {
+ public:
+ void GetDeviceIds(std::vector<std::string>* target_container) override {
+ const base::FilePath path("/dev/");
+ base::FileEnumerator enumerator(path, false, base::FileEnumerator::FILES,
+ "video*");
+ while (!enumerator.Next().empty()) {
+ const base::FileEnumerator::FileInfo info = enumerator.GetInfo();
+ target_container->emplace_back(path.value() + info.GetName().value());
}
}
- // Some devices, e.g. Kinect, do not enumerate any frame rates, see
- // http://crbug.com/412284. Set their frame_rate to zero.
- if (frame_rates.empty())
- frame_rates.push_back(0);
- return frame_rates;
-}
-void GetSupportedFormatsForV4L2BufferType(
- int fd,
- VideoCaptureFormats* supported_formats) {
- v4l2_fmtdesc v4l2_format = {};
- v4l2_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- for (; HANDLE_EINTR(ioctl(fd, VIDIOC_ENUM_FMT, &v4l2_format)) == 0;
- ++v4l2_format.index) {
- VideoCaptureFormat supported_format;
- supported_format.pixel_format =
- VideoCaptureDeviceLinux::V4l2FourCcToChromiumPixelFormat(
- v4l2_format.pixelformat);
+ std::string GetDeviceModelId(const std::string& device_id) override {
+ const std::string file_name = ExtractFileNameFromDeviceId(device_id);
+ std::string usb_id;
+ const std::string vid_path =
+ base::StringPrintf(kVidPathTemplate, file_name.c_str());
+ if (!ReadIdFile(vid_path, &usb_id))
+ return usb_id;
- if (supported_format.pixel_format == PIXEL_FORMAT_UNKNOWN)
- continue;
+ usb_id.append(":");
+ const std::string pid_path =
+ base::StringPrintf(kPidPathTemplate, file_name.c_str());
+ if (!ReadIdFile(pid_path, &usb_id))
+ usb_id.clear();
- v4l2_frmsizeenum frame_size = {};
- frame_size.pixel_format = v4l2_format.pixelformat;
- for (; HANDLE_EINTR(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frame_size)) == 0;
- ++frame_size.index) {
- if (frame_size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
- supported_format.frame_size.SetSize(frame_size.discrete.width,
- frame_size.discrete.height);
- } else if (frame_size.type == V4L2_FRMSIZE_TYPE_STEPWISE ||
- frame_size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
- // TODO(mcasas): see http://crbug.com/249953, support these devices.
- NOTIMPLEMENTED_LOG_ONCE();
- }
+ return usb_id;
+ }
- const std::list<float> frame_rates = GetFrameRateList(
- fd, v4l2_format.pixelformat, frame_size.discrete.width,
- frame_size.discrete.height);
- for (const auto& frame_rate : frame_rates) {
- supported_format.frame_rate = frame_rate;
- supported_formats->push_back(supported_format);
- DVLOG(1) << VideoCaptureFormat::ToString(supported_format);
- }
+ std::string GetDeviceDisplayName(const std::string& device_id) override {
+ const std::string file_name = ExtractFileNameFromDeviceId(device_id);
+ const std::string interface_path =
+ base::StringPrintf(kInterfacePathTemplate, file_name.c_str());
+ std::string display_name;
+ if (!base::ReadFileToStringWithMaxSize(base::FilePath(interface_path),
+ &display_name,
+ kMaxInterfaceNameSize)) {
+ return std::string();
}
+ return display_name;
}
-}
-std::string ExtractFileNameFromDeviceId(const std::string& device_id) {
- // |unique_id| is of the form "/dev/video2". |file_name| is "video2".
- const char kDevDir[] = "/dev/";
- DCHECK(base::StartsWith(device_id, kDevDir, base::CompareCase::SENSITIVE));
- return device_id.substr(strlen(kDevDir), device_id.length());
-}
-
-std::string GetDeviceModelId(const std::string& device_id) {
- const std::string file_name = ExtractFileNameFromDeviceId(device_id);
- std::string usb_id;
- const std::string vid_path =
- base::StringPrintf(kVidPathTemplate, file_name.c_str());
- if (!ReadIdFile(vid_path, &usb_id))
- return usb_id;
-
- usb_id.append(":");
- const std::string pid_path =
- base::StringPrintf(kPidPathTemplate, file_name.c_str());
- if (!ReadIdFile(pid_path, &usb_id))
- usb_id.clear();
-
- return usb_id;
-}
+ VideoFacingMode GetCameraFacing(const std::string& device_id,
+ const std::string& model_id) override {
+#if defined(OS_CHROMEOS)
+ return GetCameraConfig()->GetCameraFacing(device_id, model_id);
+#else
+ NOTREACHED();
+ return MEDIA_VIDEO_FACING_NONE;
+#endif
+ }
-std::string GetDeviceDisplayName(const std::string& device_id) {
- const std::string file_name = ExtractFileNameFromDeviceId(device_id);
- const std::string interface_path =
- base::StringPrintf(kInterfacePathTemplate, file_name.c_str());
- std::string display_name;
- if (!base::ReadFileToStringWithMaxSize(base::FilePath(interface_path),
- &display_name,
- kMaxInterfaceNameSize)) {
- return std::string();
+ int GetOrientation(const std::string& device_id,
+ const std::string& model_id) override {
+#if defined(OS_CHROMEOS)
+ return GetCameraConfig()->GetOrientation(device_id, model_id);
+#else
+ NOTREACHED();
+ return 0;
+#endif
}
- return display_name;
-}
+};
} // namespace
VideoCaptureDeviceFactoryLinux::VideoCaptureDeviceFactoryLinux(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner)
- : ui_task_runner_(ui_task_runner) {
-}
+ : v4l2_(new V4L2CaptureDeviceImpl()),
+ device_provider_(new DevVideoFilePathsDeviceProvider()),
+ ui_task_runner_(ui_task_runner) {}
VideoCaptureDeviceFactoryLinux::~VideoCaptureDeviceFactoryLinux() = default;
+void VideoCaptureDeviceFactoryLinux::SetV4L2EnvironmentForTesting(
+ scoped_refptr<V4L2CaptureDevice> v4l2,
+ std::unique_ptr<VideoCaptureDeviceFactoryLinux::DeviceProvider>
+ device_provider) {
+ v4l2_ = std::move(v4l2);
+ device_provider_ = std::move(device_provider);
+}
+
std::unique_ptr<VideoCaptureDevice>
VideoCaptureDeviceFactoryLinux::CreateDevice(
const VideoCaptureDeviceDescriptor& device_descriptor) {
DCHECK(thread_checker_.CalledOnValidThread());
#if defined(OS_CHROMEOS)
- VideoCaptureDeviceChromeOS* self =
- new VideoCaptureDeviceChromeOS(ui_task_runner_, device_descriptor);
+ ChromeOSDeviceCameraConfig camera_config(
+ device_provider_->GetCameraFacing(device_descriptor.device_id,
+ device_descriptor.model_id),
+ device_provider_->GetOrientation(device_descriptor.device_id,
+ device_descriptor.model_id));
+ VideoCaptureDeviceChromeOS* self = new VideoCaptureDeviceChromeOS(
+ camera_config, ui_task_runner_, v4l2_.get(), device_descriptor);
#else
VideoCaptureDeviceLinux* self =
- new VideoCaptureDeviceLinux(device_descriptor);
+ new VideoCaptureDeviceLinux(v4l2_.get(), device_descriptor);
#endif
if (!self)
return std::unique_ptr<VideoCaptureDevice>();
@@ -227,38 +189,36 @@ void VideoCaptureDeviceFactoryLinux::GetDeviceDescriptors(
VideoCaptureDeviceDescriptors* device_descriptors) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(device_descriptors->empty());
- const base::FilePath path("/dev/");
- base::FileEnumerator enumerator(path, false, base::FileEnumerator::FILES,
- "video*");
-
- while (!enumerator.Next().empty()) {
- const base::FileEnumerator::FileInfo info = enumerator.GetInfo();
- const std::string unique_id = path.value() + info.GetName().value();
- const base::ScopedFD fd(HANDLE_EINTR(open(unique_id.c_str(), O_RDONLY)));
+ std::vector<std::string> filepaths;
+ device_provider_->GetDeviceIds(&filepaths);
+ for (auto& unique_id : filepaths) {
+ const base::ScopedFD fd(
+ HANDLE_EINTR(v4l2_->open(unique_id.c_str(), O_RDONLY)));
if (!fd.is_valid()) {
- DLOG(ERROR) << "Couldn't open " << info.GetName().value();
+ DLOG(ERROR) << "Couldn't open " << unique_id;
continue;
}
- // Test if this is a V4L2 capture device and if it has at least one
- // supported capture format. Devices that have capture and output
+ // Test if this is a V4L2CaptureDevice capture device and if it has at least
+ // one supported capture format. Devices that have capture and output
// capabilities at the same time are memory-to-memory and are skipped, see
// http://crbug.com/139356.
v4l2_capability cap;
- if ((HANDLE_EINTR(ioctl(fd.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
+ if ((HANDLE_EINTR(v4l2_->ioctl(fd.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE &&
!(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) &&
HasUsableFormats(fd.get(), cap.capabilities)) {
- const std::string model_id = GetDeviceModelId(unique_id);
- std::string display_name = GetDeviceDisplayName(unique_id);
+ const std::string model_id =
+ device_provider_->GetDeviceModelId(unique_id);
+ std::string display_name =
+ device_provider_->GetDeviceDisplayName(unique_id);
if (display_name.empty())
display_name = reinterpret_cast<char*>(cap.card);
#if defined(OS_CHROMEOS)
- static CameraConfigChromeOS* config = new CameraConfigChromeOS();
device_descriptors->emplace_back(
display_name, unique_id, model_id,
VideoCaptureApi::LINUX_V4L2_SINGLE_PLANE,
VideoCaptureTransportType::OTHER_TRANSPORT,
- config->GetCameraFacing(unique_id, model_id));
+ device_provider_->GetCameraFacing(unique_id, model_id));
#else
device_descriptors->emplace_back(
display_name, unique_id, model_id,
@@ -278,7 +238,8 @@ void VideoCaptureDeviceFactoryLinux::GetSupportedFormats(
DCHECK(thread_checker_.CalledOnValidThread());
if (device.device_id.empty())
return;
- base::ScopedFD fd(HANDLE_EINTR(open(device.device_id.c_str(), O_RDONLY)));
+ base::ScopedFD fd(
+ HANDLE_EINTR(v4l2_->open(device.device_id.c_str(), O_RDONLY)));
if (!fd.is_valid()) // Failed to open this device.
return;
supported_formats->clear();
@@ -287,16 +248,100 @@ void VideoCaptureDeviceFactoryLinux::GetSupportedFormats(
GetSupportedFormatsForV4L2BufferType(fd.get(), supported_formats);
}
-#if !defined(OS_CHROMEOS)
-// static
-VideoCaptureDeviceFactory*
-VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
- return new VideoCaptureDeviceFactoryLinux(ui_task_runner);
+bool VideoCaptureDeviceFactoryLinux::HasUsableFormats(int fd,
+ uint32_t capabilities) {
+ if (!(capabilities & V4L2_CAP_VIDEO_CAPTURE))
+ return false;
+
+ const std::list<uint32_t>& usable_fourccs =
+ VideoCaptureDeviceLinux::GetListOfUsableFourCCs(false);
+ v4l2_fmtdesc fmtdesc = {};
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ for (; HANDLE_EINTR(v4l2_->ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) == 0;
+ ++fmtdesc.index) {
+ if (std::find(usable_fourccs.begin(), usable_fourccs.end(),
+ fmtdesc.pixelformat) != usable_fourccs.end()) {
+ return true;
+ }
+ }
+
+ DLOG(ERROR) << "No usable formats found";
+ return false;
+}
+
+std::list<float> VideoCaptureDeviceFactoryLinux::GetFrameRateList(
+ int fd,
+ uint32_t fourcc,
+ uint32_t width,
+ uint32_t height) {
+ std::list<float> frame_rates;
+
+ v4l2_frmivalenum frame_interval = {};
+ frame_interval.pixel_format = fourcc;
+ frame_interval.width = width;
+ frame_interval.height = height;
+ for (; HANDLE_EINTR(v4l2_->ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS,
+ &frame_interval)) == 0;
+ ++frame_interval.index) {
+ if (frame_interval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+ if (frame_interval.discrete.numerator != 0) {
+ frame_rates.push_back(
+ frame_interval.discrete.denominator /
+ static_cast<float>(frame_interval.discrete.numerator));
+ }
+ } else if (frame_interval.type == V4L2_FRMIVAL_TYPE_CONTINUOUS ||
+ frame_interval.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+ // TODO(mcasas): see http://crbug.com/249953, support these devices.
+ NOTIMPLEMENTED_LOG_ONCE();
+ break;
+ }
+ }
+ // Some devices, e.g. Kinect, do not enumerate any frame rates, see
+ // http://crbug.com/412284. Set their frame_rate to zero.
+ if (frame_rates.empty())
+ frame_rates.push_back(0);
+ return frame_rates;
+}
+
+void VideoCaptureDeviceFactoryLinux::GetSupportedFormatsForV4L2BufferType(
+ int fd,
+ VideoCaptureFormats* supported_formats) {
+ v4l2_fmtdesc v4l2_format = {};
+ v4l2_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ for (; HANDLE_EINTR(v4l2_->ioctl(fd, VIDIOC_ENUM_FMT, &v4l2_format)) == 0;
+ ++v4l2_format.index) {
+ VideoCaptureFormat supported_format;
+ supported_format.pixel_format =
+ VideoCaptureDeviceLinux::V4l2FourCcToChromiumPixelFormat(
+ v4l2_format.pixelformat);
+
+ if (supported_format.pixel_format == PIXEL_FORMAT_UNKNOWN)
+ continue;
+
+ v4l2_frmsizeenum frame_size = {};
+ frame_size.pixel_format = v4l2_format.pixelformat;
+ for (; HANDLE_EINTR(
+ v4l2_->ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frame_size)) == 0;
+ ++frame_size.index) {
+ if (frame_size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+ supported_format.frame_size.SetSize(frame_size.discrete.width,
+ frame_size.discrete.height);
+ } else if (frame_size.type == V4L2_FRMSIZE_TYPE_STEPWISE ||
+ frame_size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
+ // TODO(mcasas): see http://crbug.com/249953, support these devices.
+ NOTIMPLEMENTED_LOG_ONCE();
+ }
+
+ const std::list<float> frame_rates = GetFrameRateList(
+ fd, v4l2_format.pixelformat, frame_size.discrete.width,
+ frame_size.discrete.height);
+ for (const auto& frame_rate : frame_rates) {
+ supported_format.frame_rate = frame_rate;
+ supported_formats->push_back(supported_format);
+ DVLOG(1) << VideoCaptureFormat::ToString(supported_format);
+ }
+ }
+ }
}
-#endif
} // namespace media
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.h b/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
index 50e8db4db88..d3f06f9168e 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
@@ -11,6 +11,7 @@
#include "base/macros.h"
#include "base/single_thread_task_runner.h"
+#include "media/capture/video/linux/v4l2_capture_device.h"
#include "media/capture/video_capture_types.h"
namespace media {
@@ -20,10 +21,36 @@ namespace media {
class CAPTURE_EXPORT VideoCaptureDeviceFactoryLinux
: public VideoCaptureDeviceFactory {
public:
+ class CAPTURE_EXPORT DeviceProvider {
+ public:
+ virtual ~DeviceProvider() {}
+ virtual void GetDeviceIds(std::vector<std::string>* target_container) = 0;
+ virtual std::string GetDeviceModelId(const std::string& device_id) = 0;
+ virtual std::string GetDeviceDisplayName(const std::string& device_id) = 0;
+ virtual VideoFacingMode GetCameraFacing(const std::string& device_id,
+ const std::string& model_id) = 0;
+ // Get the orientation of the camera. The value is the angle that the camera
+ // image needs to be rotated clockwise so it shows correctly on the display
+ // in its natural orientation. It should be 0, 90, 180, or 270.
+ //
+ // For example, suppose a device has a naturally tall screen. The
+ // back-facing camera sensor is mounted in landscape. You are looking at the
+ // screen. If the top side of the camera sensor is aligned with the right
+ // edge of the screen in natural orientation, the value should be 90. If the
+ // top side of a front-facing camera sensor is aligned with the right of the
+ // screen, the value should be 270.
+ virtual int GetOrientation(const std::string& device_id,
+ const std::string& model_id) = 0;
+ };
+
explicit VideoCaptureDeviceFactoryLinux(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
~VideoCaptureDeviceFactoryLinux() override;
+ void SetV4L2EnvironmentForTesting(
+ scoped_refptr<V4L2CaptureDevice> v4l2,
+ std::unique_ptr<DeviceProvider> device_provider);
+
std::unique_ptr<VideoCaptureDevice> CreateDevice(
const VideoCaptureDeviceDescriptor& device_descriptor) override;
void GetDeviceDescriptors(
@@ -33,6 +60,17 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryLinux
VideoCaptureFormats* supported_formats) override;
private:
+ bool HasUsableFormats(int fd, uint32_t capabilities);
+ std::list<float> GetFrameRateList(int fd,
+ uint32_t fourcc,
+ uint32_t width,
+ uint32_t height);
+ void GetSupportedFormatsForV4L2BufferType(
+ int fd,
+ VideoCaptureFormats* supported_formats);
+
+ scoped_refptr<V4L2CaptureDevice> v4l2_;
+ std::unique_ptr<DeviceProvider> device_provider_;
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryLinux);
};
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
new file mode 100644
index 00000000000..aa25be89fb6
--- /dev/null
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
@@ -0,0 +1,107 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/linux/video_capture_device_factory_linux.h"
+#include "base/run_loop.h"
+#include "base/test/scoped_task_environment.h"
+#include "media/capture/video/linux/fake_v4l2_impl.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+
+namespace media {
+
+class DescriptorDeviceProvider
+ : public VideoCaptureDeviceFactoryLinux::DeviceProvider {
+ public:
+ void AddDevice(const VideoCaptureDeviceDescriptor& descriptor) {
+ descriptors_.emplace_back(descriptor);
+ }
+
+ void GetDeviceIds(std::vector<std::string>* target_container) override {
+ for (const auto& entry : descriptors_) {
+ target_container->emplace_back(entry.device_id);
+ }
+ }
+
+ std::string GetDeviceModelId(const std::string& device_id) override {
+ auto iter =
+ std::find_if(descriptors_.begin(), descriptors_.end(),
+ [&device_id](const VideoCaptureDeviceDescriptor& val) {
+ return val.device_id == device_id;
+ });
+ if (iter == descriptors_.end())
+ CHECK(false) << "Unknown device_id " << device_id;
+
+ return iter->model_id;
+ }
+
+ std::string GetDeviceDisplayName(const std::string& device_id) override {
+ auto iter =
+ std::find_if(descriptors_.begin(), descriptors_.end(),
+ [&device_id](const VideoCaptureDeviceDescriptor& val) {
+ return val.device_id == device_id;
+ });
+ if (iter == descriptors_.end())
+ CHECK(false) << "Unknown device_id " << device_id;
+
+ return iter->display_name();
+ }
+
+ VideoFacingMode GetCameraFacing(const std::string& device_id,
+ const std::string& model_id) override {
+ return MEDIA_VIDEO_FACING_NONE;
+ }
+
+ int GetOrientation(const std::string& device_id,
+ const std::string& model_id) override {
+ return 0;
+ }
+
+ private:
+ std::vector<VideoCaptureDeviceDescriptor> descriptors_;
+};
+
+class VideoCaptureDeviceFactoryLinuxTest : public ::testing::Test {
+ public:
+ VideoCaptureDeviceFactoryLinuxTest() {}
+ ~VideoCaptureDeviceFactoryLinuxTest() override = default;
+
+ void SetUp() override {
+ factory_ = std::make_unique<VideoCaptureDeviceFactoryLinux>(
+ base::ThreadTaskRunnerHandle::Get());
+ scoped_refptr<FakeV4L2Impl> fake_v4l2(new FakeV4L2Impl());
+ fake_v4l2_ = fake_v4l2.get();
+ auto fake_device_provider = std::make_unique<DescriptorDeviceProvider>();
+ fake_device_provider_ = fake_device_provider.get();
+ factory_->SetV4L2EnvironmentForTesting(std::move(fake_v4l2),
+ std::move(fake_device_provider));
+ }
+
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
+ FakeV4L2Impl* fake_v4l2_;
+ DescriptorDeviceProvider* fake_device_provider_;
+ std::unique_ptr<VideoCaptureDeviceFactoryLinux> factory_;
+};
+
+TEST_F(VideoCaptureDeviceFactoryLinuxTest, EnumerateSingleFakeV4L2Device) {
+ // Setup
+ const std::string stub_display_name = "Fake Device 0";
+ const std::string stub_device_id = "/dev/video0";
+ VideoCaptureDeviceDescriptor descriptor(stub_display_name, stub_device_id);
+ fake_device_provider_->AddDevice(descriptor);
+ fake_v4l2_->AddDevice(stub_device_id, FakeV4L2DeviceConfig(descriptor));
+
+ // Exercise
+ VideoCaptureDeviceDescriptors descriptors;
+ factory_->GetDeviceDescriptors(&descriptors);
+
+ // Verification
+ ASSERT_EQ(1u, descriptors.size());
+ ASSERT_EQ(stub_device_id, descriptors[0].device_id);
+ ASSERT_EQ(stub_display_name, descriptors[0].display_name());
+}
+
+}; // namespace media
diff --git a/chromium/media/capture/video/linux/video_capture_device_linux.cc b/chromium/media/capture/video/linux/video_capture_device_linux.cc
index 594596a07de..808b74f418d 100644
--- a/chromium/media/capture/video/linux/video_capture_device_linux.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_linux.cc
@@ -37,8 +37,10 @@ std::list<uint32_t> VideoCaptureDeviceLinux::GetListOfUsableFourCCs(
}
VideoCaptureDeviceLinux::VideoCaptureDeviceLinux(
+ scoped_refptr<V4L2CaptureDevice> v4l2,
const VideoCaptureDeviceDescriptor& device_descriptor)
: device_descriptor_(device_descriptor),
+ v4l2_(std::move(v4l2)),
v4l2_thread_("V4L2CaptureThread") {}
VideoCaptureDeviceLinux::~VideoCaptureDeviceLinux() {
@@ -59,7 +61,8 @@ void VideoCaptureDeviceLinux::AllocateAndStart(
const int line_frequency =
TranslatePowerLineFrequencyToV4L2(GetPowerLineFrequency(params));
capture_impl_ = std::make_unique<V4L2CaptureDelegate>(
- device_descriptor_, v4l2_thread_.task_runner(), line_frequency);
+ v4l2_.get(), device_descriptor_, v4l2_thread_.task_runner(),
+ line_frequency);
if (!capture_impl_) {
client->OnError(FROM_HERE, "Failed to create VideoCaptureDelegate");
return;
diff --git a/chromium/media/capture/video/linux/video_capture_device_linux.h b/chromium/media/capture/video/linux/video_capture_device_linux.h
index cd34355e84b..e94cc6ac3e1 100644
--- a/chromium/media/capture/video/linux/video_capture_device_linux.h
+++ b/chromium/media/capture/video/linux/video_capture_device_linux.h
@@ -18,6 +18,7 @@
#include "base/files/scoped_file.h"
#include "base/macros.h"
#include "base/threading/thread.h"
+#include "media/capture/video/linux/v4l2_capture_device_impl.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video_capture_types.h"
@@ -32,6 +33,7 @@ class VideoCaptureDeviceLinux : public VideoCaptureDevice {
static std::list<uint32_t> GetListOfUsableFourCCs(bool favour_mjpeg);
explicit VideoCaptureDeviceLinux(
+ scoped_refptr<V4L2CaptureDevice> v4l2,
const VideoCaptureDeviceDescriptor& device_descriptor);
~VideoCaptureDeviceLinux() override;
@@ -52,6 +54,8 @@ class VideoCaptureDeviceLinux : public VideoCaptureDevice {
private:
static int TranslatePowerLineFrequencyToV4L2(PowerLineFrequency frequency);
+ const scoped_refptr<V4L2CaptureDevice> v4l2_;
+
// Internal delegate doing the actual capture setting, buffer allocation and
// circulation with the V4L2 API. Created in the thread where
// VideoCaptureDeviceLinux lives but otherwise operating and deleted on
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm b/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
index d240605877f..48d3552524d 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
@@ -120,14 +120,4 @@ void VideoCaptureDeviceFactoryMac::GetSupportedFormats(
}
}
-// static
-VideoCaptureDeviceFactory*
-VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
- return new VideoCaptureDeviceFactoryMac();
-}
-
} // namespace media
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
index 69629786d89..9b8befe84bf 100644
--- a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
@@ -93,7 +93,9 @@ class FakeGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
gfx::GpuMemoryBufferId GetId() const override { return handle_.id; }
- gfx::GpuMemoryBufferHandle GetHandle() const override { return handle_; }
+ gfx::GpuMemoryBufferHandle GetHandle() const override {
+ return gfx::CloneHandleForIPC(handle_);
+ }
ClientBuffer AsClientBuffer() override {
NOTREACHED();
diff --git a/chromium/media/capture/video/shared_memory_buffer_tracker.cc b/chromium/media/capture/video/shared_memory_buffer_tracker.cc
index 7cd4a05e580..e375a183c12 100644
--- a/chromium/media/capture/video/shared_memory_buffer_tracker.cc
+++ b/chromium/media/capture/video/shared_memory_buffer_tracker.cc
@@ -39,4 +39,8 @@ SharedMemoryBufferTracker::GetNonOwnedSharedMemoryHandleForLegacyIPC() {
return provider_.GetNonOwnedSharedMemoryHandleForLegacyIPC();
}
+uint32_t SharedMemoryBufferTracker::GetMemorySizeInBytes() {
+ return provider_.GetMemorySizeInBytes();
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/shared_memory_buffer_tracker.h b/chromium/media/capture/video/shared_memory_buffer_tracker.h
index 2c92ca017e2..92abef763c9 100644
--- a/chromium/media/capture/video/shared_memory_buffer_tracker.h
+++ b/chromium/media/capture/video/shared_memory_buffer_tracker.h
@@ -27,6 +27,7 @@ class SharedMemoryBufferTracker final : public VideoCaptureBufferTracker {
std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() override;
mojo::ScopedSharedBufferHandle GetHandleForTransit(bool read_only) override;
base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC() override;
+ uint32_t GetMemorySizeInBytes() override;
private:
SharedMemoryHandleProvider provider_;
diff --git a/chromium/media/capture/video/shared_memory_handle_provider.cc b/chromium/media/capture/video/shared_memory_handle_provider.cc
index 72a909337e6..2e2e7e78603 100644
--- a/chromium/media/capture/video/shared_memory_handle_provider.cc
+++ b/chromium/media/capture/video/shared_memory_handle_provider.cc
@@ -63,6 +63,25 @@ bool SharedMemoryHandleProvider::InitFromMojoHandle(
return true;
}
+#if defined(OS_LINUX)
+bool SharedMemoryHandleProvider::InitAsReadOnlyFromRawFileDescriptor(
+ mojo::ScopedHandle fd_handle,
+ uint32_t memory_size_in_bytes) {
+ base::PlatformFile platform_file;
+ const MojoResult result =
+ mojo::UnwrapPlatformFile(std::move(fd_handle), &platform_file);
+ if (result != MOJO_RESULT_OK)
+ return false;
+ base::UnguessableToken guid = base::UnguessableToken::Create();
+ base::SharedMemoryHandle memory_handle(
+ base::FileDescriptor(platform_file, true), 0u, guid);
+ mapped_size_ = memory_size_in_bytes;
+ read_only_flag_ = true;
+ shared_memory_.emplace(memory_handle, read_only_flag_);
+ return true;
+}
+#endif // defined(OS_LINUX)
+
mojo::ScopedSharedBufferHandle
SharedMemoryHandleProvider::GetHandleForInterProcessTransit(bool read_only) {
if (read_only_flag_ && !read_only) {
@@ -88,6 +107,10 @@ SharedMemoryHandleProvider::GetNonOwnedSharedMemoryHandleForLegacyIPC() {
return shared_memory_->handle();
}
+uint32_t SharedMemoryHandleProvider::GetMemorySizeInBytes() {
+ return static_cast<uint32_t>(mapped_size_);
+}
+
std::unique_ptr<VideoCaptureBufferHandle>
SharedMemoryHandleProvider::GetHandleForInProcessAccess() {
{
diff --git a/chromium/media/capture/video/shared_memory_handle_provider.h b/chromium/media/capture/video/shared_memory_handle_provider.h
index 8a14f3a4d35..7cd578bcba6 100644
--- a/chromium/media/capture/video/shared_memory_handle_provider.h
+++ b/chromium/media/capture/video/shared_memory_handle_provider.h
@@ -10,6 +10,7 @@
#include "base/logging.h"
#include "base/memory/shared_memory.h"
#include "base/optional.h"
+#include "build/build_config.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/video_capture_buffer_handle.h"
#include "media/capture/video/video_capture_device.h"
@@ -35,6 +36,15 @@ class CAPTURE_EXPORT SharedMemoryHandleProvider
// if the operation failed.
bool InitFromMojoHandle(mojo::ScopedSharedBufferHandle buffer_handle);
+// This requires platforms where base::SharedMemoryHandle is backed by a
+// file descriptor.
+#if defined(OS_LINUX)
+ bool InitAsReadOnlyFromRawFileDescriptor(mojo::ScopedHandle fd_handle,
+ uint32_t memory_size_in_bytes);
+#endif // defined(OS_LINUX)
+
+ uint32_t GetMemorySizeInBytes();
+
// Implementation of Buffer::HandleProvider:
mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit(
bool read_only) override;
diff --git a/chromium/media/capture/video/video_capture_buffer_pool.h b/chromium/media/capture/video/video_capture_buffer_pool.h
index 1760aea78a4..1c4aa541e93 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool.h
+++ b/chromium/media/capture/video/video_capture_buffer_pool.h
@@ -7,6 +7,7 @@
#include "base/memory/ref_counted.h"
#include "media/capture/capture_export.h"
+#include "media/capture/mojom/video_capture_types.mojom.h"
#include "media/capture/video_capture_types.h"
#include "mojo/public/cpp/system/buffer.h"
#include "ui/gfx/geometry/size.h"
@@ -50,6 +51,9 @@ class CAPTURE_EXPORT VideoCaptureBufferPool
virtual base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC(
int buffer_id) = 0;
+ virtual mojom::SharedMemoryViaRawFileDescriptorPtr
+ CreateSharedMemoryViaRawFileDescriptorStruct(int buffer_id) = 0;
+
// Try and obtain a read/write access to the buffer.
virtual std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess(
int buffer_id) = 0;
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
index 477f5697029..9359b38be22 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
@@ -11,6 +11,7 @@
#include "build/build_config.h"
#include "media/capture/video/video_capture_buffer_handle.h"
#include "media/capture/video/video_capture_buffer_tracker.h"
+#include "mojo/public/cpp/system/platform_handle.h"
#include "ui/gfx/buffer_format_util.h"
namespace media {
@@ -53,6 +54,33 @@ VideoCaptureBufferPoolImpl::GetNonOwnedSharedMemoryHandleForLegacyIPC(
return tracker->GetNonOwnedSharedMemoryHandleForLegacyIPC();
}
+mojom::SharedMemoryViaRawFileDescriptorPtr
+VideoCaptureBufferPoolImpl::CreateSharedMemoryViaRawFileDescriptorStruct(
+ int buffer_id) {
+// This requires platforms where base::SharedMemoryHandle is backed by a
+// file descriptor.
+#if defined(OS_LINUX)
+ base::AutoLock lock(lock_);
+
+ VideoCaptureBufferTracker* tracker = GetTracker(buffer_id);
+ if (!tracker) {
+ NOTREACHED() << "Invalid buffer_id.";
+ return 0u;
+ }
+
+ auto result = mojom::SharedMemoryViaRawFileDescriptor::New();
+ result->file_descriptor_handle = mojo::WrapPlatformFile(
+ base::SharedMemory::DuplicateHandle(
+ tracker->GetNonOwnedSharedMemoryHandleForLegacyIPC())
+ .GetHandle());
+ result->shared_memory_size_in_bytes = tracker->GetMemorySizeInBytes();
+ return result;
+#else
+ NOTREACHED();
+ return mojom::SharedMemoryViaRawFileDescriptorPtr();
+#endif
+}
+
std::unique_ptr<VideoCaptureBufferHandle>
VideoCaptureBufferPoolImpl::GetHandleForInProcessAccess(int buffer_id) {
base::AutoLock lock(lock_);
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.h b/chromium/media/capture/video/video_capture_buffer_pool_impl.h
index 34b585f0885..2cde52214b3 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.h
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.h
@@ -40,6 +40,8 @@ class CAPTURE_EXPORT VideoCaptureBufferPoolImpl
bool read_only) override;
base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC(
int buffer_id) override;
+ mojom::SharedMemoryViaRawFileDescriptorPtr
+ CreateSharedMemoryViaRawFileDescriptorStruct(int buffer_id) override;
std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess(
int buffer_id) override;
int ReserveForProducer(const gfx::Size& dimensions,
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker.h b/chromium/media/capture/video/video_capture_buffer_tracker.h
index 040ff5368bb..a4f7dff560c 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker.h
+++ b/chromium/media/capture/video/video_capture_buffer_tracker.h
@@ -44,6 +44,7 @@ class CAPTURE_EXPORT VideoCaptureBufferTracker {
bool read_only) = 0;
virtual base::SharedMemoryHandle
GetNonOwnedSharedMemoryHandleForLegacyIPC() = 0;
+ virtual uint32_t GetMemorySizeInBytes() = 0;
private:
// |dimensions_| may change as a VideoCaptureBufferTracker is re-used, but
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index 1a1bc937b31..64dd779fc2f 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -64,7 +64,8 @@ void GetI420BufferAccess(
*y_plane_stride = dimensions.width();
*uv_plane_stride = *y_plane_stride / 2;
}
-}
+
+} // anonymous namespace
namespace media {
@@ -96,12 +97,14 @@ class BufferPoolBufferHandleProvider
};
VideoCaptureDeviceClient::VideoCaptureDeviceClient(
+ VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
scoped_refptr<VideoCaptureBufferPool> buffer_pool,
- const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory)
- : receiver_(std::move(receiver)),
- jpeg_decoder_factory_callback_(jpeg_decoder_factory),
- external_jpeg_decoder_initialized_(false),
+ VideoCaptureJpegDecoderFactoryCB optional_jpeg_decoder_factory_callback)
+ : target_buffer_type_(target_buffer_type),
+ receiver_(std::move(receiver)),
+ optional_jpeg_decoder_factory_callback_(
+ std::move(optional_jpeg_decoder_factory_callback)),
buffer_pool_(std::move(buffer_pool)),
last_captured_pixel_format_(PIXEL_FORMAT_UNKNOWN) {
on_started_using_gpu_cb_ =
@@ -138,6 +141,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
base::TimeTicks reference_time,
base::TimeDelta timestamp,
int frame_feedback_id) {
+ DFAKE_SCOPED_RECURSIVE_LOCK(call_from_producer_);
TRACE_EVENT0("media", "VideoCaptureDeviceClient::OnIncomingCapturedData");
if (last_captured_pixel_format_ != format.pixel_format) {
@@ -145,11 +149,11 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
last_captured_pixel_format_ = format.pixel_format;
if (format.pixel_format == PIXEL_FORMAT_MJPEG &&
- !external_jpeg_decoder_initialized_) {
- external_jpeg_decoder_initialized_ = true;
- external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run();
- if (external_jpeg_decoder_)
- external_jpeg_decoder_->Initialize();
+ optional_jpeg_decoder_factory_callback_) {
+ external_jpeg_decoder_ =
+ std::move(optional_jpeg_decoder_factory_callback_).Run();
+ DCHECK(external_jpeg_decoder_);
+ external_jpeg_decoder_->Initialize();
}
}
@@ -400,9 +404,21 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(const gfx::Size& frame_size,
if (!base::ContainsValue(buffer_ids_known_by_receiver_, buffer_id)) {
media::mojom::VideoBufferHandlePtr buffer_handle =
media::mojom::VideoBufferHandle::New();
- buffer_handle->set_shared_buffer_handle(
- buffer_pool_->GetHandleForInterProcessTransit(buffer_id,
- true /*read_only*/));
+ switch (target_buffer_type_) {
+ case VideoCaptureBufferType::kSharedMemory:
+ buffer_handle->set_shared_buffer_handle(
+ buffer_pool_->GetHandleForInterProcessTransit(buffer_id,
+ true /*read_only*/));
+ break;
+ case VideoCaptureBufferType::kSharedMemoryViaRawFileDescriptor:
+ buffer_handle->set_shared_memory_via_raw_file_descriptor(
+ buffer_pool_->CreateSharedMemoryViaRawFileDescriptorStruct(
+ buffer_id));
+ break;
+ case VideoCaptureBufferType::kMailboxHolder:
+ NOTREACHED();
+ break;
+ }
receiver_->OnNewBuffer(buffer_id, std::move(buffer_handle));
buffer_ids_known_by_receiver_.push_back(buffer_id);
}
diff --git a/chromium/media/capture/video/video_capture_device_client.h b/chromium/media/capture/video/video_capture_device_client.h
index 7708de91fc7..01dacc7376c 100644
--- a/chromium/media/capture/video/video_capture_device_client.h
+++ b/chromium/media/capture/video/video_capture_device_client.h
@@ -15,6 +15,7 @@
#include "base/memory/ref_counted.h"
#include "base/threading/thread_collision_warner.h"
#include "media/capture/capture_export.h"
+#include "media/capture/mojom/video_capture_types.mojom.h"
#include "media/capture/video/video_capture_device.h"
namespace media {
@@ -23,11 +24,15 @@ class VideoFrameReceiver;
class VideoCaptureJpegDecoder;
using VideoCaptureJpegDecoderFactoryCB =
- base::Callback<std::unique_ptr<VideoCaptureJpegDecoder>()>;
+ base::OnceCallback<std::unique_ptr<VideoCaptureJpegDecoder>()>;
// Implementation of VideoCaptureDevice::Client that uses a buffer pool
// to provide buffers and converts incoming data to the I420 format for
-// consumption by a given VideoFrameReceiver.
+// consumption by a given VideoFrameReceiver. If
+// |optional_jpeg_decoder_factory_callback| is provided, the
+// VideoCaptureDeviceClient will attempt to use it for decoding of MJPEG frames.
+// Otherwise, it will use libyuv to perform MJPEG to I420 conversion in
+// software.
//
// Methods of this class may be called from any thread, and in practice will
// often be called on some auxiliary thread depending on the platform and the
@@ -39,9 +44,10 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
: public VideoCaptureDevice::Client {
public:
VideoCaptureDeviceClient(
+ VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
scoped_refptr<VideoCaptureBufferPool> buffer_pool,
- const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory);
+ VideoCaptureJpegDecoderFactoryCB optional_jpeg_decoder_factory_callback);
~VideoCaptureDeviceClient() override;
static Buffer MakeBufferStruct(
@@ -95,15 +101,14 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
base::TimeDelta timestamp,
int frame_feedback_id);
+ const VideoCaptureBufferType target_buffer_type_;
+
// The receiver to which we post events.
const std::unique_ptr<VideoFrameReceiver> receiver_;
std::vector<int> buffer_ids_known_by_receiver_;
- const VideoCaptureJpegDecoderFactoryCB jpeg_decoder_factory_callback_;
+ VideoCaptureJpegDecoderFactoryCB optional_jpeg_decoder_factory_callback_;
std::unique_ptr<VideoCaptureJpegDecoder> external_jpeg_decoder_;
-
- // Whether |external_jpeg_decoder_| has been initialized.
- bool external_jpeg_decoder_initialized_;
base::OnceClosure on_started_using_gpu_cb_;
// The pool of shared-memory buffers used for capturing.
diff --git a/chromium/media/capture/video/video_capture_device_client_unittest.cc b/chromium/media/capture/video/video_capture_device_client_unittest.cc
index a3467e7e17a..56ada337446 100644
--- a/chromium/media/capture/video/video_capture_device_client_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_client_unittest.cc
@@ -54,8 +54,8 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
gpu_memory_buffer_manager_ =
std::make_unique<unittest_internal::MockGpuMemoryBufferManager>();
device_client_ = std::make_unique<VideoCaptureDeviceClient>(
- std::move(controller), buffer_pool,
- base::Bind(&ReturnNullPtrAsJpecDecoder));
+ VideoCaptureBufferType::kSharedMemory, std::move(controller),
+ buffer_pool, base::BindRepeating(&ReturnNullPtrAsJpecDecoder));
}
~VideoCaptureDeviceClientTest() override = default;
diff --git a/chromium/media/capture/video/video_capture_device_descriptor.cc b/chromium/media/capture/video/video_capture_device_descriptor.cc
index 3a718ed8c6c..be6c87ec4ce 100644
--- a/chromium/media/capture/video/video_capture_device_descriptor.cc
+++ b/chromium/media/capture/video/video_capture_device_descriptor.cc
@@ -61,10 +61,10 @@ bool VideoCaptureDeviceDescriptor::operator<(
"FACING_ENVIRONMENT has a wrong value");
static_assert(kFacingMapping[MEDIA_VIDEO_FACING_USER] == 2,
"FACING_USER has a wrong value");
- if (kFacingMapping[facing] > kFacingMapping[other.facing])
- return true;
- if (device_id < other.device_id)
- return true;
+ if (kFacingMapping[facing] != kFacingMapping[other.facing])
+ return kFacingMapping[facing] > kFacingMapping[other.facing];
+ if (device_id != other.device_id)
+ return device_id < other.device_id;
return capture_api < other.capture_api;
}
@@ -90,6 +90,8 @@ const char* VideoCaptureDeviceDescriptor::GetCaptureApiTypeString() const {
return "Camera API2 Full";
case VideoCaptureApi::ANDROID_API2_LIMITED:
return "Camera API2 Limited";
+ case VideoCaptureApi::VIRTUAL_DEVICE:
+ return "Virtual Device";
case VideoCaptureApi::UNKNOWN:
return "Unknown";
}
diff --git a/chromium/media/capture/video/video_capture_device_descriptor.h b/chromium/media/capture/video/video_capture_device_descriptor.h
index 48eed13ece0..c53274df3bf 100644
--- a/chromium/media/capture/video/video_capture_device_descriptor.h
+++ b/chromium/media/capture/video/video_capture_device_descriptor.h
@@ -27,6 +27,7 @@ enum class VideoCaptureApi {
ANDROID_API2_LEGACY,
ANDROID_API2_FULL,
ANDROID_API2_LIMITED,
+ VIRTUAL_DEVICE,
UNKNOWN
};
diff --git a/chromium/media/capture/video/video_capture_device_factory.cc b/chromium/media/capture/video/video_capture_device_factory.cc
index 4dd4f7bde89..4d903b6a714 100644
--- a/chromium/media/capture/video/video_capture_device_factory.cc
+++ b/chromium/media/capture/video/video_capture_device_factory.cc
@@ -14,61 +14,12 @@
namespace media {
-// static
-std::unique_ptr<VideoCaptureDeviceFactory>
-VideoCaptureDeviceFactory::CreateFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
- const base::CommandLine* command_line =
- base::CommandLine::ForCurrentProcess();
- // Use a Fake or File Video Device Factory if the command line flags are
- // present, otherwise use the normal, platform-dependent, device factory.
- if (command_line->HasSwitch(switches::kUseFakeDeviceForMediaStream)) {
- if (command_line->HasSwitch(switches::kUseFileForFakeVideoCapture)) {
- return std::unique_ptr<VideoCaptureDeviceFactory>(
- new FileVideoCaptureDeviceFactory());
- } else {
- std::vector<FakeVideoCaptureDeviceSettings> config;
- FakeVideoCaptureDeviceFactory::ParseFakeDevicesConfigFromOptionsString(
- command_line->GetSwitchValueASCII(
- switches::kUseFakeDeviceForMediaStream),
- &config);
- auto result = std::make_unique<FakeVideoCaptureDeviceFactory>();
- result->SetToCustomDevicesConfig(config);
- return std::move(result);
- }
- } else {
- // |ui_task_runner| is needed for the Linux ChromeOS factory to retrieve
- // screen rotations.
- return std::unique_ptr<VideoCaptureDeviceFactory>(
- CreateVideoCaptureDeviceFactory(ui_task_runner, gpu_buffer_manager,
- std::move(jda_factory),
- std::move(jea_factory)));
- }
-}
-
VideoCaptureDeviceFactory::VideoCaptureDeviceFactory() {
thread_checker_.DetachFromThread();
}
VideoCaptureDeviceFactory::~VideoCaptureDeviceFactory() = default;
-#if !defined(OS_MACOSX) && !defined(OS_LINUX) && !defined(OS_ANDROID) && \
- !defined(OS_WIN)
-// static
-VideoCaptureDeviceFactory*
-VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
- NOTIMPLEMENTED();
- return NULL;
-}
-#endif
-
void VideoCaptureDeviceFactory::GetCameraLocationsAsync(
std::unique_ptr<VideoCaptureDeviceDescriptors> device_descriptors,
DeviceDescriptorsCallback result_callback) {
diff --git a/chromium/media/capture/video/video_capture_device_factory.h b/chromium/media/capture/video/video_capture_device_factory.h
index 1cd174ffc75..9ec274265c1 100644
--- a/chromium/media/capture/video/video_capture_device_factory.h
+++ b/chromium/media/capture/video/video_capture_device_factory.h
@@ -34,12 +34,6 @@ using MojoJpegEncodeAcceleratorFactoryCB =
// crbug.com/665065
class CAPTURE_EXPORT VideoCaptureDeviceFactory {
public:
- static std::unique_ptr<VideoCaptureDeviceFactory> CreateFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jpeg_decoder_factory,
- MojoJpegEncodeAcceleratorFactoryCB jpeg_encoder_factory);
-
VideoCaptureDeviceFactory();
virtual ~VideoCaptureDeviceFactory();
@@ -76,12 +70,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactory {
base::ThreadChecker thread_checker_;
private:
- static VideoCaptureDeviceFactory* CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory);
-
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactory);
};
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index 21143688320..dbb4f6565f5 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -21,7 +21,7 @@
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
-#include "media/capture/video/video_capture_device_factory.h"
+#include "media/capture/video/create_video_capture_device_factory.h"
#include "media/capture/video_capture_types.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -49,6 +49,7 @@
#include "chromeos/dbus/dbus_thread_manager.h"
#include "chromeos/dbus/fake_power_manager_client.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
+#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/local_gpu_memory_buffer_manager.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
@@ -287,22 +288,23 @@ class VideoCaptureDeviceTest
video_capture_client_(new MockVideoCaptureClient(
base::Bind(&VideoCaptureDeviceTest::OnFrameCaptured,
base::Unretained(this)))),
- image_capture_client_(new MockImageCaptureClient()),
+ image_capture_client_(new MockImageCaptureClient()) {
#if defined(OS_CHROMEOS)
- local_gpu_memory_buffer_manager_(new LocalGpuMemoryBufferManager()),
- dbus_setter_(chromeos::DBusThreadManager::GetSetterForTesting()),
-#endif
- video_capture_device_factory_(VideoCaptureDeviceFactory::CreateFactory(
- base::ThreadTaskRunnerHandle::Get(),
-#if defined(OS_CHROMEOS)
- local_gpu_memory_buffer_manager_.get(),
-#else
- nullptr,
+ local_gpu_memory_buffer_manager_ =
+ std::make_unique<LocalGpuMemoryBufferManager>();
+ dbus_setter_ = chromeos::DBusThreadManager::GetSetterForTesting();
+ VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
+ local_gpu_memory_buffer_manager_.get());
+ if (!CameraHalDispatcherImpl::GetInstance()->IsStarted()) {
+ CameraHalDispatcherImpl::GetInstance()->Start(
+ base::DoNothing::Repeatedly<
+ media::mojom::JpegDecodeAcceleratorRequest>(),
+ base::DoNothing::Repeatedly<
+ media::mojom::JpegEncodeAcceleratorRequest>());
+ }
#endif
- base::BindRepeating(
- [](media::mojom::JpegDecodeAcceleratorRequest) {}),
- base::DoNothing::Repeatedly<
- media::mojom::JpegEncodeAcceleratorRequest>())) {
+ video_capture_device_factory_ =
+ CreateVideoCaptureDeviceFactory(base::ThreadTaskRunnerHandle::Get());
}
void SetUp() override {
@@ -435,12 +437,10 @@ class VideoCaptureDeviceTest
const scoped_refptr<MockImageCaptureClient> image_capture_client_;
VideoCaptureFormat last_format_;
#if defined(OS_CHROMEOS)
- const std::unique_ptr<LocalGpuMemoryBufferManager>
- local_gpu_memory_buffer_manager_;
+ std::unique_ptr<LocalGpuMemoryBufferManager> local_gpu_memory_buffer_manager_;
std::unique_ptr<chromeos::DBusThreadManagerSetter> dbus_setter_;
#endif
- const std::unique_ptr<VideoCaptureDeviceFactory>
- video_capture_device_factory_;
+ std::unique_ptr<VideoCaptureDeviceFactory> video_capture_device_factory_;
};
// Cause hangs on Windows Debug. http://crbug.com/417824
diff --git a/chromium/media/capture/video/video_capture_jpeg_decoder.h b/chromium/media/capture/video/video_capture_jpeg_decoder.h
index c5b32cf4519..e2da00d54c8 100644
--- a/chromium/media/capture/video/video_capture_jpeg_decoder.h
+++ b/chromium/media/capture/video/video_capture_jpeg_decoder.h
@@ -13,6 +13,8 @@
namespace media {
+// All methods are allowed to be called from any thread, but calls must be
+// non-concurrently.
class CAPTURE_EXPORT VideoCaptureJpegDecoder {
public:
// Enumeration of decoder status. The enumeration is published for clients to
@@ -24,7 +26,7 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoder {
// decode error.
};
- using DecodeDoneCB = base::Callback<void(
+ using DecodeDoneCB = base::RepeatingCallback<void(
int buffer_id,
int frame_feedback_id,
std::unique_ptr<VideoCaptureDevice::Client::Buffer::
diff --git a/chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc b/chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc
new file mode 100644
index 00000000000..b82bae90b11
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_jpeg_decoder_impl.cc
@@ -0,0 +1,260 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/video_capture_jpeg_decoder_impl.h"
+
+#include "base/metrics/histogram_macros.h"
+#include "media/base/media_switches.h"
+
+namespace media {
+
+VideoCaptureJpegDecoderImpl::VideoCaptureJpegDecoderImpl(
+ MojoJpegDecodeAcceleratorFactoryCB jpeg_decoder_factory,
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ DecodeDoneCB decode_done_cb,
+ base::RepeatingCallback<void(const std::string&)> send_log_message_cb)
+ : jpeg_decoder_factory_(std::move(jpeg_decoder_factory)),
+ decoder_task_runner_(std::move(decoder_task_runner)),
+ decode_done_cb_(std::move(decode_done_cb)),
+ send_log_message_cb_(std::move(send_log_message_cb)),
+ has_received_decoded_frame_(false),
+ next_bitstream_buffer_id_(0),
+ in_buffer_id_(media::JpegDecodeAccelerator::kInvalidBitstreamBufferId),
+ decoder_status_(INIT_PENDING),
+ weak_ptr_factory_(this) {}
+
+VideoCaptureJpegDecoderImpl::~VideoCaptureJpegDecoderImpl() {
+ // |this| was set as |decoder_|'s client. |decoder_| has to be deleted on
+ // |decoder_task_runner_| before this destructor returns to ensure that it
+ // doesn't call back into its client.
+
+ if (!decoder_)
+ return;
+
+ if (decoder_task_runner_->RunsTasksInCurrentSequence()) {
+ decoder_.reset();
+ return;
+ }
+
+ base::WaitableEvent event(base::WaitableEvent::ResetPolicy::MANUAL,
+ base::WaitableEvent::InitialState::NOT_SIGNALED);
+ // base::Unretained is safe because |this| will be valid until |event|
+ // is signaled.
+ decoder_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VideoCaptureJpegDecoderImpl::DestroyDecoderOnIOThread,
+ base::Unretained(this), &event));
+ event.Wait();
+}
+
+void VideoCaptureJpegDecoderImpl::Initialize() {
+ if (!IsVideoCaptureAcceleratedJpegDecodingEnabled()) {
+ decoder_status_ = FAILED;
+ RecordInitDecodeUMA_Locked();
+ return;
+ }
+
+ decoder_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VideoCaptureJpegDecoderImpl::FinishInitialization,
+ weak_ptr_factory_.GetWeakPtr()));
+}
+
+VideoCaptureJpegDecoderImpl::STATUS VideoCaptureJpegDecoderImpl::GetStatus()
+ const {
+ base::AutoLock lock(lock_);
+ return decoder_status_;
+}
+
+void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
+ const uint8_t* data,
+ size_t in_buffer_size,
+ const media::VideoCaptureFormat& frame_format,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ media::VideoCaptureDevice::Client::Buffer out_buffer) {
+ DCHECK(decoder_);
+
+ TRACE_EVENT_ASYNC_BEGIN0("jpeg", "VideoCaptureJpegDecoderImpl decoding",
+ next_bitstream_buffer_id_);
+ TRACE_EVENT0("jpeg", "VideoCaptureJpegDecoderImpl::DecodeCapturedData");
+
+ // TODO(kcwu): enqueue decode requests in case decoding is not fast enough
+ // (say, if decoding time is longer than 16ms for 60fps 4k image)
+ {
+ base::AutoLock lock(lock_);
+ if (IsDecoding_Locked()) {
+ DVLOG(1) << "Drop captured frame. Previous jpeg frame is still decoding";
+ return;
+ }
+ }
+
+ // Enlarge input buffer if necessary.
+ if (!in_shared_memory_.get() ||
+ in_buffer_size > in_shared_memory_->mapped_size()) {
+ // Reserve 2x space to avoid frequent reallocations for initial frames.
+ const size_t reserved_size = 2 * in_buffer_size;
+ in_shared_memory_.reset(new base::SharedMemory);
+ if (!in_shared_memory_->CreateAndMapAnonymous(reserved_size)) {
+ base::AutoLock lock(lock_);
+ decoder_status_ = FAILED;
+ LOG(WARNING) << "CreateAndMapAnonymous failed, size=" << reserved_size;
+ return;
+ }
+ }
+ memcpy(in_shared_memory_->memory(), data, in_buffer_size);
+
+ // No need to lock for |in_buffer_id_| since IsDecoding_Locked() is false.
+ in_buffer_id_ = next_bitstream_buffer_id_;
+ media::BitstreamBuffer in_buffer(in_buffer_id_, in_shared_memory_->handle(),
+ in_buffer_size);
+ // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
+ next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
+
+ // The API of |decoder_| requires us to wrap the |out_buffer| in a VideoFrame.
+ const gfx::Size dimensions = frame_format.frame_size;
+ std::unique_ptr<media::VideoCaptureBufferHandle> out_buffer_access =
+ out_buffer.handle_provider->GetHandleForInProcessAccess();
+ base::SharedMemoryHandle out_handle =
+ out_buffer.handle_provider->GetNonOwnedSharedMemoryHandleForLegacyIPC();
+ scoped_refptr<media::VideoFrame> out_frame =
+ media::VideoFrame::WrapExternalSharedMemory(
+ media::PIXEL_FORMAT_I420, // format
+ dimensions, // coded_size
+ gfx::Rect(dimensions), // visible_rect
+ dimensions, // natural_size
+ out_buffer_access->data(), // data
+ out_buffer_access->mapped_size(), // data_size
+ out_handle, // handle
+ 0, // shared_memory_offset
+ timestamp); // timestamp
+ if (!out_frame) {
+ base::AutoLock lock(lock_);
+ decoder_status_ = FAILED;
+ LOG(ERROR) << "DecodeCapturedData: WrapExternalSharedMemory failed";
+ return;
+ }
+ // Hold onto the buffer access handle for the lifetime of the VideoFrame, to
+ // ensure the data pointers remain valid.
+ out_frame->AddDestructionObserver(base::BindOnce(
+ [](std::unique_ptr<media::VideoCaptureBufferHandle> handle) {},
+ std::move(out_buffer_access)));
+ out_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
+ frame_format.frame_rate);
+
+ out_frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
+ reference_time);
+
+ media::mojom::VideoFrameInfoPtr out_frame_info =
+ media::mojom::VideoFrameInfo::New();
+ out_frame_info->timestamp = timestamp;
+ out_frame_info->pixel_format = media::PIXEL_FORMAT_I420;
+ out_frame_info->coded_size = dimensions;
+ out_frame_info->visible_rect = gfx::Rect(dimensions);
+ out_frame_info->metadata = out_frame->metadata()->GetInternalValues().Clone();
+
+ {
+ base::AutoLock lock(lock_);
+ decode_done_closure_ = base::BindOnce(
+ decode_done_cb_, out_buffer.id, out_buffer.frame_feedback_id,
+ base::Passed(&out_buffer.access_permission),
+ base::Passed(&out_frame_info));
+ }
+
+ // base::Unretained is safe because |decoder_| is deleted on
+ // |decoder_task_runner_|.
+ decoder_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&media::JpegDecodeAccelerator::Decode,
+ base::Unretained(decoder_.get()), in_buffer,
+ std::move(out_frame)));
+}
+
+void VideoCaptureJpegDecoderImpl::VideoFrameReady(int32_t bitstream_buffer_id) {
+ DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
+ TRACE_EVENT0("jpeg", "VideoCaptureJpegDecoderImpl::VideoFrameReady");
+ if (!has_received_decoded_frame_) {
+ send_log_message_cb_.Run("Received decoded frame from Gpu Jpeg decoder");
+ has_received_decoded_frame_ = true;
+ }
+ base::AutoLock lock(lock_);
+
+ if (!IsDecoding_Locked()) {
+ LOG(ERROR) << "Got decode response while not decoding";
+ return;
+ }
+
+ if (bitstream_buffer_id != in_buffer_id_) {
+ LOG(ERROR) << "Unexpected bitstream_buffer_id " << bitstream_buffer_id
+ << ", expected " << in_buffer_id_;
+ return;
+ }
+ in_buffer_id_ = media::JpegDecodeAccelerator::kInvalidBitstreamBufferId;
+
+ std::move(decode_done_closure_).Run();
+
+ TRACE_EVENT_ASYNC_END0("jpeg", "VideoCaptureJpegDecoderImpl decoding",
+ bitstream_buffer_id);
+}
+
+void VideoCaptureJpegDecoderImpl::NotifyError(
+ int32_t bitstream_buffer_id,
+ media::JpegDecodeAccelerator::Error error) {
+ DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
+ LOG(ERROR) << "Decode error, bitstream_buffer_id=" << bitstream_buffer_id
+ << ", error=" << error;
+ send_log_message_cb_.Run("Gpu Jpeg decoder failed");
+ base::AutoLock lock(lock_);
+ decode_done_closure_.Reset();
+ decoder_status_ = FAILED;
+}
+
+void VideoCaptureJpegDecoderImpl::FinishInitialization() {
+ TRACE_EVENT0("gpu", "VideoCaptureJpegDecoderImpl::FinishInitialization");
+ DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
+
+ media::mojom::JpegDecodeAcceleratorPtr remote_decoder;
+ jpeg_decoder_factory_.Run(mojo::MakeRequest(&remote_decoder));
+
+ base::AutoLock lock(lock_);
+ decoder_ = std::make_unique<media::MojoJpegDecodeAccelerator>(
+ decoder_task_runner_, remote_decoder.PassInterface());
+
+ decoder_->InitializeAsync(
+ this,
+ base::BindRepeating(&VideoCaptureJpegDecoderImpl::OnInitializationDone,
+ weak_ptr_factory_.GetWeakPtr()));
+}
+
+void VideoCaptureJpegDecoderImpl::OnInitializationDone(bool success) {
+ TRACE_EVENT0("gpu", "VideoCaptureJpegDecoderImpl::OnInitializationDone");
+ DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
+
+ base::AutoLock lock(lock_);
+ if (!success) {
+ decoder_.reset();
+ DLOG(ERROR) << "Failed to initialize JPEG decoder";
+ }
+
+ decoder_status_ = success ? INIT_PASSED : FAILED;
+ RecordInitDecodeUMA_Locked();
+}
+
+bool VideoCaptureJpegDecoderImpl::IsDecoding_Locked() const {
+ lock_.AssertAcquired();
+ return !decode_done_closure_.is_null();
+}
+
+void VideoCaptureJpegDecoderImpl::RecordInitDecodeUMA_Locked() {
+ UMA_HISTOGRAM_BOOLEAN("Media.VideoCaptureGpuJpegDecoder.InitDecodeSuccess",
+ decoder_status_ == INIT_PASSED);
+}
+
+void VideoCaptureJpegDecoderImpl::DestroyDecoderOnIOThread(
+ base::WaitableEvent* event) {
+ DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
+ decoder_.reset();
+ event->Signal();
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/video_capture_jpeg_decoder_impl.h b/chromium/media/capture/video/video_capture_jpeg_decoder_impl.h
new file mode 100644
index 00000000000..a1bfcee8b75
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_jpeg_decoder_impl.h
@@ -0,0 +1,119 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_JPEG_DECODER_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_JPEG_DECODER_IMPL_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+
+#include "base/callback.h"
+#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "base/sequence_checker.h"
+#include "gpu/config/gpu_info.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/video_capture_device_factory.h"
+#include "media/capture/video/video_capture_jpeg_decoder.h"
+#include "media/mojo/clients/mojo_jpeg_decode_accelerator.h"
+
+namespace base {
+class WaitableEvent;
+}
+
+namespace media {
+
+// Implementation of media::VideoCaptureJpegDecoder that delegates to a
+// media::mojom::JpegDecodeAccelerator. When a frame is received in
+// DecodeCapturedData(), it is copied to |in_shared_memory| for IPC transport
+// to |decoder_|. When the decoder is finished with the frame, |decode_done_cb_|
+// is invoked. Until |decode_done_cb_| is invoked, subsequent calls to
+// DecodeCapturedData() are ignored.
+// The given |decoder_task_runner| must allow blocking on |lock_|.
+class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
+ : public media::VideoCaptureJpegDecoder,
+ public media::JpegDecodeAccelerator::Client {
+ public:
+ // |decode_done_cb| is called on the IO thread when decode succeeds. This can
+ // be on any thread. |decode_done_cb| is never called after
+ // VideoCaptureGpuJpegDecoder is destroyed.
+ VideoCaptureJpegDecoderImpl(
+ MojoJpegDecodeAcceleratorFactoryCB jpeg_decoder_factory,
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ DecodeDoneCB decode_done_cb,
+ base::RepeatingCallback<void(const std::string&)> send_log_message_cb);
+ ~VideoCaptureJpegDecoderImpl() override;
+
+ // Implementation of VideoCaptureJpegDecoder:
+ void Initialize() override;
+ STATUS GetStatus() const override;
+ void DecodeCapturedData(
+ const uint8_t* data,
+ size_t in_buffer_size,
+ const media::VideoCaptureFormat& frame_format,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ media::VideoCaptureDevice::Client::Buffer out_buffer) override;
+
+ // JpegDecodeAccelerator::Client implementation.
+ // These will be called on IO thread.
+ void VideoFrameReady(int32_t buffer_id) override;
+ void NotifyError(int32_t buffer_id,
+ media::JpegDecodeAccelerator::Error error) override;
+
+ private:
+ void FinishInitialization();
+ void OnInitializationDone(bool success);
+
+ // Returns true if the decoding of last frame is not finished yet.
+ bool IsDecoding_Locked() const;
+
+ // Records |decoder_status_| to histogram.
+ void RecordInitDecodeUMA_Locked();
+
+ void DestroyDecoderOnIOThread(base::WaitableEvent* event);
+
+ MojoJpegDecodeAcceleratorFactoryCB jpeg_decoder_factory_;
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner_;
+
+ // The underlying JPEG decode accelerator.
+ std::unique_ptr<media::JpegDecodeAccelerator> decoder_;
+
+ // The callback to run when decode succeeds.
+ const DecodeDoneCB decode_done_cb_;
+
+ const base::RepeatingCallback<void(const std::string&)> send_log_message_cb_;
+ bool has_received_decoded_frame_;
+
+ // Guards |decode_done_closure_| and |decoder_status_|.
+ mutable base::Lock lock_;
+
+ // The closure of |decode_done_cb_| with bound parameters.
+ base::OnceClosure decode_done_closure_;
+
+ // Next id for input BitstreamBuffer.
+ int32_t next_bitstream_buffer_id_;
+
+ // The id for current input BitstreamBuffer being decoded.
+ int32_t in_buffer_id_;
+
+ // Shared memory to store JPEG stream buffer. The input BitstreamBuffer is
+ // backed by this.
+ std::unique_ptr<base::SharedMemory> in_shared_memory_;
+
+ STATUS decoder_status_;
+
+ SEQUENCE_CHECKER(sequence_checker_);
+
+ base::WeakPtrFactory<VideoCaptureJpegDecoderImpl> weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoCaptureJpegDecoderImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_JPEG_DECODER_IMPL_H_
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index ef04526c06f..bad25d00bf4 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -18,6 +18,7 @@
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
#include "base/single_thread_task_runner.h"
+#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
#include "base/win/core_winrt_util.h"
@@ -83,6 +84,11 @@ static_assert(arraysize(kBlacklistedCameraNames) == BLACKLISTED_CAMERA_MAX + 1,
"kBlacklistedCameraNames should be same size as "
"BlacklistedCameraNames enum");
+const char* const kModelIdsBlacklistedForMediaFoundation[] = {
+ // Devices using Empia 2860 or 2820 chips, see https://crbug.com/849636.
+ "eb1a:2860", "eb1a:2820",
+};
+
const std::pair<VideoCaptureApi, std::vector<std::pair<GUID, GUID>>>
kMfAttributes[] = {{VideoCaptureApi::WIN_MEDIA_FOUNDATION,
{
@@ -100,6 +106,11 @@ bool IsDeviceBlacklistedForQueryingDetailedFrameRates(
return display_name.find("WebcamMax") != std::string::npos;
}
+bool IsDeviceBlacklistedForMediaFoundationByModelId(
+ const std::string& model_id) {
+ return base::ContainsValue(kModelIdsBlacklistedForMediaFoundation, model_id);
+}
+
bool LoadMediaFoundationDlls() {
static const wchar_t* const kMfDLLs[] = {
L"%WINDIR%\\system32\\mf.dll", L"%WINDIR%\\system32\\mfplat.dll",
@@ -354,7 +365,7 @@ VideoCaptureDeviceFactoryWin::VideoCaptureDeviceFactoryWin()
direct_show_get_supported_formats_func_ =
base::BindRepeating(&GetDeviceSupportedFormatsDirectShow);
- if (!PlatformSupportsMediaFoundation()) {
+ if (use_media_foundation_ && !PlatformSupportsMediaFoundation()) {
use_media_foundation_ = false;
LogVideoCaptureWinBackendUsed(
VideoCaptureWinBackendUsed::kUsingDirectShowAsFallback);
@@ -612,6 +623,8 @@ void VideoCaptureDeviceFactoryWin::GetDeviceDescriptorsMediaFoundation(
const std::string device_id =
base::SysWideToUTF8(std::wstring(id, id_size));
const std::string model_id = GetDeviceModelId(device_id);
+ if (IsDeviceBlacklistedForMediaFoundationByModelId(model_id))
+ continue;
if (list_was_empty ||
!DescriptorsContainDeviceId(*device_descriptors, device_id)) {
device_descriptors->emplace_back(
@@ -753,14 +766,4 @@ void VideoCaptureDeviceFactoryWin::GetSupportedFormats(
GetApiSpecificSupportedFormats(device, formats);
}
-// static
-VideoCaptureDeviceFactory*
-VideoCaptureDeviceFactory::CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
- MojoJpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
- return new VideoCaptureDeviceFactoryWin();
-}
-
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
index 80b5532a6a0..27dd51e4f55 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
@@ -33,8 +33,16 @@ const wchar_t* kMFDeviceName2 = L"Device 2";
const wchar_t* kMFDeviceId5 = L"\\\\?\\usb#vid_0005&pid_0005&mi_00";
const wchar_t* kMFDeviceName5 = L"Dazzle";
+const wchar_t* kMFDeviceId6 = L"\\\\?\\usb#vid_eb1a&pid_2860&mi_00";
+const wchar_t* kMFDeviceName6 = L"Empia Device";
+
void GetMFSupportedFormats(const VideoCaptureDeviceDescriptor& device,
- VideoCaptureFormats* formats) {}
+ VideoCaptureFormats* formats) {
+ if (device.device_id == base::SysWideToUTF8(kMFDeviceId6)) {
+ VideoCaptureFormat arbitrary_format;
+ formats->emplace_back(arbitrary_format);
+ }
+}
// DirectShow devices
const wchar_t* kDirectShowDeviceId0 = L"\\\\?\\usb#vid_0000&pid_0000&mi_00";
@@ -52,6 +60,9 @@ const wchar_t* kDirectShowDeviceName4 = L"Virtual Camera";
const wchar_t* kDirectShowDeviceId5 = L"\\\\?\\usb#vid_0005&pid_0005&mi_00#5";
const wchar_t* kDirectShowDeviceName5 = L"Dazzle";
+const wchar_t* kDirectShowDeviceId6 = L"\\\\?\\usb#vid_eb1a&pid_2860&mi_00";
+const wchar_t* kDirectShowDeviceName6 = L"Empia Device";
+
void GetDirectShowSupportedFormats(const VideoCaptureDeviceDescriptor& device,
VideoCaptureFormats* formats) {
if (device.device_id == base::SysWideToUTF8(kDirectShowDeviceId5)) {
@@ -426,7 +437,8 @@ HRESULT __stdcall MockMFEnumDeviceSources(IMFAttributes* attributes,
new MockMFActivate(kMFDeviceId0, kMFDeviceName0, true, false),
new MockMFActivate(kMFDeviceId1, kMFDeviceName1, true, true),
new MockMFActivate(kMFDeviceId2, kMFDeviceName2, false, true),
- new MockMFActivate(kMFDeviceId5, kMFDeviceName5, true, false)};
+ new MockMFActivate(kMFDeviceId5, kMFDeviceName5, true, false),
+ new MockMFActivate(kMFDeviceId6, kMFDeviceName6, true, false)};
// Iterate once to get the match count and check for errors.
*count = 0U;
HRESULT hr;
@@ -456,7 +468,8 @@ HRESULT EnumerateStubDirectShowDevices(IEnumMoniker** enum_moniker) {
new StubMoniker(kDirectShowDeviceId1, kDirectShowDeviceName1),
new StubMoniker(kDirectShowDeviceId3, kDirectShowDeviceName3),
new StubMoniker(kDirectShowDeviceId4, kDirectShowDeviceName4),
- new StubMoniker(kDirectShowDeviceId5, kDirectShowDeviceName5)};
+ new StubMoniker(kDirectShowDeviceId5, kDirectShowDeviceName5),
+ new StubMoniker(kDirectShowDeviceId6, kDirectShowDeviceName6)};
StubEnumMoniker* stub_enum_moniker = new StubEnumMoniker();
for (StubMoniker* moniker : monikers)
@@ -511,7 +524,7 @@ TEST_F(VideoCaptureDeviceFactoryMFWinTest, GetDeviceDescriptors) {
base::BindRepeating(&EnumerateStubDirectShowDevices));
VideoCaptureDeviceDescriptors descriptors;
factory_.GetDeviceDescriptors(&descriptors);
- EXPECT_EQ(descriptors.size(), 6U);
+ EXPECT_EQ(descriptors.size(), 7U);
for (auto it = descriptors.begin(); it != descriptors.end(); it++) {
// Verify that there are no duplicates.
EXPECT_EQ(FindDescriptorInRange(descriptors.begin(), it, it->device_id),
@@ -555,6 +568,15 @@ TEST_F(VideoCaptureDeviceFactoryMFWinTest, GetDeviceDescriptors) {
EXPECT_NE(it, descriptors.end());
EXPECT_EQ(it->capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
EXPECT_EQ(it->display_name(), base::SysWideToUTF8(kDirectShowDeviceName5));
+
+ // Devices that are listed in both MediaFoundation and DirectShow but are
+ // blacklisted for use with MediaFoundation are expected to get enumerated
+ // with VideoCaptureApi::WIN_DIRECT_SHOW.
+ it = FindDescriptorInRange(descriptors.begin(), descriptors.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId6));
+ EXPECT_NE(it, descriptors.end());
+ EXPECT_EQ(it->capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->display_name(), base::SysWideToUTF8(kDirectShowDeviceName6));
}
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_device_win.cc b/chromium/media/capture/video/win/video_capture_device_win.cc
index 78355961260..ae1bf4d602b 100644
--- a/chromium/media/capture/video/win/video_capture_device_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_win.cc
@@ -83,9 +83,9 @@ mojom::RangePtr RetrieveControlRangeAndCurrent(
control_range->max = max;
control_range->step = step;
if (supported_modes != nullptr) {
- if (flags && CameraControl_Flags_Auto)
+ if (flags & CameraControl_Flags_Auto)
supported_modes->push_back(mojom::MeteringMode::CONTINUOUS);
- if (flags && CameraControl_Flags_Manual)
+ if (flags & CameraControl_Flags_Manual)
supported_modes->push_back(mojom::MeteringMode::MANUAL);
}
}
@@ -95,9 +95,9 @@ mojom::RangePtr RetrieveControlRangeAndCurrent(
if (SUCCEEDED(hr)) {
control_range->current = current;
if (current_mode != nullptr) {
- if (flags && CameraControl_Flags_Auto)
+ if (flags & CameraControl_Flags_Auto)
*current_mode = mojom::MeteringMode::CONTINUOUS;
- else if (flags && CameraControl_Flags_Manual)
+ else if (flags & CameraControl_Flags_Manual)
*current_mode = mojom::MeteringMode::MANUAL;
}
}
diff --git a/chromium/media/capture/video_capture_types.cc b/chromium/media/capture/video_capture_types.cc
index b6fc66e20c3..6cd3063bfee 100644
--- a/chromium/media/capture/video_capture_types.cc
+++ b/chromium/media/capture/video_capture_types.cc
@@ -70,7 +70,8 @@ bool VideoCaptureFormat::ComparePixelFormatPreference(
}
VideoCaptureParams::VideoCaptureParams()
- : resolution_change_policy(ResolutionChangePolicy::FIXED_RESOLUTION),
+ : buffer_type(VideoCaptureBufferType::kSharedMemory),
+ resolution_change_policy(ResolutionChangePolicy::FIXED_RESOLUTION),
power_line_frequency(PowerLineFrequency::FREQUENCY_DEFAULT) {}
bool VideoCaptureParams::IsValid() const {
diff --git a/chromium/media/capture/video_capture_types.h b/chromium/media/capture/video_capture_types.h
index bb43a319454..cc7a5c42f6a 100644
--- a/chromium/media/capture/video_capture_types.h
+++ b/chromium/media/capture/video_capture_types.h
@@ -54,6 +54,12 @@ enum class PowerLineFrequency {
FREQUENCY_MAX = FREQUENCY_60HZ
};
+enum class VideoCaptureBufferType {
+ kSharedMemory,
+ kSharedMemoryViaRawFileDescriptor,
+ kMailboxHolder
+};
+
// Assert that the int:frequency mapping is correct.
static_assert(static_cast<int>(PowerLineFrequency::FREQUENCY_DEFAULT) == 0,
"static_cast<int>(FREQUENCY_DEFAULT) must equal 0.");
@@ -135,6 +141,8 @@ struct CAPTURE_EXPORT VideoCaptureParams {
// Requests a resolution and format at which the capture will occur.
VideoCaptureFormat requested_format;
+ VideoCaptureBufferType buffer_type;
+
// Policy for resolution change.
ResolutionChangePolicy resolution_change_policy;
diff --git a/chromium/media/cast/BUILD.gn b/chromium/media/cast/BUILD.gn
index 07122a4d12b..e283cb5c349 100644
--- a/chromium/media/cast/BUILD.gn
+++ b/chromium/media/cast/BUILD.gn
@@ -411,7 +411,6 @@ if (is_win || is_mac || (is_linux && !is_chromeos)) {
":receiver",
":test_support",
"//base",
- "//build/config:exe_and_shlib_deps",
"//build/win:default_exe_manifest",
"//media:test_support",
"//net",
@@ -442,7 +441,6 @@ if (is_win || is_mac || (is_linux && !is_chromeos)) {
":sender",
":test_support",
"//base",
- "//build/config:exe_and_shlib_deps",
"//build/win:default_exe_manifest",
"//media",
]
@@ -469,7 +467,6 @@ if (is_win || is_mac || (is_linux && !is_chromeos)) {
":test_support",
"//base",
"//base/test:test_support",
- "//build/config:exe_and_shlib_deps",
"//build/win:default_exe_manifest",
"//media:test_support",
]
@@ -483,7 +480,6 @@ if (is_win || is_mac || (is_linux && !is_chromeos)) {
deps = [
":test_support",
"//base",
- "//build/config:exe_and_shlib_deps",
"//build/win:default_exe_manifest",
"//media",
]
@@ -497,7 +493,6 @@ if (is_win || is_mac || (is_linux && !is_chromeos)) {
deps = [
":test_support",
"//base",
- "//build/config:exe_and_shlib_deps",
"//build/win:default_exe_manifest",
"//media",
]
@@ -511,7 +506,6 @@ if (is_win || is_mac || (is_linux && !is_chromeos)) {
deps = [
":test_support",
"//base",
- "//build/config:exe_and_shlib_deps",
"//build/win:default_exe_manifest",
"//net",
]
diff --git a/chromium/media/cast/logging/encoding_event_subscriber.cc b/chromium/media/cast/logging/encoding_event_subscriber.cc
index 27aae92d4ab..57cb282cfb3 100644
--- a/chromium/media/cast/logging/encoding_event_subscriber.cc
+++ b/chromium/media/cast/logging/encoding_event_subscriber.cc
@@ -43,7 +43,7 @@ BasePacketEvent* GetNewBasePacketEvent(AggregatedPacketEvent* event_proto,
return base;
}
-}
+} // namespace
namespace media {
namespace cast {
@@ -68,66 +68,66 @@ void EncodingEventSubscriber::OnReceiveFrameEvent(
const RtpTimeDelta relative_rtp_timestamp =
GetRelativeRtpTimestamp(frame_event.rtp_timestamp);
- uint32_t lower_32_bits = relative_rtp_timestamp.lower_32_bits();
- FrameEventMap::iterator it = frame_event_map_.find(relative_rtp_timestamp);
- linked_ptr<AggregatedFrameEvent> event_proto;
+ const uint32_t lower_32_bits = relative_rtp_timestamp.lower_32_bits();
+ AggregatedFrameEvent* event_proto_ptr = nullptr;
// Look up existing entry. If not found, create a new entry and add to map.
+ FrameEventMap::iterator it = frame_event_map_.find(relative_rtp_timestamp);
if (it == frame_event_map_.end()) {
if (!ShouldCreateNewProto(lower_32_bits))
return;
IncrementStoredProtoCount(lower_32_bits);
- event_proto.reset(new AggregatedFrameEvent);
+ auto event_proto = std::make_unique<AggregatedFrameEvent>();
event_proto->set_relative_rtp_timestamp(lower_32_bits);
+ event_proto_ptr = event_proto.get();
frame_event_map_.insert(
- std::make_pair(relative_rtp_timestamp, event_proto));
+ std::make_pair(relative_rtp_timestamp, std::move(event_proto)));
} else {
- event_proto = it->second;
- if (event_proto->event_type_size() >= kMaxEventsPerProto) {
+ if (it->second->event_type_size() >= kMaxEventsPerProto) {
DVLOG(2) << "Too many events in frame " << frame_event.rtp_timestamp
<< ". Using new frame event proto.";
- AddFrameEventToStorage(event_proto);
+ AddFrameEventToStorage(std::move(it->second));
if (!ShouldCreateNewProto(lower_32_bits)) {
frame_event_map_.erase(it);
return;
}
IncrementStoredProtoCount(lower_32_bits);
- event_proto.reset(new AggregatedFrameEvent);
- event_proto->set_relative_rtp_timestamp(lower_32_bits);
- it->second = event_proto;
+ it->second = std::make_unique<AggregatedFrameEvent>();
+ it->second->set_relative_rtp_timestamp(lower_32_bits);
}
+ event_proto_ptr = it->second.get();
}
- event_proto->add_event_type(ToProtoEventType(frame_event.type));
- event_proto->add_event_timestamp_ms(
+ event_proto_ptr->add_event_type(ToProtoEventType(frame_event.type));
+ event_proto_ptr->add_event_timestamp_ms(
(frame_event.timestamp - base::TimeTicks()).InMilliseconds());
if (frame_event.type == FRAME_CAPTURE_END) {
if (frame_event.media_type == VIDEO_EVENT &&
frame_event.width > 0 && frame_event.height > 0) {
- event_proto->set_width(frame_event.width);
- event_proto->set_height(frame_event.height);
+ event_proto_ptr->set_width(frame_event.width);
+ event_proto_ptr->set_height(frame_event.height);
}
} else if (frame_event.type == FRAME_ENCODED) {
- event_proto->set_encoded_frame_size(frame_event.size);
+ event_proto_ptr->set_encoded_frame_size(frame_event.size);
if (frame_event.encoder_cpu_utilization >= 0.0) {
- event_proto->set_encoder_cpu_percent_utilized(
+ event_proto_ptr->set_encoder_cpu_percent_utilized(
base::saturated_cast<int32_t>(
frame_event.encoder_cpu_utilization * 100.0 + 0.5));
}
if (frame_event.idealized_bitrate_utilization >= 0.0) {
- event_proto->set_idealized_bitrate_percent_utilized(
+ event_proto_ptr->set_idealized_bitrate_percent_utilized(
base::saturated_cast<int32_t>(
frame_event.idealized_bitrate_utilization * 100.0 + 0.5));
}
if (frame_event.media_type == VIDEO_EVENT) {
- event_proto->set_key_frame(frame_event.key_frame);
- event_proto->set_target_bitrate(frame_event.target_bitrate);
+ event_proto_ptr->set_key_frame(frame_event.key_frame);
+ event_proto_ptr->set_target_bitrate(frame_event.target_bitrate);
}
} else if (frame_event.type == FRAME_PLAYOUT) {
- event_proto->set_delay_millis(frame_event.delay_delta.InMilliseconds());
+ event_proto_ptr->set_delay_millis(frame_event.delay_delta.InMilliseconds());
}
if (frame_event_map_.size() > kMaxMapSize)
@@ -149,8 +149,7 @@ void EncodingEventSubscriber::OnReceivePacketEvent(
uint32_t lower_32_bits = relative_rtp_timestamp.lower_32_bits();
PacketEventMap::iterator it =
packet_event_map_.find(relative_rtp_timestamp);
- linked_ptr<AggregatedPacketEvent> event_proto;
- BasePacketEvent* base_packet_event_proto = NULL;
+ BasePacketEvent* base_packet_event_proto = nullptr;
// Look up existing entry. If not found, create a new entry and add to map.
if (it == packet_event_map_.end()) {
@@ -158,18 +157,17 @@ void EncodingEventSubscriber::OnReceivePacketEvent(
return;
IncrementStoredProtoCount(lower_32_bits);
- event_proto.reset(new AggregatedPacketEvent);
+ auto event_proto = std::make_unique<AggregatedPacketEvent>();
event_proto->set_relative_rtp_timestamp(lower_32_bits);
- packet_event_map_.insert(
- std::make_pair(relative_rtp_timestamp, event_proto));
base_packet_event_proto = GetNewBasePacketEvent(
event_proto.get(), packet_event.packet_id, packet_event.size);
+ packet_event_map_.insert(
+ std::make_pair(relative_rtp_timestamp, std::move(event_proto)));
} else {
// Found existing entry, now look up existing BasePacketEvent using packet
// ID. If not found, create a new entry and add to proto.
- event_proto = it->second;
RepeatedPtrField<BasePacketEvent>* field =
- event_proto->mutable_base_packet_event();
+ it->second->mutable_base_packet_event();
for (RepeatedPtrField<BasePacketEvent>::pointer_iterator base_it =
field->pointer_begin();
base_it != field->pointer_end();
@@ -180,41 +178,39 @@ void EncodingEventSubscriber::OnReceivePacketEvent(
}
}
if (!base_packet_event_proto) {
- if (event_proto->base_packet_event_size() >= kMaxPacketsPerFrame) {
+ if (it->second->base_packet_event_size() >= kMaxPacketsPerFrame) {
DVLOG(3) << "Too many packets in AggregatedPacketEvent "
<< packet_event.rtp_timestamp << ". "
<< "Using new packet event proto.";
- AddPacketEventToStorage(event_proto);
+ AddPacketEventToStorage(std::move(it->second));
if (!ShouldCreateNewProto(lower_32_bits)) {
packet_event_map_.erase(it);
return;
}
IncrementStoredProtoCount(lower_32_bits);
- event_proto.reset(new AggregatedPacketEvent);
- event_proto->set_relative_rtp_timestamp(lower_32_bits);
- it->second = event_proto;
+ it->second = std::make_unique<AggregatedPacketEvent>();
+ it->second->set_relative_rtp_timestamp(lower_32_bits);
}
base_packet_event_proto = GetNewBasePacketEvent(
- event_proto.get(), packet_event.packet_id, packet_event.size);
+ it->second.get(), packet_event.packet_id, packet_event.size);
} else if (base_packet_event_proto->event_type_size() >=
kMaxEventsPerProto) {
DVLOG(3) << "Too many events in packet "
<< packet_event.rtp_timestamp << ", "
<< packet_event.packet_id << ". Using new packet event proto.";
- AddPacketEventToStorage(event_proto);
+ AddPacketEventToStorage(std::move(it->second));
if (!ShouldCreateNewProto(lower_32_bits)) {
packet_event_map_.erase(it);
return;
}
IncrementStoredProtoCount(lower_32_bits);
- event_proto.reset(new AggregatedPacketEvent);
- event_proto->set_relative_rtp_timestamp(lower_32_bits);
- it->second = event_proto;
+ it->second = std::make_unique<AggregatedPacketEvent>();
+ it->second->set_relative_rtp_timestamp(lower_32_bits);
base_packet_event_proto = GetNewBasePacketEvent(
- event_proto.get(), packet_event.packet_id, packet_event.size);
+ it->second.get(), packet_event.packet_id, packet_event.size);
}
}
@@ -245,9 +241,9 @@ void EncodingEventSubscriber::GetEventsAndReset(LogMetadata* metadata,
TransferFrameEvents(frame_event_map_.size());
TransferPacketEvents(packet_event_map_.size());
std::sort(frame_event_storage_.begin(), frame_event_storage_.end(),
- &IsRtpTimestampLessThan<linked_ptr<AggregatedFrameEvent> >);
+ &IsRtpTimestampLessThan<std::unique_ptr<AggregatedFrameEvent>>);
std::sort(packet_event_storage_.begin(), packet_event_storage_.end(),
- &IsRtpTimestampLessThan<linked_ptr<AggregatedPacketEvent> >);
+ &IsRtpTimestampLessThan<std::unique_ptr<AggregatedPacketEvent>>);
metadata->set_is_audio(event_media_type_ == AUDIO_EVENT);
metadata->set_first_rtp_timestamp(first_rtp_timestamp_.lower_32_bits());
@@ -267,7 +263,7 @@ void EncodingEventSubscriber::TransferFrameEvents(size_t max_num_entries) {
for (size_t i = 0;
i < max_num_entries && it != frame_event_map_.end();
i++, ++it) {
- AddFrameEventToStorage(it->second);
+ AddFrameEventToStorage(std::move(it->second));
}
frame_event_map_.erase(frame_event_map_.begin(), it);
@@ -278,33 +274,33 @@ void EncodingEventSubscriber::TransferPacketEvents(size_t max_num_entries) {
for (size_t i = 0;
i < max_num_entries && it != packet_event_map_.end();
i++, ++it) {
- AddPacketEventToStorage(it->second);
+ AddPacketEventToStorage(std::move(it->second));
}
packet_event_map_.erase(packet_event_map_.begin(), it);
}
void EncodingEventSubscriber::AddFrameEventToStorage(
- const linked_ptr<AggregatedFrameEvent>& frame_event_proto) {
+ std::unique_ptr<AggregatedFrameEvent> frame_event_proto) {
if (frame_event_storage_.size() >= max_frames_) {
auto& entry = frame_event_storage_[frame_event_storage_index_];
DecrementStoredProtoCount(entry->relative_rtp_timestamp());
- entry = frame_event_proto;
+ entry = std::move(frame_event_proto);
} else {
- frame_event_storage_.push_back(frame_event_proto);
+ frame_event_storage_.push_back(std::move(frame_event_proto));
}
frame_event_storage_index_ = (frame_event_storage_index_ + 1) % max_frames_;
}
void EncodingEventSubscriber::AddPacketEventToStorage(
- const linked_ptr<AggregatedPacketEvent>& packet_event_proto) {
+ std::unique_ptr<AggregatedPacketEvent> packet_event_proto) {
if (packet_event_storage_.size() >= max_frames_) {
auto& entry = packet_event_storage_[packet_event_storage_index_];
DecrementStoredProtoCount(entry->relative_rtp_timestamp());
- entry = packet_event_proto;
+ entry = std::move(packet_event_proto);
} else {
- packet_event_storage_.push_back(packet_event_proto);
+ packet_event_storage_.push_back(std::move(packet_event_proto));
}
packet_event_storage_index_ = (packet_event_storage_index_ + 1) % max_frames_;
diff --git a/chromium/media/cast/logging/encoding_event_subscriber.h b/chromium/media/cast/logging/encoding_event_subscriber.h
index c6385842e47..8e6f81b61d6 100644
--- a/chromium/media/cast/logging/encoding_event_subscriber.h
+++ b/chromium/media/cast/logging/encoding_event_subscriber.h
@@ -8,9 +8,10 @@
#include <stddef.h>
#include <map>
+#include <memory>
+#include <vector>
#include "base/macros.h"
-#include "base/memory/linked_ptr.h"
#include "base/threading/thread_checker.h"
#include "media/cast/logging/logging_defines.h"
#include "media/cast/logging/proto/raw_events.pb.h"
@@ -32,10 +33,10 @@ static const int kMaxEventsPerProto = 16;
// further events for that frame will be dropped.
static const int kMaxProtosPerFrame = 10;
-typedef std::vector<linked_ptr<media::cast::proto::AggregatedFrameEvent> >
- FrameEventList;
-typedef std::vector<linked_ptr<media::cast::proto::AggregatedPacketEvent> >
- PacketEventList;
+using FrameEventList =
+ std::vector<std::unique_ptr<proto::AggregatedFrameEvent>>;
+using PacketEventList =
+ std::vector<std::unique_ptr<proto::AggregatedPacketEvent>>;
// A RawEventSubscriber implementation that subscribes to events,
// encodes them in protocol buffer format, and aggregates them into a more
@@ -67,17 +68,15 @@ class EncodingEventSubscriber : public RawEventSubscriber {
// In addition, assign metadata associated with these events to |metadata|.
// The protos in |frame_events| and |packets_events| are sorted in
// ascending RTP timestamp order.
- void GetEventsAndReset(media::cast::proto::LogMetadata* metadata,
+ void GetEventsAndReset(proto::LogMetadata* metadata,
FrameEventList* frame_events,
PacketEventList* packet_events);
private:
- typedef std::map<RtpTimeDelta,
- linked_ptr<media::cast::proto::AggregatedFrameEvent>>
- FrameEventMap;
- typedef std::map<RtpTimeDelta,
- linked_ptr<media::cast::proto::AggregatedPacketEvent>>
- PacketEventMap;
+ using FrameEventMap =
+ std::map<RtpTimeDelta, std::unique_ptr<proto::AggregatedFrameEvent>>;
+ using PacketEventMap =
+ std::map<RtpTimeDelta, std::unique_ptr<proto::AggregatedPacketEvent>>;
// Transfer up to |max_num_entries| smallest entries from |frame_event_map_|
// to |frame_event_storage_|. This helps keep size of |frame_event_map_| small
@@ -87,11 +86,9 @@ class EncodingEventSubscriber : public RawEventSubscriber {
void TransferPacketEvents(size_t max_num_entries);
void AddFrameEventToStorage(
- const linked_ptr<media::cast::proto::AggregatedFrameEvent>&
- frame_event_proto);
+ std::unique_ptr<proto::AggregatedFrameEvent> frame_event_proto);
void AddPacketEventToStorage(
- const linked_ptr<media::cast::proto::AggregatedPacketEvent>&
- packet_event_proto);
+ std::unique_ptr<proto::AggregatedPacketEvent> packet_event_proto);
bool ShouldCreateNewProto(
uint32_t relative_rtp_timestamp_lower_32_bits) const;
diff --git a/chromium/media/cast/logging/encoding_event_subscriber_unittest.cc b/chromium/media/cast/logging/encoding_event_subscriber_unittest.cc
index 8ee5188fbab..9cde551e21e 100644
--- a/chromium/media/cast/logging/encoding_event_subscriber_unittest.cc
+++ b/chromium/media/cast/logging/encoding_event_subscriber_unittest.cc
@@ -225,7 +225,7 @@ TEST_F(EncodingEventSubscriberTest, EventFiltering) {
ASSERT_EQ(1u, frame_events_.size());
FrameEventList::iterator it = frame_events_.begin();
- linked_ptr<AggregatedFrameEvent> frame_event = *it;
+ const AggregatedFrameEvent* frame_event = it->get();
ASSERT_EQ(1, frame_event->event_type_size());
EXPECT_EQ(media::cast::proto::FRAME_DECODED,
@@ -254,7 +254,7 @@ TEST_F(EncodingEventSubscriberTest, FrameEvent) {
FrameEventList::iterator it = frame_events_.begin();
- linked_ptr<AggregatedFrameEvent> event = *it;
+ const AggregatedFrameEvent* event = it->get();
EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
event->relative_rtp_timestamp());
@@ -291,7 +291,7 @@ TEST_F(EncodingEventSubscriberTest, FrameEventDelay) {
FrameEventList::iterator it = frame_events_.begin();
- linked_ptr<AggregatedFrameEvent> event = *it;
+ const AggregatedFrameEvent* event = it->get();
EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
event->relative_rtp_timestamp());
@@ -334,7 +334,7 @@ TEST_F(EncodingEventSubscriberTest, FrameEventSize) {
FrameEventList::iterator it = frame_events_.begin();
- linked_ptr<AggregatedFrameEvent> event = *it;
+ const AggregatedFrameEvent* event = it->get();
EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
event->relative_rtp_timestamp());
@@ -396,37 +396,41 @@ TEST_F(EncodingEventSubscriberTest, MultipleFrameEvents) {
FrameEventList::iterator it = frame_events_.begin();
- linked_ptr<AggregatedFrameEvent> event = *it;
+ {
+ const AggregatedFrameEvent* event = it->get();
- EXPECT_EQ((rtp_timestamp1 - first_rtp_timestamp_).lower_32_bits(),
- event->relative_rtp_timestamp());
+ EXPECT_EQ((rtp_timestamp1 - first_rtp_timestamp_).lower_32_bits(),
+ event->relative_rtp_timestamp());
- ASSERT_EQ(2, event->event_type_size());
- EXPECT_EQ(media::cast::proto::FRAME_PLAYOUT, event->event_type(0));
- EXPECT_EQ(media::cast::proto::FRAME_DECODED, event->event_type(1));
+ ASSERT_EQ(2, event->event_type_size());
+ EXPECT_EQ(media::cast::proto::FRAME_PLAYOUT, event->event_type(0));
+ EXPECT_EQ(media::cast::proto::FRAME_DECODED, event->event_type(1));
- ASSERT_EQ(2, event->event_timestamp_ms_size());
- EXPECT_EQ(InMilliseconds(now1), event->event_timestamp_ms(0));
- EXPECT_EQ(InMilliseconds(now3), event->event_timestamp_ms(1));
+ ASSERT_EQ(2, event->event_timestamp_ms_size());
+ EXPECT_EQ(InMilliseconds(now1), event->event_timestamp_ms(0));
+ EXPECT_EQ(InMilliseconds(now3), event->event_timestamp_ms(1));
- EXPECT_FALSE(event->has_key_frame());
+ EXPECT_FALSE(event->has_key_frame());
+ }
++it;
- event = *it;
+ {
+ const AggregatedFrameEvent* event = it->get();
- EXPECT_EQ((rtp_timestamp2 - first_rtp_timestamp_).lower_32_bits(),
- event->relative_rtp_timestamp());
+ EXPECT_EQ((rtp_timestamp2 - first_rtp_timestamp_).lower_32_bits(),
+ event->relative_rtp_timestamp());
- ASSERT_EQ(1, event->event_type_size());
- EXPECT_EQ(media::cast::proto::FRAME_ENCODED, event->event_type(0));
+ ASSERT_EQ(1, event->event_type_size());
+ EXPECT_EQ(media::cast::proto::FRAME_ENCODED, event->event_type(0));
- ASSERT_EQ(1, event->event_timestamp_ms_size());
- EXPECT_EQ(InMilliseconds(now2), event->event_timestamp_ms(0));
+ ASSERT_EQ(1, event->event_timestamp_ms_size());
+ EXPECT_EQ(InMilliseconds(now2), event->event_timestamp_ms(0));
- EXPECT_FALSE(event->has_key_frame());
- EXPECT_EQ(44, event->encoder_cpu_percent_utilized());
- EXPECT_EQ(55, event->idealized_bitrate_percent_utilized());
+ EXPECT_FALSE(event->has_key_frame());
+ EXPECT_EQ(44, event->encoder_cpu_percent_utilized());
+ EXPECT_EQ(55, event->idealized_bitrate_percent_utilized());
+ }
}
TEST_F(EncodingEventSubscriberTest, PacketEvent) {
@@ -452,7 +456,7 @@ TEST_F(EncodingEventSubscriberTest, PacketEvent) {
PacketEventList::iterator it = packet_events_.begin();
- linked_ptr<AggregatedPacketEvent> event = *it;
+ const AggregatedPacketEvent* event = it->get();
EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
event->relative_rtp_timestamp());
@@ -507,7 +511,7 @@ TEST_F(EncodingEventSubscriberTest, MultiplePacketEventsForPacket) {
PacketEventList::iterator it = packet_events_.begin();
- linked_ptr<AggregatedPacketEvent> event = *it;
+ const AggregatedPacketEvent* event = it->get();
EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
event->relative_rtp_timestamp());
@@ -562,7 +566,7 @@ TEST_F(EncodingEventSubscriberTest, MultiplePacketEventsForFrame) {
PacketEventList::iterator it = packet_events_.begin();
- linked_ptr<AggregatedPacketEvent> event = *it;
+ const AggregatedPacketEvent* event = it->get();
EXPECT_EQ((rtp_timestamp - first_rtp_timestamp_).lower_32_bits(),
event->relative_rtp_timestamp());
@@ -623,35 +627,40 @@ TEST_F(EncodingEventSubscriberTest, MultiplePacketEvents) {
PacketEventList::iterator it = packet_events_.begin();
- linked_ptr<AggregatedPacketEvent> event = *it;
+ {
+ const AggregatedPacketEvent* event = it->get();
- EXPECT_EQ((rtp_timestamp_1 - first_rtp_timestamp_).lower_32_bits(),
- event->relative_rtp_timestamp());
+ EXPECT_EQ((rtp_timestamp_1 - first_rtp_timestamp_).lower_32_bits(),
+ event->relative_rtp_timestamp());
- ASSERT_EQ(1, event->base_packet_event_size());
- const BasePacketEvent& base_event = event->base_packet_event(0);
- EXPECT_EQ(packet_id_1, base_event.packet_id());
- ASSERT_EQ(1, base_event.event_type_size());
- EXPECT_EQ(media::cast::proto::PACKET_SENT_TO_NETWORK,
- base_event.event_type(0));
- ASSERT_EQ(1, base_event.event_timestamp_ms_size());
- EXPECT_EQ(InMilliseconds(now1), base_event.event_timestamp_ms(0));
+ ASSERT_EQ(1, event->base_packet_event_size());
+ const BasePacketEvent& base_event = event->base_packet_event(0);
+ EXPECT_EQ(packet_id_1, base_event.packet_id());
+ ASSERT_EQ(1, base_event.event_type_size());
+ EXPECT_EQ(media::cast::proto::PACKET_SENT_TO_NETWORK,
+ base_event.event_type(0));
+ ASSERT_EQ(1, base_event.event_timestamp_ms_size());
+ EXPECT_EQ(InMilliseconds(now1), base_event.event_timestamp_ms(0));
+ }
++it;
ASSERT_TRUE(it != packet_events_.end());
- event = *it;
- EXPECT_EQ((rtp_timestamp_2 - first_rtp_timestamp_).lower_32_bits(),
- event->relative_rtp_timestamp());
+ {
+ const AggregatedPacketEvent* event = it->get();
- ASSERT_EQ(1, event->base_packet_event_size());
- const BasePacketEvent& base_event_2 = event->base_packet_event(0);
- EXPECT_EQ(packet_id_2, base_event_2.packet_id());
- ASSERT_EQ(1, base_event_2.event_type_size());
- EXPECT_EQ(media::cast::proto::PACKET_RETRANSMITTED,
- base_event_2.event_type(0));
- ASSERT_EQ(1, base_event_2.event_timestamp_ms_size());
- EXPECT_EQ(InMilliseconds(now2), base_event_2.event_timestamp_ms(0));
+ EXPECT_EQ((rtp_timestamp_2 - first_rtp_timestamp_).lower_32_bits(),
+ event->relative_rtp_timestamp());
+
+ ASSERT_EQ(1, event->base_packet_event_size());
+ const BasePacketEvent& base_event_2 = event->base_packet_event(0);
+ EXPECT_EQ(packet_id_2, base_event_2.packet_id());
+ ASSERT_EQ(1, base_event_2.event_type_size());
+ EXPECT_EQ(media::cast::proto::PACKET_RETRANSMITTED,
+ base_event_2.event_type(0));
+ ASSERT_EQ(1, base_event_2.event_timestamp_ms_size());
+ EXPECT_EQ(InMilliseconds(now2), base_event_2.event_timestamp_ms(0));
+ }
}
TEST_F(EncodingEventSubscriberTest, FirstRtpTimeTicks) {
@@ -763,7 +772,7 @@ TEST_F(EncodingEventSubscriberTest, MaxEventsPerProto) {
FrameEventList::iterator frame_it = frame_events_.begin();
ASSERT_TRUE(frame_it != frame_events_.end());
- linked_ptr<AggregatedFrameEvent> frame_event = *frame_it;
+ const AggregatedFrameEvent* frame_event = frame_it->get();
EXPECT_EQ(kMaxEventsPerProto, frame_event->event_type_size());
@@ -789,14 +798,17 @@ TEST_F(EncodingEventSubscriberTest, MaxEventsPerProto) {
PacketEventList::iterator packet_it = packet_events_.begin();
ASSERT_TRUE(packet_it != packet_events_.end());
- linked_ptr<AggregatedPacketEvent> packet_event = *packet_it;
-
- EXPECT_EQ(kMaxPacketsPerFrame,
- packet_event->base_packet_event_size());
+ {
+ const AggregatedPacketEvent* packet_event = packet_it->get();
+ EXPECT_EQ(kMaxPacketsPerFrame, packet_event->base_packet_event_size());
+ }
++packet_it;
- packet_event = *packet_it;
- EXPECT_EQ(1, packet_event->base_packet_event_size());
+
+ {
+ const AggregatedPacketEvent* packet_event = packet_it->get();
+ EXPECT_EQ(1, packet_event->base_packet_event_size());
+ }
for (int j = 0; j < kMaxEventsPerProto + 1; j++) {
std::unique_ptr<PacketEvent> send_event(new PacketEvent());
@@ -819,14 +831,17 @@ TEST_F(EncodingEventSubscriberTest, MaxEventsPerProto) {
packet_it = packet_events_.begin();
ASSERT_TRUE(packet_it != packet_events_.end());
- packet_event = *packet_it;
-
- EXPECT_EQ(kMaxEventsPerProto,
- packet_event->base_packet_event(0).event_type_size());
+ {
+ const AggregatedPacketEvent* packet_event = packet_it->get();
+ EXPECT_EQ(kMaxEventsPerProto,
+ packet_event->base_packet_event(0).event_type_size());
+ }
++packet_it;
- packet_event = *packet_it;
- EXPECT_EQ(1, packet_event->base_packet_event(0).event_type_size());
+ {
+ const AggregatedPacketEvent* packet_event = packet_it->get();
+ EXPECT_EQ(1, packet_event->base_packet_event(0).event_type_size());
+ }
}
} // namespace cast
diff --git a/chromium/media/cast/logging/serialize_deserialize_test.cc b/chromium/media/cast/logging/serialize_deserialize_test.cc
index d8155a03f65..06dcd5d78ce 100644
--- a/chromium/media/cast/logging/serialize_deserialize_test.cc
+++ b/chromium/media/cast/logging/serialize_deserialize_test.cc
@@ -41,7 +41,7 @@ const int kIdealizedBitratePercentUtilized[] = {9, 9, 9, 15, 36, 38, 35, 40};
const int kMaxSerializedBytes = 10000;
-}
+} // namespace
namespace media {
namespace cast {
@@ -62,7 +62,7 @@ class SerializeDeserializeTest : public ::testing::Test {
int64_t event_time_ms = 0;
// Insert frame and packet events with RTP timestamps 0, 90, 180, ...
for (int i = 0; i < metadata_.num_frame_events(); i++) {
- linked_ptr<AggregatedFrameEvent> frame_event(new AggregatedFrameEvent);
+ auto frame_event = std::make_unique<AggregatedFrameEvent>();
frame_event->set_relative_rtp_timestamp(i * 90);
for (uint32_t event_index = 0; event_index < arraysize(kVideoFrameEvents);
++event_index) {
@@ -82,13 +82,13 @@ class SerializeDeserializeTest : public ::testing::Test {
kIdealizedBitratePercentUtilized[
i % arraysize(kIdealizedBitratePercentUtilized)]);
- frame_event_list_.push_back(frame_event);
+ frame_event_list_.push_back(std::move(frame_event));
}
event_time_ms = 0;
int packet_id = 0;
for (int i = 0; i < metadata_.num_packet_events(); i++) {
- linked_ptr<AggregatedPacketEvent> packet_event(new AggregatedPacketEvent);
+ auto packet_event = std::make_unique<AggregatedPacketEvent>();
packet_event->set_relative_rtp_timestamp(i * 90);
for (int j = 0; j < 10; j++) {
BasePacketEvent* base_event = packet_event->add_base_packet_event();
@@ -102,7 +102,7 @@ class SerializeDeserializeTest : public ::testing::Test {
event_time_ms += 256;
}
}
- packet_event_list_.push_back(packet_event);
+ packet_event_list_.push_back(std::move(packet_event));
}
}
diff --git a/chromium/media/cast/sender/external_video_encoder.cc b/chromium/media/cast/sender/external_video_encoder.cc
index 305af37053b..46f376a3990 100644
--- a/chromium/media/cast/sender/external_video_encoder.cc
+++ b/chromium/media/cast/sender/external_video_encoder.cc
@@ -253,9 +253,7 @@ class ExternalVideoEncoder::VEAClientImpl
// buffers. Package the result in a media::cast::EncodedFrame and post it
// to the Cast MAIN thread via the supplied callback.
void BitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta /* timestamp */) final {
+ const BitstreamBufferMetadata& metadata) final {
DCHECK(task_runner_->RunsTasksInCurrentSequence());
if (bitstream_buffer_id < 0 ||
bitstream_buffer_id >= static_cast<int32_t>(output_buffers_.size())) {
@@ -267,14 +265,14 @@ class ExternalVideoEncoder::VEAClientImpl
}
base::SharedMemory* output_buffer =
output_buffers_[bitstream_buffer_id].get();
- if (payload_size > output_buffer->mapped_size()) {
+ if (metadata.payload_size_bytes > output_buffer->mapped_size()) {
NOTREACHED();
VLOG(1) << "BitstreamBufferReady(): invalid payload_size = "
- << payload_size;
+ << metadata.payload_size_bytes;
NotifyError(media::VideoEncodeAccelerator::kPlatformFailureError);
return;
}
- if (key_frame)
+ if (metadata.key_frame)
key_frame_encountered_ = true;
if (!key_frame_encountered_) {
// Do not send video until we have encountered the first key frame.
@@ -284,16 +282,16 @@ class ExternalVideoEncoder::VEAClientImpl
// TODO(miu): Should |stream_header_| be an std::ostringstream for
// performance reasons?
stream_header_.append(static_cast<const char*>(output_buffer->memory()),
- payload_size);
+ metadata.payload_size_bytes);
} else if (!in_progress_frame_encodes_.empty()) {
const InProgressFrameEncode& request = in_progress_frame_encodes_.front();
std::unique_ptr<SenderEncodedFrame> encoded_frame(
new SenderEncodedFrame());
- encoded_frame->dependency = key_frame ? EncodedFrame::KEY :
- EncodedFrame::DEPENDENT;
+ encoded_frame->dependency =
+ metadata.key_frame ? EncodedFrame::KEY : EncodedFrame::DEPENDENT;
encoded_frame->frame_id = next_frame_id_++;
- if (key_frame)
+ if (metadata.key_frame)
encoded_frame->referenced_frame_id = encoded_frame->frame_id;
else
encoded_frame->referenced_frame_id = encoded_frame->frame_id - 1;
@@ -305,7 +303,8 @@ class ExternalVideoEncoder::VEAClientImpl
stream_header_.clear();
}
encoded_frame->data.append(
- static_cast<const char*>(output_buffer->memory()), payload_size);
+ static_cast<const char*>(output_buffer->memory()),
+ metadata.payload_size_bytes);
DCHECK(!encoded_frame->data.empty()) << "BUG: Encoder must provide data.";
// If FRAME_DURATION metadata was provided in the source VideoFrame,
@@ -340,7 +339,7 @@ class ExternalVideoEncoder::VEAClientImpl
// the following delta frames as well.
// Otherwise, switch back to entropy estimation for the key frame
// and all the following delta frames.
- if (key_frame || key_frame_quantizer_parsable_) {
+ if (metadata.key_frame || key_frame_quantizer_parsable_) {
if (codec_profile_ == media::VP8PROFILE_ANY) {
quantizer = ParseVp8HeaderQuantizer(
reinterpret_cast<const uint8_t*>(encoded_frame->data.data()),
@@ -354,15 +353,15 @@ class ExternalVideoEncoder::VEAClientImpl
}
if (quantizer < 0) {
LOG(ERROR) << "Unable to parse quantizer from encoded "
- << (key_frame ? "key" : "delta")
+ << (metadata.key_frame ? "key" : "delta")
<< " frame, id=" << encoded_frame->frame_id;
- if (key_frame) {
+ if (metadata.key_frame) {
key_frame_quantizer_parsable_ = false;
quantizer = quantizer_estimator_.EstimateForKeyFrame(
*request.video_frame);
}
} else {
- if (key_frame) {
+ if (metadata.key_frame) {
key_frame_quantizer_parsable_ = true;
}
}
diff --git a/chromium/media/cast/sender/frame_sender.cc b/chromium/media/cast/sender/frame_sender.cc
index 1707182c467..6cd4f67d8c2 100644
--- a/chromium/media/cast/sender/frame_sender.cc
+++ b/chromium/media/cast/sender/frame_sender.cc
@@ -20,12 +20,15 @@ namespace media {
namespace cast {
namespace {
-const int kMinSchedulingDelayMs = 1;
-const int kNumAggressiveReportsSentAtStart = 100;
+constexpr int kNumAggressiveReportsSentAtStart = 100;
+constexpr base::TimeDelta kMinSchedulingDelay =
+ base::TimeDelta::FromMilliseconds(1);
+constexpr base::TimeDelta kReceiverProcessTime =
+ base::TimeDelta::FromMilliseconds(250);
// The additional number of frames that can be in-flight when input exceeds the
// maximum frame rate.
-const int kMaxFrameBurst = 5;
+constexpr int kMaxFrameBurst = 5;
} // namespace
@@ -75,6 +78,7 @@ FrameSender::FrameSender(scoped_refptr<CastEnvironment> cast_environment,
picture_lost_at_receiver_(false),
rtp_timebase_(config.rtp_timebase),
is_audio_(config.rtp_payload_type <= RtpPayloadType::AUDIO_LAST),
+ max_ack_delay_(config.max_playout_delay),
weak_factory_(this) {
DCHECK(transport_sender_);
DCHECK_GT(rtp_timebase_, 0);
@@ -106,8 +110,8 @@ void FrameSender::ScheduleNextRtcpReport() {
cast_environment_->PostDelayedTask(
CastEnvironment::MAIN, FROM_HERE,
- base::Bind(&FrameSender::SendRtcpReport, weak_factory_.GetWeakPtr(),
- true),
+ base::BindRepeating(&FrameSender::SendRtcpReport,
+ weak_factory_.GetWeakPtr(), true),
base::TimeDelta::FromMilliseconds(kRtcpReportIntervalMs));
}
@@ -136,9 +140,12 @@ void FrameSender::SendRtcpReport(bool schedule_future_reports) {
ScheduleNextRtcpReport();
}
-void FrameSender::OnMeasuredRoundTripTime(base::TimeDelta rtt) {
- DCHECK(rtt > base::TimeDelta());
- current_round_trip_time_ = rtt;
+void FrameSender::OnMeasuredRoundTripTime(base::TimeDelta round_trip_time) {
+ DCHECK_GT(round_trip_time, base::TimeDelta());
+ current_round_trip_time_ = round_trip_time;
+ max_ack_delay_ = 2 * std::max(current_round_trip_time_, base::TimeDelta()) +
+ kReceiverProcessTime;
+ max_ack_delay_ = std::min(max_ack_delay_, target_playout_delay_);
}
void FrameSender::SetTargetPlayoutDelay(
@@ -155,6 +162,7 @@ void FrameSender::SetTargetPlayoutDelay(
<< target_playout_delay_.InMilliseconds() << " ms to "
<< new_target_playout_delay.InMilliseconds() << " ms.";
target_playout_delay_ = new_target_playout_delay;
+ max_ack_delay_ = std::min(max_ack_delay_, target_playout_delay_);
send_target_playout_delay_ = true;
congestion_control_->UpdateTargetPlayoutDelay(target_playout_delay_);
}
@@ -164,7 +172,7 @@ void FrameSender::ResendCheck() {
DCHECK(!last_send_time_.is_null());
const base::TimeDelta time_since_last_send =
cast_environment_->Clock()->NowTicks() - last_send_time_;
- if (time_since_last_send > target_playout_delay_) {
+ if (time_since_last_send > max_ack_delay_) {
if (latest_acked_frame_id_ == last_sent_frame_id_) {
// Last frame acked, no point in doing anything
} else {
@@ -180,14 +188,12 @@ void FrameSender::ScheduleNextResendCheck() {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
DCHECK(!last_send_time_.is_null());
base::TimeDelta time_to_next =
- last_send_time_ - cast_environment_->Clock()->NowTicks() +
- target_playout_delay_;
- time_to_next = std::max(
- time_to_next, base::TimeDelta::FromMilliseconds(kMinSchedulingDelayMs));
+ last_send_time_ - cast_environment_->Clock()->NowTicks() + max_ack_delay_;
+ time_to_next = std::max(time_to_next, kMinSchedulingDelay);
cast_environment_->PostDelayedTask(
- CastEnvironment::MAIN,
- FROM_HERE,
- base::Bind(&FrameSender::ResendCheck, weak_factory_.GetWeakPtr()),
+ CastEnvironment::MAIN, FROM_HERE,
+ base::BindRepeating(&FrameSender::ResendCheck,
+ weak_factory_.GetWeakPtr()),
time_to_next);
}
@@ -253,6 +259,7 @@ void FrameSender::SendEncodedFrame(
cancel_sending_frames.push_back(id);
}
transport_sender_->CancelSendingFrames(ssrc_, cancel_sending_frames);
+ OnCancelSendingFrames();
}
last_send_time_ = cast_environment_->Clock()->NowTicks();
@@ -325,6 +332,8 @@ void FrameSender::SendEncodedFrame(
transport_sender_->InsertFrame(ssrc_, *encoded_frame);
}
+void FrameSender::OnCancelSendingFrames() {}
+
void FrameSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN));
@@ -418,6 +427,7 @@ void FrameSender::OnReceivedCastFeedback(const RtcpCastMessage& cast_feedback) {
current_round_trip_time_.InMicroseconds());
} while (latest_acked_frame_id_ < cast_feedback.ack_frame_id);
transport_sender_->CancelSendingFrames(ssrc_, frames_to_cancel);
+ OnCancelSendingFrames();
}
}
diff --git a/chromium/media/cast/sender/frame_sender.h b/chromium/media/cast/sender/frame_sender.h
index 177193f0a9e..c4a4aced413 100644
--- a/chromium/media/cast/sender/frame_sender.h
+++ b/chromium/media/cast/sender/frame_sender.h
@@ -3,8 +3,6 @@
// found in the LICENSE file.
//
// This is the base class for an object that send frames to a receiver.
-// TODO(hclam): Refactor such that there is no separate AudioSender vs.
-// VideoSender, and the functionality of both is rolled into this class.
#ifndef MEDIA_CAST_SENDER_FRAME_SENDER_H_
#define MEDIA_CAST_SENDER_FRAME_SENDER_H_
@@ -56,6 +54,9 @@ class FrameSender {
// of sent, unacknowledged frames.
virtual base::TimeDelta GetInFlightMediaDuration() const = 0;
+ // One or more frames were canceled.
+ virtual void OnCancelSendingFrames();
+
protected:
class RtcpClient : public RtcpObserver {
public:
@@ -94,7 +95,7 @@ class FrameSender {
protected:
// Schedule and execute periodic checks for re-sending packets. If no
- // acknowledgements have been received for "too long," AudioSender will
+ // acknowledgements have been received for "too long," FrameSender will
// speculatively re-send certain packets of an unacked frame to kick-start
// re-transmission. This is a last resort tactic to prevent the session from
// getting stuck after a long outage.
@@ -164,7 +165,7 @@ class FrameSender {
// Counts the number of duplicate ACK that are being received. When this
// number reaches a threshold, the sender will take this as a sign that the
// receiver hasn't yet received the first packet of the next frame. In this
- // case, VideoSender will trigger a re-send of the next frame.
+ // case, FrameSender will trigger a re-send of the next frame.
int duplicate_ack_counter_;
// This object controls how we change the bitrate to make sure the
@@ -188,6 +189,10 @@ class FrameSender {
const bool is_audio_;
+ // This is the maximum delay that the sender should get ack from receiver.
+ // Otherwise, sender will call ResendForKickstart().
+ base::TimeDelta max_ack_delay_;
+
// Ring buffers to keep track of recent frame timestamps (both in terms of
// local reference time and RTP media time). These should only be accessed
// through the Record/GetXXX() methods. The index into this ring
diff --git a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
index 41ae937af49..110eb4b6d78 100644
--- a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
+++ b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
@@ -6,7 +6,6 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
-#include "base/command_line.h"
#include "base/containers/queue.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
@@ -51,8 +50,6 @@ class MediaTestSuite : public base::TestSuite {
void MediaTestSuite::Initialize() {
base::TestSuite::Initialize();
- base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
- command_line->AppendSwitch(switches::kEnableInbandTextTracks);
media::InitializeMediaLibrary();
}
@@ -194,6 +191,8 @@ void CreateFrameAndMemsetPlane(VideoFrameFactory* const video_frame_factory) {
class TestPowerSource : public base::PowerMonitorSource {
public:
+ void Shutdown() override {}
+
void GenerateSuspendEvent() {
ProcessPowerEvent(SUSPEND_EVENT);
base::RunLoop().RunUntilIdle();
diff --git a/chromium/media/cast/sender/video_encoder_unittest.cc b/chromium/media/cast/sender/video_encoder_unittest.cc
index 4e7dcb43613..c64f5b0e9bb 100644
--- a/chromium/media/cast/sender/video_encoder_unittest.cc
+++ b/chromium/media/cast/sender/video_encoder_unittest.cc
@@ -277,7 +277,15 @@ class VideoEncoderTest
// A simple test to encode three frames of video, expecting to see one key frame
// followed by two delta frames.
-TEST_P(VideoEncoderTest, GeneratesKeyFrameThenOnlyDeltaFrames) {
+// Fails consistently on official builds: crbug.com/612496
+#ifdef OFFICIAL_BUILD
+#define MAYBE_GeneratesKeyFrameThenOnlyDeltaFrames \
+ DISABLED_GeneratesKeyFrameThenOnlyDeltaFrames
+#else
+#define MAYBE_GeneratesKeyFrameThenOnlyDeltaFrames \
+ GeneratesKeyFrameThenOnlyDeltaFrames
+#endif
+TEST_P(VideoEncoderTest, MAYBE_GeneratesKeyFrameThenOnlyDeltaFrames) {
CreateEncoder();
SetVEAFactoryAutoRespond(true);
@@ -317,7 +325,13 @@ TEST_P(VideoEncoderTest, GeneratesKeyFrameThenOnlyDeltaFrames) {
// changes. See media/cast/receiver/video_decoder_unittest.cc for a complete
// encode/decode cycle of varied frame sizes that actually checks the frame
// content.
-TEST_P(VideoEncoderTest, EncodesVariedFrameSizes) {
+// Fails consistently on official builds: crbug.com/612496
+#ifdef OFFICIAL_BUILD
+#define MAYBE_EncodesVariedFrameSizes DISABLED_EncodesVariedFrameSizes
+#else
+#define MAYBE_EncodesVariedFrameSizes EncodesVariedFrameSizes
+#endif
+TEST_P(VideoEncoderTest, MAYBE_EncodesVariedFrameSizes) {
CreateEncoder();
SetVEAFactoryAutoRespond(true);
@@ -382,7 +396,14 @@ TEST_P(VideoEncoderTest, EncodesVariedFrameSizes) {
// before it has a chance to receive the VEA creation callback. For all other
// encoders, this tests that the encoder can be safely destroyed before the task
// is run that delivers the first EncodedFrame.
-TEST_P(VideoEncoderTest, CanBeDestroyedBeforeVEAIsCreated) {
+// Fails consistently on official builds: crbug.com/612496
+#ifdef OFFICIAL_BUILD
+#define MAYBE_CanBeDestroyedBeforeVEAIsCreated \
+ DISABLED_CanBeDestroyedBeforeVEAIsCreated
+#else
+#define MAYBE_CanBeDestroyedBeforeVEAIsCreated CanBeDestroyedBeforeVEAIsCreated
+#endif
+TEST_P(VideoEncoderTest, MAYBE_CanBeDestroyedBeforeVEAIsCreated) {
CreateEncoder();
// Send a frame to spawn creation of the ExternalVideoEncoder instance.
diff --git a/chromium/media/cdm/BUILD.gn b/chromium/media/cdm/BUILD.gn
index f3dbef75fb6..deff5005c73 100644
--- a/chromium/media/cdm/BUILD.gn
+++ b/chromium/media/cdm/BUILD.gn
@@ -98,12 +98,6 @@ source_set("cdm") {
"cdm_host_files.cc",
"cdm_host_files.h",
]
- deps += [
- # Needed for finding CDM path from CDM adapter path.
- # TODO(xhwang): Remove this dependency when CDM adapter is deprecated.
- # See http://crbug.com/403462
- "//third_party/widevine/cdm:headers",
- ]
}
}
}
diff --git a/chromium/media/cdm/aes_decryptor.cc b/chromium/media/cdm/aes_decryptor.cc
index b1a95211ef6..2a8c76b43c2 100644
--- a/chromium/media/cdm/aes_decryptor.cc
+++ b/chromium/media/cdm/aes_decryptor.cc
@@ -417,7 +417,7 @@ void AesDecryptor::RemoveSession(const std::string& session_id,
// Let message be a message containing or reflecting the record
// of license destruction.
std::vector<uint8_t> message;
- if (it->second != CdmSessionType::TEMPORARY_SESSION) {
+ if (it->second != CdmSessionType::kTemporary) {
// The license release message is specified in the spec:
// https://w3c.github.io/encrypted-media/#clear-key-release-format.
KeyIdList key_ids;
@@ -486,6 +486,8 @@ void AesDecryptor::RegisterNewKeyCB(StreamType stream_type,
void AesDecryptor::Decrypt(StreamType stream_type,
scoped_refptr<DecoderBuffer> encrypted,
const DecryptCB& decrypt_cb) {
+ DVLOG(3) << __func__ << ": " << encrypted->AsHumanReadableString();
+
if (!encrypted->decrypt_config()) {
// If there is no DecryptConfig, then the data is unencrypted so return it
// immediately.
@@ -575,7 +577,7 @@ std::string AesDecryptor::GetSessionStateAsJWK(const std::string& session_id) {
}
}
}
- return GenerateJWKSet(keys, CdmSessionType::PERSISTENT_LICENSE_SESSION);
+ return GenerateJWKSet(keys, CdmSessionType::kPersistentLicense);
}
bool AesDecryptor::AddDecryptionKey(const std::string& session_id,
diff --git a/chromium/media/cdm/aes_decryptor.h b/chromium/media/cdm/aes_decryptor.h
index 943e76d87cc..3c6cfcb873d 100644
--- a/chromium/media/cdm/aes_decryptor.h
+++ b/chromium/media/cdm/aes_decryptor.h
@@ -85,7 +85,9 @@ class MEDIA_EXPORT AesDecryptor : public ContentDecryptionModule,
void DeinitializeDecoder(StreamType stream_type) override;
private:
+ // Testing classes that needs to manipulate internal states for testing.
friend class ClearKeyPersistentSessionCdm;
+ friend class ClearKeyCdmProxy;
// Internally this class supports persistent license type sessions so that
// it can be used by ClearKeyPersistentSessionCdm. The following methods
diff --git a/chromium/media/cdm/aes_decryptor_unittest.cc b/chromium/media/cdm/aes_decryptor_unittest.cc
index cb06e066713..efeaee1f715 100644
--- a/chromium/media/cdm/aes_decryptor_unittest.cc
+++ b/chromium/media/cdm/aes_decryptor_unittest.cc
@@ -367,7 +367,7 @@ class AesDecryptorTest : public testing::TestWithParam<TestType> {
DCHECK(!key_id.empty());
EXPECT_CALL(cdm_client_,
OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
- cdm_->CreateSessionAndGenerateRequest(CdmSessionType::TEMPORARY_SESSION,
+ cdm_->CreateSessionAndGenerateRequest(CdmSessionType::kTemporary,
EmeInitDataType::WEBM, key_id,
CreateSessionPromise(RESOLVED));
// This expects the promise to be called synchronously, which is the case
@@ -519,13 +519,13 @@ class AesDecryptorTest : public testing::TestWithParam<TestType> {
TEST_P(AesDecryptorTest, CreateSessionWithEmptyInitData) {
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::WEBM,
- std::vector<uint8_t>(), CreateSessionPromise(REJECTED));
+ CdmSessionType::kTemporary, EmeInitDataType::WEBM, std::vector<uint8_t>(),
+ CreateSessionPromise(REJECTED));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::CENC,
- std::vector<uint8_t>(), CreateSessionPromise(REJECTED));
+ CdmSessionType::kTemporary, EmeInitDataType::CENC, std::vector<uint8_t>(),
+ CreateSessionPromise(REJECTED));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::KEYIDS,
+ CdmSessionType::kTemporary, EmeInitDataType::KEYIDS,
std::vector<uint8_t>(), CreateSessionPromise(REJECTED));
}
@@ -534,41 +534,41 @@ TEST_P(AesDecryptorTest, CreateSessionWithVariousLengthInitData_WebM) {
init_data.resize(1);
EXPECT_CALL(cdm_client_, OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::WEBM,
+ CdmSessionType::kTemporary, EmeInitDataType::WEBM,
std::vector<uint8_t>(init_data), CreateSessionPromise(RESOLVED));
init_data.resize(16); // The expected size.
EXPECT_CALL(cdm_client_, OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::WEBM,
+ CdmSessionType::kTemporary, EmeInitDataType::WEBM,
std::vector<uint8_t>(init_data), CreateSessionPromise(RESOLVED));
init_data.resize(512);
EXPECT_CALL(cdm_client_, OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::WEBM,
+ CdmSessionType::kTemporary, EmeInitDataType::WEBM,
std::vector<uint8_t>(init_data), CreateSessionPromise(RESOLVED));
init_data.resize(513);
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::WEBM,
+ CdmSessionType::kTemporary, EmeInitDataType::WEBM,
std::vector<uint8_t>(init_data), CreateSessionPromise(REJECTED));
}
TEST_P(AesDecryptorTest, MultipleCreateSession) {
EXPECT_CALL(cdm_client_, OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::WEBM,
+ CdmSessionType::kTemporary, EmeInitDataType::WEBM,
std::vector<uint8_t>(1), CreateSessionPromise(RESOLVED));
EXPECT_CALL(cdm_client_, OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::WEBM,
+ CdmSessionType::kTemporary, EmeInitDataType::WEBM,
std::vector<uint8_t>(1), CreateSessionPromise(RESOLVED));
EXPECT_CALL(cdm_client_, OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::WEBM,
+ CdmSessionType::kTemporary, EmeInitDataType::WEBM,
std::vector<uint8_t>(1), CreateSessionPromise(RESOLVED));
}
@@ -590,7 +590,7 @@ TEST_P(AesDecryptorTest, CreateSessionWithCencInitData) {
EXPECT_CALL(cdm_client_, OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::CENC,
+ CdmSessionType::kTemporary, EmeInitDataType::CENC,
std::vector<uint8_t>(init_data, init_data + arraysize(init_data)),
CreateSessionPromise(RESOLVED));
}
@@ -601,7 +601,7 @@ TEST_P(AesDecryptorTest, CreateSessionWithKeyIdsInitData) {
EXPECT_CALL(cdm_client_, OnSessionMessage(NotEmpty(), _, IsJSONDictionary()));
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, EmeInitDataType::KEYIDS,
+ CdmSessionType::kTemporary, EmeInitDataType::KEYIDS,
std::vector<uint8_t>(init_data, init_data + arraysize(init_data) - 1),
CreateSessionPromise(RESOLVED));
}
diff --git a/chromium/media/cdm/api/content_decryption_module.h b/chromium/media/cdm/api/content_decryption_module.h
index 0dde3ff4c91..85ffd01ac2e 100644
--- a/chromium/media/cdm/api/content_decryption_module.h
+++ b/chromium/media/cdm/api/content_decryption_module.h
@@ -420,7 +420,7 @@ CHECK_TYPE(InitDataType, 4, 4);
enum SessionType : uint32_t {
kTemporary = 0,
kPersistentLicense = 1,
- kPersistentKeyRelease = 2
+ kPersistentUsageRecord = 2
};
CHECK_TYPE(SessionType, 4, 4);
diff --git a/chromium/media/cdm/api/content_decryption_module_proxy.h b/chromium/media/cdm/api/content_decryption_module_proxy.h
index f41b9f73f98..78751066279 100644
--- a/chromium/media/cdm/api/content_decryption_module_proxy.h
+++ b/chromium/media/cdm/api/content_decryption_module_proxy.h
@@ -80,7 +80,7 @@ class CDM_CLASS_API CdmProxyClient {
enum Protocol : uint32_t {
kNone = 0, // No protocol supported. Can be used in failure cases.
- kIntelConvergedSecurityAndManageabilityEngine, // Method using Intel CSME.
+ kIntel, // Method using Intel CSME.
// There will be more values in the future e.g. kD3D11RsaHardware,
// kD3D11RsaSoftware to use the D3D11 RSA method.
};
diff --git a/chromium/media/cdm/cdm_adapter.cc b/chromium/media/cdm/cdm_adapter.cc
index 6e9cdfb8c6a..c2bad3057e7 100644
--- a/chromium/media/cdm/cdm_adapter.cc
+++ b/chromium/media/cdm/cdm_adapter.cc
@@ -84,12 +84,12 @@ cdm::HdcpVersion ToCdmHdcpVersion(HdcpVersion hdcp_version) {
cdm::SessionType ToCdmSessionType(CdmSessionType session_type) {
switch (session_type) {
- case CdmSessionType::TEMPORARY_SESSION:
+ case CdmSessionType::kTemporary:
return cdm::kTemporary;
- case CdmSessionType::PERSISTENT_LICENSE_SESSION:
+ case CdmSessionType::kPersistentLicense:
return cdm::kPersistentLicense;
- case CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION:
- return cdm::kPersistentKeyRelease;
+ case CdmSessionType::kPersistentUsageRecord:
+ return cdm::kPersistentUsageRecord;
}
NOTREACHED() << "Unexpected session type: " << static_cast<int>(session_type);
@@ -318,11 +318,8 @@ cdm::EncryptionScheme ToCdmEncryptionScheme(const EncryptionScheme& scheme) {
case EncryptionScheme::CIPHER_MODE_UNENCRYPTED:
return cdm::EncryptionScheme::kUnencrypted;
case EncryptionScheme::CIPHER_MODE_AES_CTR:
- if (!scheme.pattern().IsInEffect())
- return cdm::EncryptionScheme::kCenc;
- break;
+ return cdm::EncryptionScheme::kCenc;
case EncryptionScheme::CIPHER_MODE_AES_CBC:
- // Pattern should be required for 'cbcs' but is currently optional.
return cdm::EncryptionScheme::kCbcs;
}
@@ -686,7 +683,10 @@ Decryptor* CdmAdapter::GetDecryptor() {
// When using HW secure codecs, we cannot and should not use the CDM instance
// to do decrypt and/or decode. Instead, we should use the CdmProxy.
- if (cdm_config_.use_hw_secure_codecs)
+ // TODO(xhwang): Fix External Clear Key key system to be able to set
+ // |use_hw_secure_codecs| so that we don't have to check both.
+ // TODO(xhwang): Update this logic to support transcryption.
+ if (cdm_config_.use_hw_secure_codecs || cdm_proxy_created_)
return nullptr;
return this;
diff --git a/chromium/media/cdm/cdm_adapter_unittest.cc b/chromium/media/cdm/cdm_adapter_unittest.cc
index 8ca37d995f8..4ecc77cfec7 100644
--- a/chromium/media/cdm/cdm_adapter_unittest.cc
+++ b/chromium/media/cdm/cdm_adapter_unittest.cc
@@ -229,7 +229,7 @@ class CdmAdapterTestWithClearKeyCdm : public CdmAdapterTestBase {
}
cdm_->CreateSessionAndGenerateRequest(
- CdmSessionType::TEMPORARY_SESSION, data_type, key_id,
+ CdmSessionType::kTemporary, data_type, key_id,
CreateSessionPromise(expected_result));
RunUntilIdle();
}
@@ -241,7 +241,7 @@ class CdmAdapterTestWithClearKeyCdm : public CdmAdapterTestBase {
DCHECK(!session_id.empty());
ASSERT_EQ(expected_result, FAILURE) << "LoadSession not supported.";
- cdm_->LoadSession(CdmSessionType::TEMPORARY_SESSION, session_id,
+ cdm_->LoadSession(CdmSessionType::kTemporary, session_id,
CreateSessionPromise(expected_result));
RunUntilIdle();
}
diff --git a/chromium/media/cdm/cdm_proxy.h b/chromium/media/cdm/cdm_proxy.h
index d735d20cf6f..822e1581f89 100644
--- a/chromium/media/cdm/cdm_proxy.h
+++ b/chromium/media/cdm/cdm_proxy.h
@@ -38,17 +38,17 @@ class MEDIA_EXPORT CdmProxy {
enum class Status {
kOk,
kFail,
- kMax = kFail,
+ kMaxValue = kFail,
};
enum class Protocol {
// No supported protocol. Used in failure cases.
kNone,
// Method using Intel CSME.
- kIntelConvergedSecurityAndManageabilityEngine,
+ kIntel,
// There will be more values in the future e.g. kD3D11RsaHardware,
// kD3D11RsaSoftware to use the D3D11 RSA method.
- kMax = kIntelConvergedSecurityAndManageabilityEngine,
+ kMaxValue = kIntel,
};
enum class Function {
@@ -56,7 +56,7 @@ class MEDIA_EXPORT CdmProxy {
// ID3D11VideoContext::NegotiateCryptoSessionKeyExchange.
kIntelNegotiateCryptoSessionKeyExchange,
// There will be more values in the future e.g. for D3D11 RSA method.
- kMax = kIntelNegotiateCryptoSessionKeyExchange,
+ kMaxValue = kIntelNegotiateCryptoSessionKeyExchange,
};
CdmProxy();
diff --git a/chromium/media/cdm/cenc_decryptor_unittest.cc b/chromium/media/cdm/cenc_decryptor_unittest.cc
index 2fa6b6900e1..441c2c41b7a 100644
--- a/chromium/media/cdm/cenc_decryptor_unittest.cc
+++ b/chromium/media/cdm/cenc_decryptor_unittest.cc
@@ -129,7 +129,8 @@ TEST_F(CencDecryptorTest, OneBlock) {
auto encrypted_block = Encrypt(one_block_, *key_, iv_);
// Only 1 subsample, all encrypted data.
- std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+ std::vector<SubsampleEntry> subsamples = {
+ {0, static_cast<uint32_t>(encrypted_block.size())}};
auto encrypted_buffer =
CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
@@ -140,7 +141,8 @@ TEST_F(CencDecryptorTest, ExtraData) {
auto encrypted_block = Encrypt(one_block_, *key_, iv_);
// Only 1 subsample, all encrypted data.
- std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+ std::vector<SubsampleEntry> subsamples = {
+ {0, static_cast<uint32_t>(encrypted_block.size())}};
auto encrypted_buffer =
CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
@@ -179,7 +181,8 @@ TEST_F(CencDecryptorTest, BadSubsamples) {
auto encrypted_block = Encrypt(one_block_, *key_, iv_);
// Subsample size > data size.
- std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size() + 1}};
+ std::vector<SubsampleEntry> subsamples = {
+ {0, static_cast<uint32_t>(encrypted_block.size() + 1)}};
auto encrypted_buffer =
CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
@@ -189,7 +192,8 @@ TEST_F(CencDecryptorTest, BadSubsamples) {
TEST_F(CencDecryptorTest, InvalidIv) {
auto encrypted_block = Encrypt(one_block_, *key_, iv_);
- std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+ std::vector<SubsampleEntry> subsamples = {
+ {0, static_cast<uint32_t>(encrypted_block.size())}};
// Use an invalid IV for decryption. Call should succeed, but return
// something other than the original data.
@@ -204,7 +208,8 @@ TEST_F(CencDecryptorTest, InvalidKey) {
crypto::SymmetricKey::AES, std::string(arraysize(kKey), 'b'));
auto encrypted_block = Encrypt(one_block_, *key_, iv_);
- std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+ std::vector<SubsampleEntry> subsamples = {
+ {0, static_cast<uint32_t>(encrypted_block.size())}};
// Use a different key for decryption. Call should succeed, but return
// something other than the original data.
@@ -217,7 +222,8 @@ TEST_F(CencDecryptorTest, PartialBlock) {
auto encrypted_block = Encrypt(partial_block_, *key_, iv_);
// Only 1 subsample, all encrypted data.
- std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+ std::vector<SubsampleEntry> subsamples = {
+ {0, static_cast<uint32_t>(encrypted_block.size())}};
auto encrypted_buffer =
CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
@@ -230,7 +236,9 @@ TEST_F(CencDecryptorTest, MultipleSubsamples) {
// Treat as 3 subsamples.
std::vector<SubsampleEntry> subsamples = {
- {0, one_block_.size()}, {0, one_block_.size()}, {0, one_block_.size()}};
+ {0, static_cast<uint32_t>(one_block_.size())},
+ {0, static_cast<uint32_t>(one_block_.size())},
+ {0, static_cast<uint32_t>(one_block_.size())}};
auto encrypted_buffer =
CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
@@ -258,9 +266,11 @@ TEST_F(CencDecryptorTest, MultipleSubsamplesWithClearBytes) {
auto expected_result = Combine(
{one_block_, partial_block_, partial_block_, one_block_, partial_block_});
std::vector<SubsampleEntry> subsamples = {
- {one_block_.size(), partial_block_.size()},
- {partial_block_.size(), one_block_.size()},
- {partial_block_.size(), 0}};
+ {static_cast<uint32_t>(one_block_.size()),
+ static_cast<uint32_t>(partial_block_.size())},
+ {static_cast<uint32_t>(partial_block_.size()),
+ static_cast<uint32_t>(one_block_.size())},
+ {static_cast<uint32_t>(partial_block_.size()), 0}};
auto encrypted_buffer = CreateEncryptedBuffer(input_data, iv_, subsamples);
EXPECT_EQ(expected_result, DecryptWithKey(encrypted_buffer, *key_));
diff --git a/chromium/media/cdm/json_web_key.cc b/chromium/media/cdm/json_web_key.cc
index d851ff30ef6..aec3796e3cb 100644
--- a/chromium/media/cdm/json_web_key.cc
+++ b/chromium/media/cdm/json_web_key.cc
@@ -32,7 +32,7 @@ const char kKeyIdsTag[] = "kids";
const char kTypeTag[] = "type";
const char kTemporarySession[] = "temporary";
const char kPersistentLicenseSession[] = "persistent-license";
-const char kPersistentReleaseMessageSession[] = "persistent-release-message";
+const char kPersistentUsageRecordSession[] = "persistent-usage-record";
static std::string ShortenTo64Characters(const std::string& input) {
// Convert |input| into a string with escaped characters replacing any
@@ -100,14 +100,14 @@ std::string GenerateJWKSet(const KeyIdAndKeyPairs& keys,
base::DictionaryValue jwk_set;
jwk_set.Set(kKeysTag, std::move(list));
switch (session_type) {
- case CdmSessionType::TEMPORARY_SESSION:
+ case CdmSessionType::kTemporary:
jwk_set.SetString(kTypeTag, kTemporarySession);
break;
- case CdmSessionType::PERSISTENT_LICENSE_SESSION:
+ case CdmSessionType::kPersistentLicense:
jwk_set.SetString(kTypeTag, kPersistentLicenseSession);
break;
- case CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION:
- jwk_set.SetString(kTypeTag, kPersistentReleaseMessageSession);
+ case CdmSessionType::kPersistentUsageRecord:
+ jwk_set.SetString(kTypeTag, kPersistentUsageRecordSession);
break;
}
@@ -212,16 +212,16 @@ bool ExtractKeysFromJWKSet(const std::string& jwk_set,
std::string session_type_id;
if (!dictionary->Get(kTypeTag, &value)) {
// Not specified, so use the default type.
- *session_type = CdmSessionType::TEMPORARY_SESSION;
+ *session_type = CdmSessionType::kTemporary;
} else if (!value->GetAsString(&session_type_id)) {
DVLOG(1) << "Invalid '" << kTypeTag << "' value";
return false;
} else if (session_type_id == kTemporarySession) {
- *session_type = CdmSessionType::TEMPORARY_SESSION;
+ *session_type = CdmSessionType::kTemporary;
} else if (session_type_id == kPersistentLicenseSession) {
- *session_type = CdmSessionType::PERSISTENT_LICENSE_SESSION;
- } else if (session_type_id == kPersistentReleaseMessageSession) {
- *session_type = CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION;
+ *session_type = CdmSessionType::kPersistentLicense;
+ } else if (session_type_id == kPersistentUsageRecordSession) {
+ *session_type = CdmSessionType::kPersistentUsageRecord;
} else {
DVLOG(1) << "Invalid '" << kTypeTag << "' value: " << session_type_id;
return false;
@@ -317,14 +317,14 @@ void CreateLicenseRequest(const KeyIdList& key_ids,
request->Set(kKeyIdsTag, std::move(list));
switch (session_type) {
- case CdmSessionType::TEMPORARY_SESSION:
+ case CdmSessionType::kTemporary:
request->SetString(kTypeTag, kTemporarySession);
break;
- case CdmSessionType::PERSISTENT_LICENSE_SESSION:
+ case CdmSessionType::kPersistentLicense:
request->SetString(kTypeTag, kPersistentLicenseSession);
break;
- case CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION:
- request->SetString(kTypeTag, kPersistentReleaseMessageSession);
+ case CdmSessionType::kPersistentUsageRecord:
+ request->SetString(kTypeTag, kPersistentUsageRecordSession);
break;
}
diff --git a/chromium/media/cdm/json_web_key_unittest.cc b/chromium/media/cdm/json_web_key_unittest.cc
index 689d781ade3..f0c4de25a29 100644
--- a/chromium/media/cdm/json_web_key_unittest.cc
+++ b/chromium/media/cdm/json_web_key_unittest.cc
@@ -112,22 +112,22 @@ TEST_F(JSONWebKeyTest, GenerateJWKSet) {
EXPECT_EQ(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"}],\"type\":"
"\"temporary\"}",
- GenerateJWKSet(keys, CdmSessionType::TEMPORARY_SESSION));
+ GenerateJWKSet(keys, CdmSessionType::kTemporary));
keys.push_back(
MakeKeyIdAndKeyPair(data2, arraysize(data2), data2, arraysize(data2)));
EXPECT_EQ(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"},{\"k\":"
"\"AQIDBA\",\"kid\":\"AQIDBA\",\"kty\":\"oct\"}],\"type\":\"persistent-"
"license\"}",
- GenerateJWKSet(keys, CdmSessionType::PERSISTENT_LICENSE_SESSION));
+ GenerateJWKSet(keys, CdmSessionType::kPersistentLicense));
keys.push_back(
MakeKeyIdAndKeyPair(data3, arraysize(data3), data3, arraysize(data3)));
EXPECT_EQ(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"},{\"k\":"
"\"AQIDBA\",\"kid\":\"AQIDBA\",\"kty\":\"oct\"},{\"k\":"
"\"AQIDBAUGBwgJCgsMDQ4PEA\",\"kid\":\"AQIDBAUGBwgJCgsMDQ4PEA\",\"kty\":"
- "\"oct\"}],\"type\":\"persistent-release-message\"}",
- GenerateJWKSet(keys, CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION));
+ "\"oct\"}],\"type\":\"persistent-usage-record\"}",
+ GenerateJWKSet(keys, CdmSessionType::kPersistentUsageRecord));
}
TEST_F(JSONWebKeyTest, ExtractValidJWKKeys) {
@@ -398,26 +398,26 @@ TEST_F(JSONWebKeyTest, Alg) {
TEST_F(JSONWebKeyTest, CdmSessionType) {
ExtractSessionTypeAndExpect(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"}]}", true,
- CdmSessionType::TEMPORARY_SESSION);
+ CdmSessionType::kTemporary);
ExtractSessionTypeAndExpect(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"}],\"type\":"
"\"temporary\"}",
- true, CdmSessionType::TEMPORARY_SESSION);
+ true, CdmSessionType::kTemporary);
ExtractSessionTypeAndExpect(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"}],\"type\":"
"\"persistent-license\"}",
- true, CdmSessionType::PERSISTENT_LICENSE_SESSION);
+ true, CdmSessionType::kPersistentLicense);
ExtractSessionTypeAndExpect(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"}],\"type\":"
- "\"persistent-release-message\"}",
- true, CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION);
+ "\"persistent-usage-record\"}",
+ true, CdmSessionType::kPersistentUsageRecord);
ExtractSessionTypeAndExpect(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"}],\"type\":"
"\"unknown\"}",
- false, CdmSessionType::TEMPORARY_SESSION);
+ false, CdmSessionType::kTemporary);
ExtractSessionTypeAndExpect(
"{\"keys\":[{\"k\":\"AQI\",\"kid\":\"AQI\",\"kty\":\"oct\"}],\"type\":3}",
- false, CdmSessionType::TEMPORARY_SESSION);
+ false, CdmSessionType::kTemporary);
}
TEST_F(JSONWebKeyTest, CreateLicense) {
@@ -426,21 +426,18 @@ TEST_F(JSONWebKeyTest, CreateLicense) {
const uint8_t data3[] = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10};
- CreateLicenseAndExpect(data1, arraysize(data1),
- CdmSessionType::TEMPORARY_SESSION,
+ CreateLicenseAndExpect(data1, arraysize(data1), CdmSessionType::kTemporary,
"{\"kids\":[\"AQI\"],\"type\":\"temporary\"}");
CreateLicenseAndExpect(
- data1, arraysize(data1), CdmSessionType::PERSISTENT_LICENSE_SESSION,
+ data1, arraysize(data1), CdmSessionType::kPersistentLicense,
"{\"kids\":[\"AQI\"],\"type\":\"persistent-license\"}");
CreateLicenseAndExpect(
- data1, arraysize(data1),
- CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION,
- "{\"kids\":[\"AQI\"],\"type\":\"persistent-release-message\"}");
- CreateLicenseAndExpect(data2, arraysize(data2),
- CdmSessionType::TEMPORARY_SESSION,
+ data1, arraysize(data1), CdmSessionType::kPersistentUsageRecord,
+ "{\"kids\":[\"AQI\"],\"type\":\"persistent-usage-record\"}");
+ CreateLicenseAndExpect(data2, arraysize(data2), CdmSessionType::kTemporary,
"{\"kids\":[\"AQIDBA\"],\"type\":\"temporary\"}");
CreateLicenseAndExpect(data3, arraysize(data3),
- CdmSessionType::PERSISTENT_LICENSE_SESSION,
+ CdmSessionType::kPersistentLicense,
"{\"kids\":[\"AQIDBAUGBwgJCgsMDQ4PEA\"],\"type\":"
"\"persistent-license\"}");
}
@@ -506,8 +503,7 @@ TEST_F(JSONWebKeyTest, Base64UrlEncoding) {
EXPECT_EQ(encoded_text.find('-'), std::string::npos);
EXPECT_EQ(encoded_text.find('_'), std::string::npos);
- CreateLicenseAndExpect(data1, arraysize(data1),
- CdmSessionType::TEMPORARY_SESSION,
+ CreateLicenseAndExpect(data1, arraysize(data1), CdmSessionType::kTemporary,
"{\"kids\":[\"-_37_fv9-w\"],\"type\":\"temporary\"}");
ExtractKeyFromLicenseAndExpect(
@@ -526,7 +522,7 @@ TEST_F(JSONWebKeyTest, MultipleKeys) {
key_ids.push_back(std::vector<uint8_t>(data1, data1 + arraysize(data1)));
key_ids.push_back(std::vector<uint8_t>(data2, data2 + arraysize(data2)));
key_ids.push_back(std::vector<uint8_t>(data3, data3 + arraysize(data3)));
- CreateLicenseRequest(key_ids, CdmSessionType::TEMPORARY_SESSION, &result);
+ CreateLicenseRequest(key_ids, CdmSessionType::kTemporary, &result);
std::string s(result.begin(), result.end());
EXPECT_EQ(
"{\"kids\":[\"AQI\",\"AQIDBA\",\"AQIDBAUGBwgJCgsMDQ4PEA\"],\"type\":"
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn b/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
index c6fe179fc73..932e2bd5a34 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
@@ -14,8 +14,8 @@ loadable_module("clear_key_cdm") {
"cdm_file_adapter.h",
"cdm_file_io_test.cc",
"cdm_file_io_test.h",
- "cdm_proxy_test.cc",
- "cdm_proxy_test.h",
+ "cdm_proxy_handler.cc",
+ "cdm_proxy_handler.h",
"cdm_video_decoder.cc",
"cdm_video_decoder.h",
"clear_key_cdm.cc",
@@ -32,7 +32,6 @@ loadable_module("clear_key_cdm") {
deps = [
":cdm_proxy_common",
"//base",
- "//build/config:exe_and_shlib_deps",
"//media", # For media::AudioTimestampHelper
"//media:shared_memory_support", # For media::AudioBus.
"//media/cdm:cdm_api", # For content_decryption_module.h
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_handler.cc
index 152539f4bd6..ebcb163a7fa 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_handler.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.h"
+#include "media/cdm/library_cdm/clear_key_cdm/cdm_proxy_handler.h"
#include <stdint.h>
#include <algorithm>
@@ -14,43 +14,48 @@
namespace media {
-CdmProxyTest::CdmProxyTest(CdmHostProxy* cdm_host_proxy)
+CdmProxyHandler::CdmProxyHandler(CdmHostProxy* cdm_host_proxy)
: cdm_host_proxy_(cdm_host_proxy) {}
-CdmProxyTest::~CdmProxyTest() {}
+CdmProxyHandler::~CdmProxyHandler() {}
-void CdmProxyTest::Run(CompletionCB completion_cb) {
+void CdmProxyHandler::Initialize(InitCB init_cb) {
DVLOG(1) << __func__;
- completion_cb_ = std::move(completion_cb);
+ init_cb_ = std::move(init_cb);
cdm_proxy_ = cdm_host_proxy_->RequestCdmProxy(this);
if (!cdm_proxy_) {
- OnTestComplete(false);
+ FinishInitialization(false);
return;
}
cdm_proxy_->Initialize();
}
-void CdmProxyTest::OnTestComplete(bool success) {
+void CdmProxyHandler::SetKey(const std::vector<uint8_t>& response) {
+ cdm_proxy_->SetKey(crypto_session_id_, nullptr, 0, response.data(),
+ response.size());
+}
+
+void CdmProxyHandler::FinishInitialization(bool success) {
DVLOG(1) << __func__ << ": success = " << success;
- std::move(completion_cb_).Run(success);
+ std::move(init_cb_).Run(success);
}
-void CdmProxyTest::OnInitialized(Status status,
- Protocol protocol,
- uint32_t crypto_session_id) {
+void CdmProxyHandler::OnInitialized(Status status,
+ Protocol protocol,
+ uint32_t crypto_session_id) {
DVLOG(1) << __func__ << ": status = " << status;
if (status != Status::kOk ||
crypto_session_id != kClearKeyCdmProxyCryptoSessionId) {
- OnTestComplete(false);
+ FinishInitialization(false);
return;
}
// Only one CdmProxy can be created during the lifetime of the CDM instance.
if (cdm_host_proxy_->RequestCdmProxy(this)) {
- OnTestComplete(false);
+ FinishInitialization(false);
return;
}
@@ -59,15 +64,15 @@ void CdmProxyTest::OnInitialized(Status status,
kClearKeyCdmProxyInputData.size(), 0);
}
-void CdmProxyTest::OnProcessed(Status status,
- const uint8_t* output_data,
- uint32_t output_data_size) {
+void CdmProxyHandler::OnProcessed(Status status,
+ const uint8_t* output_data,
+ uint32_t output_data_size) {
DVLOG(1) << __func__ << ": status = " << status;
if (status != Status::kOk ||
!std::equal(output_data, output_data + output_data_size,
kClearKeyCdmProxyOutputData.begin())) {
- OnTestComplete(false);
+ FinishInitialization(false);
return;
}
@@ -75,21 +80,21 @@ void CdmProxyTest::OnProcessed(Status status,
kClearKeyCdmProxyInputData.size());
}
-void CdmProxyTest::OnMediaCryptoSessionCreated(Status status,
- uint32_t crypto_session_id,
- uint64_t output_data) {
+void CdmProxyHandler::OnMediaCryptoSessionCreated(Status status,
+ uint32_t crypto_session_id,
+ uint64_t output_data) {
DVLOG(1) << __func__ << ": status = " << status;
if (status != Status::kOk ||
crypto_session_id != kClearKeyCdmProxyMediaCryptoSessionId) {
- OnTestComplete(false);
+ FinishInitialization(false);
return;
}
- OnTestComplete(true);
+ FinishInitialization(true);
}
-void CdmProxyTest::NotifyHardwareReset() {
+void CdmProxyHandler::NotifyHardwareReset() {
DVLOG(1) << __func__;
NOTREACHED();
}
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.h b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_handler.h
index 27237be66ec..dd5badb9952 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_handler.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_PROXY_TEST_H_
-#define MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_PROXY_TEST_H_
+#ifndef MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_PROXY_HANDLER_H_
+#define MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_PROXY_HANDLER_H_
#include "base/callback.h"
#include "base/macros.h"
@@ -13,18 +13,23 @@ namespace media {
class CdmHostProxy;
-class CdmProxyTest : public cdm::CdmProxyClient {
+class CdmProxyHandler : public cdm::CdmProxyClient {
public:
- using CompletionCB = base::OnceCallback<void(bool success)>;
+ using InitCB = base::OnceCallback<void(bool success)>;
- explicit CdmProxyTest(CdmHostProxy* cdm_host_proxy);
- ~CdmProxyTest() override;
+ explicit CdmProxyHandler(CdmHostProxy* cdm_host_proxy);
+ ~CdmProxyHandler() override;
- // Runs the test and returns the test result through |completion_cb|.
- void Run(CompletionCB completion_cb);
+ // Initializes the CdmProxyHandler and returns the result through |init_cb|.
+ // This will request and initialize the CdmProxy, create media crypto session
+ // and do some trivial procesing for better test coverage.
+ void Initialize(InitCB init_cb);
+
+ // Push a response that contains a license to the CdmProxy.
+ void SetKey(const std::vector<uint8_t>& response);
private:
- void OnTestComplete(bool success);
+ void FinishInitialization(bool success);
// cdm::CdmProxyClient implementation.
void OnInitialized(Status status,
@@ -39,12 +44,13 @@ class CdmProxyTest : public cdm::CdmProxyClient {
void NotifyHardwareReset() final;
CdmHostProxy* const cdm_host_proxy_ = nullptr;
- CompletionCB completion_cb_;
+ InitCB init_cb_;
cdm::CdmProxy* cdm_proxy_ = nullptr;
+ uint32_t crypto_session_id_ = 0u;
- DISALLOW_COPY_AND_ASSIGN(CdmProxyTest);
+ DISALLOW_COPY_AND_ASSIGN(CdmProxyHandler);
};
} // namespace media
-#endif // MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_PROXY_TEST_H_
+#endif // MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_PROXY_HANDLER_H_
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
index 27c7940acc8..35f55902eff 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
@@ -27,7 +27,7 @@
#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "media/cdm/library_cdm/cdm_host_proxy_impl.h"
#include "media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.h"
+#include "media/cdm/library_cdm/clear_key_cdm/cdm_proxy_handler.h"
#include "media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.h"
#include "media/media_buildflags.h"
@@ -65,8 +65,8 @@ const char kExternalClearKeyStorageIdTestKeySystem[] =
"org.chromium.externalclearkey.storageidtest";
const char kExternalClearKeyDifferentGuidTestKeySystem[] =
"org.chromium.externalclearkey.differentguid";
-const char kExternalClearKeyCdmProxyTestKeySystem[] =
- "org.chromium.externalclearkey.cdmproxytest";
+const char kExternalClearKeyCdmProxyKeySystem[] =
+ "org.chromium.externalclearkey.cdmproxy";
const int64_t kSecondsPerMinute = 60;
const int64_t kMsPerSecond = 1000;
@@ -151,14 +151,14 @@ static cdm::Exception ConvertException(
static media::CdmSessionType ConvertSessionType(cdm::SessionType session_type) {
switch (session_type) {
case cdm::kTemporary:
- return media::CdmSessionType::TEMPORARY_SESSION;
+ return media::CdmSessionType::kTemporary;
case cdm::kPersistentLicense:
- return media::CdmSessionType::PERSISTENT_LICENSE_SESSION;
- case cdm::kPersistentKeyRelease:
- return media::CdmSessionType::PERSISTENT_RELEASE_MESSAGE_SESSION;
+ return media::CdmSessionType::kPersistentLicense;
+ case cdm::kPersistentUsageRecord:
+ return media::CdmSessionType::kPersistentUsageRecord;
}
NOTREACHED();
- return media::CdmSessionType::TEMPORARY_SESSION;
+ return media::CdmSessionType::kTemporary;
}
static media::EmeInitDataType ConvertInitDataType(
@@ -264,7 +264,7 @@ void* CreateCdmInstance(int cdm_interface_version,
key_system_string != kExternalClearKeyVerifyCdmHostTestKeySystem &&
key_system_string != kExternalClearKeyStorageIdTestKeySystem &&
key_system_string != kExternalClearKeyDifferentGuidTestKeySystem &&
- key_system_string != kExternalClearKeyCdmProxyTestKeySystem) {
+ key_system_string != kExternalClearKeyCdmProxyKeySystem) {
DVLOG(1) << "Unsupported key system:" << key_system_string;
return nullptr;
}
@@ -416,9 +416,9 @@ void ClearKeyCdm::Initialize(bool allow_distinctive_identifier,
allow_persistent_state_ = allow_persistent_state;
// CdmProxy must be created during initialization time. OnInitialized() will
- // be called in OnCdmProxyTestComplete().
- if (key_system_ == kExternalClearKeyCdmProxyTestKeySystem) {
- StartCdmProxyTest();
+ // be called in OnCdmProxyHandlerInitialized().
+ if (key_system_ == kExternalClearKeyCdmProxyKeySystem) {
+ InitializeCdmProxyHandler();
return;
}
@@ -483,8 +483,6 @@ void ClearKeyCdm::CreateSessionAndGenerateRequest(
ReportVerifyCdmHostTestResult();
} else if (key_system_ == kExternalClearKeyStorageIdTestKeySystem) {
StartStorageIdTest();
- } else if (key_system_ == kExternalClearKeyCdmProxyTestKeySystem) {
- ReportCdmProxyTestResult();
}
}
@@ -514,6 +512,16 @@ void ClearKeyCdm::UpdateSession(uint32_t promise_id,
uint32_t response_size) {
DVLOG(1) << __func__;
std::string web_session_str(session_id, session_id_length);
+ std::vector<uint8_t> response_vector(response, response + response_size);
+
+ // Push the license to CdmProxy.
+ // TODO(xhwang): There's a potential race condition here where key status
+ // update is dispatched in the render process first, which triggers the
+ // resume-decryption-after-no-key logic, and by the time we try to decrypt
+ // again in the ClearKeyCdmProxy (GPU process), SetKey() hasn't been
+ // dispatched yet. To solve this, handle no-key in ClearKeyCdmProxy.
+ if (cdm_proxy_handler_)
+ cdm_proxy_handler_->SetKey(response_vector);
std::unique_ptr<media::SimpleCdmPromise> promise(
new media::CdmCallbackPromise<>(
@@ -521,9 +529,7 @@ void ClearKeyCdm::UpdateSession(uint32_t promise_id,
promise_id, web_session_str),
base::Bind(&ClearKeyCdm::OnPromiseFailed, base::Unretained(this),
promise_id)));
- cdm_->UpdateSession(web_session_str,
- std::vector<uint8_t>(response, response + response_size),
- std::move(promise));
+ cdm_->UpdateSession(web_session_str, response_vector, std::move(promise));
}
void ClearKeyCdm::OnUpdateSuccess(uint32_t promise_id,
@@ -649,6 +655,10 @@ cdm::Status ClearKeyCdm::Decrypt(const cdm::InputBuffer_2& encrypted_buffer,
DVLOG(1) << __func__;
DCHECK(encrypted_buffer.data);
+ // When CdmProxy is used, the CDM cannot do any decryption or decoding.
+ if (key_system_ == kExternalClearKeyCdmProxyKeySystem)
+ return cdm::kDecryptError;
+
scoped_refptr<DecoderBuffer> buffer;
cdm::Status status = DecryptToMediaDecoderBuffer(encrypted_buffer, &buffer);
@@ -683,8 +693,10 @@ cdm::Status ClearKeyCdm::InitializeAudioDecoder(
cdm::Status ClearKeyCdm::InitializeAudioDecoder(
const cdm::AudioDecoderConfig_2& audio_decoder_config) {
- if (key_system_ == kExternalClearKeyDecryptOnlyKeySystem)
+ if (key_system_ == kExternalClearKeyDecryptOnlyKeySystem ||
+ key_system_ == kExternalClearKeyCdmProxyKeySystem) {
return cdm::kInitializationError;
+ }
#if defined(CLEAR_KEY_CDM_USE_FFMPEG_DECODER)
if (!audio_decoder_)
@@ -696,8 +708,7 @@ cdm::Status ClearKeyCdm::InitializeAudioDecoder(
return cdm::kSuccess;
#else
- NOTIMPLEMENTED();
- return cdm::kSessionError;
+ return cdm::kInitializationError;
#endif // CLEAR_KEY_CDM_USE_FFMPEG_DECODER
}
@@ -715,8 +726,10 @@ cdm::Status ClearKeyCdm::InitializeVideoDecoder(
cdm::Status ClearKeyCdm::InitializeVideoDecoder(
const cdm::VideoDecoderConfig_2& video_decoder_config) {
- if (key_system_ == kExternalClearKeyDecryptOnlyKeySystem)
+ if (key_system_ == kExternalClearKeyDecryptOnlyKeySystem ||
+ key_system_ == kExternalClearKeyCdmProxyKeySystem) {
return cdm::kInitializationError;
+ }
if (video_decoder_ && video_decoder_->is_initialized()) {
DCHECK(!video_decoder_->is_initialized());
@@ -1071,29 +1084,20 @@ void ClearKeyCdm::StartStorageIdTest() {
cdm_host_proxy_->RequestStorageId(0);
}
-void ClearKeyCdm::StartCdmProxyTest() {
+void ClearKeyCdm::InitializeCdmProxyHandler() {
DVLOG(1) << __func__;
- DCHECK(!cdm_proxy_test_);
+ DCHECK(!cdm_proxy_handler_);
- cdm_proxy_test_.reset(new CdmProxyTest(cdm_host_proxy_.get()));
- cdm_proxy_test_->Run(base::BindOnce(&ClearKeyCdm::OnCdmProxyTestComplete,
- base::Unretained(this)));
+ cdm_proxy_handler_ = std::make_unique<CdmProxyHandler>(cdm_host_proxy_.get());
+ cdm_proxy_handler_->Initialize(base::BindOnce(
+ &ClearKeyCdm::OnCdmProxyHandlerInitialized, base::Unretained(this)));
}
-void ClearKeyCdm::OnCdmProxyTestComplete(bool success) {
+void ClearKeyCdm::OnCdmProxyHandlerInitialized(bool success) {
DVLOG(1) << __func__;
- DCHECK(cdm_proxy_test_);
-
- cdm_proxy_test_.reset();
- has_cdm_proxy_test_passed_ = success;
-
- // Ignore test result here. It will be reported in ReportCdmProxyTestResult().
- cdm_host_proxy_->OnInitialized(true);
-}
+ DCHECK(cdm_proxy_handler_);
-void ClearKeyCdm::ReportCdmProxyTestResult() {
- // StartCdmProxyTest() should have already been called and finished.
- OnUnitTestComplete(has_cdm_proxy_test_passed_);
+ cdm_host_proxy_->OnInitialized(success);
}
} // namespace media
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
index b9c78fa9665..386f2b95d07 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
@@ -23,7 +23,7 @@
namespace media {
class CdmHostProxy;
-class CdmProxyTest;
+class CdmProxyHandler;
class CdmVideoDecoder;
class DecoderBuffer;
class FFmpegCdmAudioDecoder;
@@ -159,9 +159,8 @@ class ClearKeyCdm : public cdm::ContentDecryptionModule_9,
void ReportVerifyCdmHostTestResult();
void StartStorageIdTest();
- void StartCdmProxyTest();
- void OnCdmProxyTestComplete(bool success);
- void ReportCdmProxyTestResult();
+ void InitializeCdmProxyHandler();
+ void OnCdmProxyHandlerInitialized(bool success);
int host_interface_version_ = 0;
@@ -189,12 +188,11 @@ class ClearKeyCdm : public cdm::ContentDecryptionModule_9,
std::unique_ptr<CdmVideoDecoder> video_decoder_;
std::unique_ptr<FileIOTestRunner> file_io_test_runner_;
- std::unique_ptr<CdmProxyTest> cdm_proxy_test_;
+ std::unique_ptr<CdmProxyHandler> cdm_proxy_handler_;
bool is_running_output_protection_test_ = false;
bool is_running_platform_verification_test_ = false;
bool is_running_storage_id_test_ = false;
- bool has_cdm_proxy_test_passed_ = false;
DISALLOW_COPY_AND_ASSIGN(ClearKeyCdm);
};
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc
index c251354b535..2a4b2c2e337 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc
@@ -11,6 +11,26 @@
namespace media {
+namespace {
+
+constexpr char kDummySessionId[] = "dummy session id";
+
+class IgnoreResponsePromise : public SimpleCdmPromise {
+ public:
+ IgnoreResponsePromise() = default;
+ ~IgnoreResponsePromise() override = default;
+
+ // SimpleCdmPromise implementation.
+ void resolve() final { MarkPromiseSettled(); }
+ void reject(CdmPromise::Exception exception_code,
+ uint32_t system_code,
+ const std::string& error_message) final {
+ MarkPromiseSettled();
+ }
+};
+
+} // namespace
+
ClearKeyCdmProxy::ClearKeyCdmProxy() : weak_factory_(this) {}
ClearKeyCdmProxy::~ClearKeyCdmProxy() {}
@@ -23,9 +43,8 @@ base::WeakPtr<CdmContext> ClearKeyCdmProxy::GetCdmContext() {
void ClearKeyCdmProxy::Initialize(Client* client, InitializeCB init_cb) {
DVLOG(1) << __func__;
- std::move(init_cb).Run(
- Status::kOk, Protocol::kIntelConvergedSecurityAndManageabilityEngine,
- kClearKeyCdmProxyCryptoSessionId);
+ std::move(init_cb).Run(Status::kOk, Protocol::kIntel,
+ kClearKeyCdmProxyCryptoSessionId);
}
void ClearKeyCdmProxy::Process(Function function,
@@ -67,7 +86,15 @@ void ClearKeyCdmProxy::CreateMediaCryptoSession(
void ClearKeyCdmProxy::SetKey(uint32_t crypto_session_id,
const std::vector<uint8_t>& key_id,
- const std::vector<uint8_t>& key_blob) {}
+ const std::vector<uint8_t>& key_blob) {
+ DVLOG(1) << __func__;
+
+ if (!aes_decryptor_)
+ CreateDecryptor();
+
+ aes_decryptor_->UpdateSession(kDummySessionId, key_blob,
+ std::make_unique<IgnoreResponsePromise>());
+}
void ClearKeyCdmProxy::RemoveKey(uint32_t crypto_session_id,
const std::vector<uint8_t>& key_id) {}
@@ -75,13 +102,22 @@ void ClearKeyCdmProxy::RemoveKey(uint32_t crypto_session_id,
Decryptor* ClearKeyCdmProxy::GetDecryptor() {
DVLOG(1) << __func__;
- if (!aes_decryptor_) {
- aes_decryptor_ = base::MakeRefCounted<AesDecryptor>(
- base::DoNothing(), base::DoNothing(), base::DoNothing(),
- base::DoNothing());
- }
+ if (!aes_decryptor_)
+ CreateDecryptor();
return aes_decryptor_.get();
}
+void ClearKeyCdmProxy::CreateDecryptor() {
+ DVLOG(1) << __func__;
+ DCHECK(!aes_decryptor_);
+
+ aes_decryptor_ =
+ base::MakeRefCounted<AesDecryptor>(base::DoNothing(), base::DoNothing(),
+ base::DoNothing(), base::DoNothing());
+
+ // Also create a dummy session to be used for SetKey().
+ aes_decryptor_->CreateSession(kDummySessionId, CdmSessionType::kTemporary);
+}
+
} // namespace media
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h
index 20f3d99cb35..ac8dce769c9 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h
@@ -42,6 +42,8 @@ class ClearKeyCdmProxy : public CdmProxy, public CdmContext {
Decryptor* GetDecryptor() final;
private:
+ void CreateDecryptor();
+
scoped_refptr<AesDecryptor> aes_decryptor_;
base::WeakPtrFactory<ClearKeyCdmProxy> weak_factory_;
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
index aade5d5d23e..f065633e075 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
@@ -117,7 +117,7 @@ void ClearKeyPersistentSessionCdm::CreateSessionAndGenerateRequest(
const std::vector<uint8_t>& init_data,
std::unique_ptr<NewSessionCdmPromise> promise) {
std::unique_ptr<NewSessionCdmPromise> new_promise;
- if (session_type != CdmSessionType::PERSISTENT_LICENSE_SESSION) {
+ if (session_type != CdmSessionType::kPersistentLicense) {
new_promise = std::move(promise);
} else {
// Since it's a persistent session, we need to save the session ID after
@@ -135,7 +135,7 @@ void ClearKeyPersistentSessionCdm::LoadSession(
CdmSessionType session_type,
const std::string& session_id,
std::unique_ptr<NewSessionCdmPromise> promise) {
- DCHECK_EQ(CdmSessionType::PERSISTENT_LICENSE_SESSION, session_type);
+ DCHECK_EQ(CdmSessionType::kPersistentLicense, session_type);
// Load the saved state for |session_id| and then create the session.
std::unique_ptr<CdmFileAdapter> file(new CdmFileAdapter(cdm_host_proxy_));
@@ -180,8 +180,7 @@ void ClearKeyPersistentSessionCdm::OnFileReadForLoadSession(
}
// Add the session to the list of active sessions.
- if (!cdm_->CreateSession(session_id,
- CdmSessionType::PERSISTENT_LICENSE_SESSION)) {
+ if (!cdm_->CreateSession(session_id, CdmSessionType::kPersistentLicense)) {
// If the session can't be created it's due to an already existing session
// with the same name.
promise->reject(CdmPromise::Exception::QUOTA_EXCEEDED_ERROR, 0,
diff --git a/chromium/media/device_monitors/device_monitor_mac.mm b/chromium/media/device_monitors/device_monitor_mac.mm
index fff4482bae4..0953e0779ba 100644
--- a/chromium/media/device_monitors/device_monitor_mac.mm
+++ b/chromium/media/device_monitors/device_monitor_mac.mm
@@ -7,9 +7,9 @@
#include <AVFoundation/AVFoundation.h>
#include <set>
+#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/logging.h"
-#include "base/mac/bind_objc_block.h"
#include "base/mac/scoped_nsobject.h"
#include "base/macros.h"
#include "base/task_runner_util.h"
@@ -205,10 +205,10 @@ void SuspendObserverDelegate::StartObserver(
// done on UI thread. The devices array is retained in |device_thread| and
// released in DoStartObserver().
base::PostTaskAndReplyWithResult(
- device_thread.get(), FROM_HERE, base::BindBlock(^{
+ device_thread.get(), FROM_HERE, base::BindOnce(base::RetainBlock(^{
return [[AVCaptureDevice devices] retain];
- }),
- base::Bind(&SuspendObserverDelegate::DoStartObserver, this));
+ })),
+ base::BindOnce(&SuspendObserverDelegate::DoStartObserver, this));
}
void SuspendObserverDelegate::OnDeviceChanged(
@@ -218,10 +218,10 @@ void SuspendObserverDelegate::OnDeviceChanged(
// new devices and the old ones to be done on main thread. The devices array
// is retained in |device_thread| and released in DoOnDeviceChanged().
PostTaskAndReplyWithResult(
- device_thread.get(), FROM_HERE, base::BindBlock(^{
+ device_thread.get(), FROM_HERE, base::BindOnce(base::RetainBlock(^{
return [[AVCaptureDevice devices] retain];
- }),
- base::Bind(&SuspendObserverDelegate::DoOnDeviceChanged, this));
+ })),
+ base::BindOnce(&SuspendObserverDelegate::DoOnDeviceChanged, this));
}
void SuspendObserverDelegate::ResetDeviceMonitor() {
diff --git a/chromium/media/ffmpeg/ffmpeg_common.cc b/chromium/media/ffmpeg/ffmpeg_common.cc
index caa2647cb29..e299bbf336d 100644
--- a/chromium/media/ffmpeg/ffmpeg_common.cc
+++ b/chromium/media/ffmpeg/ffmpeg_common.cc
@@ -497,7 +497,7 @@ bool AVStreamToVideoDecoderConfig(const AVStream* stream,
break;
case kCodecAV1:
format = PIXEL_FORMAT_I420;
- profile = AV1PROFILE_PROFILE0;
+ profile = AV1PROFILE_PROFILE_MAIN;
break;
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
case kCodecHEVC:
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.cc b/chromium/media/filters/android/media_codec_audio_decoder.cc
index 9429b426ceb..70e1dd2f6b0 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.cc
+++ b/chromium/media/filters/android/media_codec_audio_decoder.cc
@@ -188,7 +188,7 @@ void MediaCodecAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
input_queue_.push_back(std::make_pair(std::move(buffer), bound_decode_cb));
- codec_loop_->DoPendingWork();
+ codec_loop_->ExpectWork();
}
void MediaCodecAudioDecoder::Reset(const base::Closure& closure) {
diff --git a/chromium/media/filters/aom_video_decoder.cc b/chromium/media/filters/aom_video_decoder.cc
index 41250ae6dad..fa5b8a53ced 100644
--- a/chromium/media/filters/aom_video_decoder.cc
+++ b/chromium/media/filters/aom_video_decoder.cc
@@ -118,61 +118,16 @@ static void SetColorSpaceForFrame(const aom_image_t* img,
return;
}
- ColorSpace color_space = config.color_space();
- gfx::ColorSpace::PrimaryID primaries = gfx::ColorSpace::PrimaryID::INVALID;
- gfx::ColorSpace::TransferID transfer = gfx::ColorSpace::TransferID::INVALID;
- gfx::ColorSpace::MatrixID matrix = gfx::ColorSpace::MatrixID::INVALID;
gfx::ColorSpace::RangeID range = img->range == AOM_CR_FULL_RANGE
? gfx::ColorSpace::RangeID::FULL
: gfx::ColorSpace::RangeID::LIMITED;
- switch (img->cs) {
- case AOM_CS_BT_601:
- case AOM_CS_SMPTE_170:
- primaries = gfx::ColorSpace::PrimaryID::SMPTE170M;
- transfer = gfx::ColorSpace::TransferID::SMPTE170M;
- matrix = gfx::ColorSpace::MatrixID::SMPTE170M;
- color_space = COLOR_SPACE_SD_REC601;
- break;
- case AOM_CS_SMPTE_240:
- primaries = gfx::ColorSpace::PrimaryID::SMPTE240M;
- transfer = gfx::ColorSpace::TransferID::SMPTE240M;
- matrix = gfx::ColorSpace::MatrixID::SMPTE240M;
- break;
- case AOM_CS_BT_709:
- primaries = gfx::ColorSpace::PrimaryID::BT709;
- transfer = gfx::ColorSpace::TransferID::BT709;
- matrix = gfx::ColorSpace::MatrixID::BT709;
- color_space = COLOR_SPACE_HD_REC709;
- break;
- case AOM_CS_BT_2020_NCL:
- case AOM_CS_BT_2020_CL:
- primaries = gfx::ColorSpace::PrimaryID::BT2020;
- if (img->bit_depth >= 12) {
- transfer = gfx::ColorSpace::TransferID::BT2020_12;
- } else if (img->bit_depth >= 10) {
- transfer = gfx::ColorSpace::TransferID::BT2020_10;
- } else {
- transfer = gfx::ColorSpace::TransferID::BT709;
- }
- matrix = img->cs == AOM_CS_BT_2020_NCL
- ? gfx::ColorSpace::MatrixID::BT2020_NCL
- : gfx::ColorSpace::MatrixID::BT2020_CL;
- break;
- case AOM_CS_SRGB:
- primaries = gfx::ColorSpace::PrimaryID::BT709;
- transfer = gfx::ColorSpace::TransferID::IEC61966_2_1;
- matrix = gfx::ColorSpace::MatrixID::BT709;
- break;
- default:
- NOTIMPLEMENTED() << "Unsupported color space encountered: " << img->cs;
- break;
- }
-
- // TODO(ccameron): Set a color space even for unspecified values.
- if (primaries != gfx::ColorSpace::PrimaryID::INVALID)
- frame->set_color_space(gfx::ColorSpace(primaries, transfer, matrix, range));
- frame->metadata()->SetInteger(VideoFrameMetadata::COLOR_SPACE, color_space);
+ // AOM color space defines match ISO 23001-8:2016 via ISO/IEC 23091-4/ITU-T
+ // H.273.
+ // http://av1-spec.argondesign.com/av1-spec/av1-spec.html#color-config-semantics
+ frame->set_color_space(
+ media::VideoColorSpace(img->cp, img->tc, img->mc, range)
+ .ToGfxColorSpace());
}
// Copies plane of 8-bit pixels out of a 16-bit values.
@@ -308,8 +263,8 @@ bool AomVideoDecoder::DecodeBuffer(const DecoderBuffer* buffer) {
if (aom_codec_decode(
aom_decoder_.get(), buffer->data(), buffer->data_size(),
- reinterpret_cast<void*>(buffer->timestamp().InMicroseconds()),
- 0 /* deadline */) != AOM_CODEC_OK) {
+ reinterpret_cast<void*>(buffer->timestamp().InMicroseconds())) !=
+ AOM_CODEC_OK) {
const char* detail = aom_codec_error_detail(aom_decoder_.get());
MEDIA_LOG(ERROR, media_log_)
<< "aom_codec_decode() failed: " << aom_codec_error(aom_decoder_.get())
diff --git a/chromium/media/filters/aom_video_decoder_unittest.cc b/chromium/media/filters/aom_video_decoder_unittest.cc
index 7ab2b07ad95..f48a31f640b 100644
--- a/chromium/media/filters/aom_video_decoder_unittest.cc
+++ b/chromium/media/filters/aom_video_decoder_unittest.cc
@@ -33,7 +33,7 @@ class AomVideoDecoderTest : public testing::Test {
public:
AomVideoDecoderTest()
: decoder_(new AomVideoDecoder(&media_log_)),
- i_frame_buffer_(ReadTestDataFile("av1-I-frame-352x288")) {}
+ i_frame_buffer_(ReadTestDataFile("av1-I-frame-320x240")) {}
~AomVideoDecoderTest() override { Destroy(); }
@@ -208,7 +208,7 @@ TEST_F(AomVideoDecoderTest, DecodeFrame_Normal) {
// the output size was adjusted.
// TODO(dalecurtis): Get an I-frame from a larger video.
TEST_F(AomVideoDecoderTest, DISABLED_DecodeFrame_LargerWidth) {
- DecodeIFrameThenTestFile("av1-I-frame-352x288", gfx::Size(1280, 720));
+ DecodeIFrameThenTestFile("av1-I-frame-320x240", gfx::Size(1280, 720));
}
// Decode a VP9 frame which should trigger a decoder error.
diff --git a/chromium/media/filters/audio_file_reader.cc b/chromium/media/filters/audio_file_reader.cc
index ae37ab7b375..ded5a7d200f 100644
--- a/chromium/media/filters/audio_file_reader.cc
+++ b/chromium/media/filters/audio_file_reader.cc
@@ -215,7 +215,7 @@ bool AudioFileReader::OnNewFrame(
int* total_frames,
std::vector<std::unique_ptr<AudioBus>>* decoded_audio_packets,
AVFrame* frame) {
- const int frames_read = frame->nb_samples;
+ int frames_read = frame->nb_samples;
if (frames_read < 0)
return false;
@@ -233,6 +233,27 @@ bool AudioFileReader::OnNewFrame(
return false;
}
+ // AAC decoding doesn't properly trim the last packet in a stream, so if we
+ // have duration information, use it to set the correct length to avoid extra
+ // silence from being output. In the case where we are also discarding some
+ // portion of the packet (as indicated by a negative pts), we further want to
+ // adjust the duration downward by however much exists before zero.
+ if (audio_codec_ == kCodecAAC && frame->pkt_duration) {
+ const base::TimeDelta pkt_duration = ConvertFromTimeBase(
+ glue_->format_context()->streams[stream_index_]->time_base,
+ frame->pkt_duration + std::min(static_cast<int64_t>(0), frame->pts));
+ const base::TimeDelta frame_duration = base::TimeDelta::FromSecondsD(
+ frames_read / static_cast<double>(sample_rate_));
+
+ if (pkt_duration < frame_duration && pkt_duration > base::TimeDelta()) {
+ const int new_frames_read = frames_read * (pkt_duration.InSecondsF() /
+ frame_duration.InSecondsF());
+ DVLOG(2) << "Shrinking AAC frame from " << frames_read << " to "
+ << new_frames_read << " based on packet duration.";
+ frames_read = new_frames_read;
+ }
+ }
+
// Deinterleave each channel and convert to 32bit floating-point with
// nominal range -1.0 -> +1.0. If the output is already in float planar
// format, just copy it into the AudioBus.
diff --git a/chromium/media/filters/audio_file_reader_unittest.cc b/chromium/media/filters/audio_file_reader_unittest.cc
index 6df10b2a8fd..f8d4b839243 100644
--- a/chromium/media/filters/audio_file_reader_unittest.cc
+++ b/chromium/media/filters/audio_file_reader_unittest.cc
@@ -125,9 +125,7 @@ class AudioFileReaderTest : public testing::Test {
EXPECT_EQ(reader_->Read(&decoded_audio_packets), 0);
}
- void disable_packet_verification() {
- packet_verification_disabled_ = true;
- }
+ void disable_packet_verification() { packet_verification_disabled_ = true; }
protected:
scoped_refptr<DecoderBuffer> data_;
@@ -162,72 +160,52 @@ TEST_F(AudioFileReaderTest, Vorbis) {
}
TEST_F(AudioFileReaderTest, WaveU8) {
- RunTest("sfx_u8.wav",
- "-1.23,-1.57,-1.14,-0.91,-0.87,-0.07,",
- 1,
- 44100,
- base::TimeDelta::FromMicroseconds(288414),
- 12720,
- 12719);
+ RunTest("sfx_u8.wav", "-1.23,-1.57,-1.14,-0.91,-0.87,-0.07,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(288414), 12720, 12719);
}
TEST_F(AudioFileReaderTest, WaveS16LE) {
- RunTest("sfx_s16le.wav",
- "3.05,2.87,3.00,3.32,3.58,4.08,",
- 1,
- 44100,
- base::TimeDelta::FromMicroseconds(288414),
- 12720,
- 12719);
+ RunTest("sfx_s16le.wav", "3.05,2.87,3.00,3.32,3.58,4.08,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(288414), 12720, 12719);
}
TEST_F(AudioFileReaderTest, WaveS24LE) {
- RunTest("sfx_s24le.wav",
- "3.03,2.86,2.99,3.31,3.57,4.06,",
- 1,
- 44100,
- base::TimeDelta::FromMicroseconds(288414),
- 12720,
- 12719);
+ RunTest("sfx_s24le.wav", "3.03,2.86,2.99,3.31,3.57,4.06,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(288414), 12720, 12719);
}
TEST_F(AudioFileReaderTest, WaveF32LE) {
- RunTest("sfx_f32le.wav",
- "3.03,2.86,2.99,3.31,3.57,4.06,",
- 1,
- 44100,
- base::TimeDelta::FromMicroseconds(288414),
- 12720,
- 12719);
+ RunTest("sfx_f32le.wav", "3.03,2.86,2.99,3.31,3.57,4.06,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(288414), 12720, 12719);
}
TEST_F(AudioFileReaderTest, MP3) {
- RunTest("sfx.mp3",
- "1.30,2.72,4.56,5.08,3.74,2.03,",
- 1,
- 44100,
- base::TimeDelta::FromMicroseconds(313470),
- 13825,
- 11025);
+ RunTest("sfx.mp3", "1.30,2.72,4.56,5.08,3.74,2.03,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(313470), 13825, 11025);
}
TEST_F(AudioFileReaderTest, CorruptMP3) {
// Disable packet verification since the file is corrupt and FFmpeg does not
// make any guarantees on packet consistency in this case.
disable_packet_verification();
- RunTest("corrupt.mp3",
- "-4.95,-2.95,-0.44,1.16,0.31,-2.21,",
- 1,
- 44100,
- base::TimeDelta::FromMicroseconds(1018801),
- 44930,
- 44928);
+ RunTest("corrupt.mp3", "-4.95,-2.95,-0.44,1.16,0.31,-2.21,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(1018801), 44930, 44928);
}
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST_F(AudioFileReaderTest, AAC) {
- RunTest("sfx.m4a", "1.81,1.66,2.32,3.27,4.46,3.36,", 1, 44100,
- base::TimeDelta::FromMicroseconds(371660), 16391, 13312);
+ RunTest("sfx.m4a", "0.79,2.31,4.15,4.92,4.04,1.44,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(371660), 16391, 12701);
+}
+
+TEST_F(AudioFileReaderTest, AAC_SinglePacket) {
+ RunTest("440hz-10ms.m4a", "3.77,4.53,4.75,3.48,3.67,3.76,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(69660), 3073, 441);
+}
+
+TEST_F(AudioFileReaderTest, AAC_ADTS) {
+ RunTest("sfx.adts", "1.80,1.66,2.31,3.26,4.46,3.36,", 1, 44100,
+ base::TimeDelta::FromMicroseconds(2825180), 124591, 13312);
}
TEST_F(AudioFileReaderTest, MidStreamConfigChangesFail) {
@@ -240,13 +218,8 @@ TEST_F(AudioFileReaderTest, VorbisInvalidChannelLayout) {
}
TEST_F(AudioFileReaderTest, WaveValidFourChannelLayout) {
- RunTest("4ch.wav",
- "131.71,38.02,130.31,44.89,135.98,42.52,",
- 4,
- 44100,
- base::TimeDelta::FromMicroseconds(100001),
- 4411,
- 4410);
+ RunTest("4ch.wav", "131.71,38.02,130.31,44.89,135.98,42.52,", 4, 44100,
+ base::TimeDelta::FromMicroseconds(100001), 4411, 4410);
}
} // namespace media
diff --git a/chromium/media/filters/chunk_demuxer.cc b/chromium/media/filters/chunk_demuxer.cc
index 50fc1bc8ba5..230c3fbad5a 100644
--- a/chromium/media/filters/chunk_demuxer.cc
+++ b/chromium/media/filters/chunk_demuxer.cc
@@ -52,6 +52,37 @@ using base::TimeDelta;
} \
}
+namespace {
+
+// Helper to attempt construction of a StreamParser specific to |content_type|
+// and |codecs|.
+// TODO(wolenetz): Consider relocating this to StreamParserFactory in
+// conjunction with updating StreamParserFactory's isTypeSupported() to also
+// parse codecs, rather than require preparsed vector.
+std::unique_ptr<media::StreamParser> CreateParserForTypeAndCodecs(
+ const std::string& content_type,
+ const std::string& codecs,
+ media::MediaLog* media_log) {
+ std::vector<std::string> parsed_codec_ids;
+ media::SplitCodecsToVector(codecs, &parsed_codec_ids, false);
+ return media::StreamParserFactory::Create(content_type, parsed_codec_ids,
+ media_log);
+}
+
+// Helper to calculate the expected codecs parsed from initialization segments
+// for a few mime types that have an implicit codec.
+std::string ExpectedCodecs(const std::string& content_type,
+ const std::string& codecs) {
+ if (codecs == "" && content_type == "audio/aac")
+ return "aac";
+ if (codecs == "" &&
+ (content_type == "audio/mpeg" || content_type == "audio/mp3"))
+ return "mp3";
+ return codecs;
+}
+
+} // namespace
+
namespace media {
ChunkDemuxerStream::ChunkDemuxerStream(Type type,
@@ -228,6 +259,7 @@ void ChunkDemuxerStream::OnStartOfCodedFrameGroup(DecodeTimestamp start_dts,
}
bool ChunkDemuxerStream::UpdateAudioConfig(const AudioDecoderConfig& config,
+ bool allow_codec_change,
MediaLog* media_log) {
DCHECK(config.IsValidConfig());
DCHECK_EQ(type_, AUDIO);
@@ -245,10 +277,11 @@ bool ChunkDemuxerStream::UpdateAudioConfig(const AudioDecoderConfig& config,
return true;
}
- return SBSTREAM_OP(UpdateAudioConfig(config));
+ return SBSTREAM_OP(UpdateAudioConfig(config, allow_codec_change));
}
bool ChunkDemuxerStream::UpdateVideoConfig(const VideoDecoderConfig& config,
+ bool allow_codec_change,
MediaLog* media_log) {
DCHECK(config.IsValidConfig());
DCHECK_EQ(type_, VIDEO);
@@ -260,7 +293,7 @@ bool ChunkDemuxerStream::UpdateVideoConfig(const VideoDecoderConfig& config,
return true;
}
- return SBSTREAM_OP(UpdateVideoConfig(config));
+ return SBSTREAM_OP(UpdateVideoConfig(config, allow_codec_change));
}
void ChunkDemuxerStream::UpdateTextConfig(const TextTrackConfig& config,
@@ -440,7 +473,6 @@ ChunkDemuxer::ChunkDemuxer(
open_cb_(open_cb),
progress_cb_(progress_cb),
encrypted_media_init_data_cb_(encrypted_media_init_data_cb),
- enable_text_(false),
media_log_(media_log),
duration_(kNoTimestamp),
user_specified_duration_(-1),
@@ -461,8 +493,7 @@ std::string ChunkDemuxer::GetDisplayName() const {
}
void ChunkDemuxer::Initialize(DemuxerHost* host,
- const PipelineStatusCB& init_cb,
- bool enable_text_tracks) {
+ const PipelineStatusCB& init_cb) {
DVLOG(1) << "Init(), buffering_by_pts_=" << buffering_by_pts_;
base::AutoLock auto_lock(lock_);
@@ -480,7 +511,6 @@ void ChunkDemuxer::Initialize(DemuxerHost* host,
// has a chance to run. This is because ChunkDemuxer::ReportError_Locked
// directly calls DemuxerHost::OnDemuxerError: crbug.com/633016.
init_cb_ = init_cb;
- enable_text_ = enable_text_tracks;
ChangeState_Locked(INITIALIZING);
@@ -617,9 +647,9 @@ void ChunkDemuxer::CancelPendingSeek(TimeDelta seek_time) {
}
ChunkDemuxer::Status ChunkDemuxer::AddId(const std::string& id,
- const std::string& type,
+ const std::string& content_type,
const std::string& codecs) {
- DVLOG(1) << __func__ << " id=" << id << " mime_type=" << type
+ DVLOG(1) << __func__ << " id=" << id << " content_type=" << content_type
<< " codecs=" << codecs;
base::AutoLock auto_lock(lock_);
@@ -630,14 +660,10 @@ ChunkDemuxer::Status ChunkDemuxer::AddId(const std::string& id,
// needed. See https://crbug.com/786975.
CHECK(!init_cb_.is_null());
- std::vector<std::string> parsed_codec_ids;
- media::SplitCodecsToVector(codecs, &parsed_codec_ids, false);
-
std::unique_ptr<media::StreamParser> stream_parser(
- StreamParserFactory::Create(type, parsed_codec_ids, media_log_));
-
+ CreateParserForTypeAndCodecs(content_type, codecs, media_log_));
if (!stream_parser) {
- DVLOG(1) << __func__ << " failed: unsupported mime_type=" << type
+ DVLOG(1) << __func__ << " failed: unsupported content_type=" << content_type
<< " codecs=" << codecs;
return ChunkDemuxer::kNotSupported;
}
@@ -657,11 +683,6 @@ ChunkDemuxer::Status ChunkDemuxer::AddId(const std::string& id,
SourceBufferState::NewTextTrackCB new_text_track_cb;
- if (enable_text_) {
- new_text_track_cb = base::Bind(&ChunkDemuxer::OnNewTextTrack,
- base::Unretained(this));
- }
-
// TODO(wolenetz): Change these to DCHECKs or switch to returning
// kReachedIdLimit once less verification in release build is needed. See
// https://crbug.com/786975.
@@ -671,15 +692,10 @@ ChunkDemuxer::Status ChunkDemuxer::AddId(const std::string& id,
CHECK(*insert_result.first == id);
CHECK(insert_result.second); // Only true if insertion succeeded.
- std::string expected_sbs_codecs = codecs;
- if (codecs == "" && type == "audio/aac")
- expected_sbs_codecs = "aac";
- if (codecs == "" && (type == "audio/mpeg" || type == "audio/mp3"))
- expected_sbs_codecs = "mp3";
-
- source_state->Init(
- base::Bind(&ChunkDemuxer::OnSourceInitDone, base::Unretained(this), id),
- expected_sbs_codecs, encrypted_media_init_data_cb_, new_text_track_cb);
+ source_state->Init(base::BindOnce(&ChunkDemuxer::OnSourceInitDone,
+ base::Unretained(this), id),
+ ExpectedCodecs(content_type, codecs),
+ encrypted_media_init_data_cb_, new_text_track_cb);
// TODO(wolenetz): Change to DCHECKs once less verification in release build
// is needed. See https://crbug.com/786975.
@@ -947,6 +963,51 @@ void ChunkDemuxer::Remove(const std::string& id, TimeDelta start,
host_->OnBufferedTimeRangesChanged(GetBufferedRanges_Locked());
}
+bool ChunkDemuxer::CanChangeType(const std::string& id,
+ const std::string& content_type,
+ const std::string& codecs) {
+ // Note, Chromium currently will not compare content_type and codecs, if any,
+ // with previous content_type and codecs of the SourceBuffer.
+ // TODO(wolenetz): Consider returning false if the codecs parameters are ever
+ // made to be precise such that they signal that the number of tracks of
+ // various media types differ from the first initialization segment (if
+ // received already). Switching to an audio-only container, when the first
+ // initialization segment only contained non-audio tracks, is one example we
+ // could enforce earlier here.
+
+ DVLOG(1) << __func__ << " id=" << id << " content_type=" << content_type
+ << " codecs=" << codecs;
+ base::AutoLock auto_lock(lock_);
+
+ DCHECK(IsValidId(id));
+
+ // CanChangeType() doesn't care if there has or hasn't been received a first
+ // initialization segment for the source buffer corresponding to |id|.
+
+ std::unique_ptr<media::StreamParser> stream_parser(
+ CreateParserForTypeAndCodecs(content_type, codecs, media_log_));
+ return !!stream_parser;
+}
+
+void ChunkDemuxer::ChangeType(const std::string& id,
+ const std::string& content_type,
+ const std::string& codecs) {
+ DVLOG(1) << __func__ << " id=" << id << " content_type=" << content_type
+ << " codecs=" << codecs;
+
+ base::AutoLock auto_lock(lock_);
+
+ DCHECK(state_ == INITIALIZING || state_ == INITIALIZED) << state_;
+ DCHECK(IsValidId(id));
+
+ std::unique_ptr<media::StreamParser> stream_parser(
+ CreateParserForTypeAndCodecs(content_type, codecs, media_log_));
+ // Caller should query CanChangeType() first to protect from failing this.
+ DCHECK(stream_parser);
+ source_state_map_[id]->ChangeType(std::move(stream_parser),
+ ExpectedCodecs(content_type, codecs));
+}
+
double ChunkDemuxer::GetDuration() {
base::AutoLock auto_lock(lock_);
return GetDuration_Locked();
@@ -1018,6 +1079,14 @@ bool ChunkDemuxer::IsParsingMediaSegment(const std::string& id) {
return source_state_map_[id]->parsing_media_segment();
}
+bool ChunkDemuxer::GetGenerateTimestampsFlag(const std::string& id) {
+ base::AutoLock auto_lock(lock_);
+ DVLOG(1) << "GetGenerateTimestampsFlag(" << id << ")";
+ CHECK(IsValidId(id));
+
+ return source_state_map_[id]->generate_timestamps_flag();
+}
+
void ChunkDemuxer::SetSequenceMode(const std::string& id,
bool sequence_mode) {
base::AutoLock auto_lock(lock_);
@@ -1312,13 +1381,6 @@ ChunkDemuxerStream* ChunkDemuxer::CreateDemuxerStream(
return owning_vector->back().get();
}
-void ChunkDemuxer::OnNewTextTrack(ChunkDemuxerStream* text_stream,
- const TextTrackConfig& config) {
- lock_.AssertAcquired();
- DCHECK_NE(state_, SHUTDOWN);
- host_->AddTextStream(text_stream, config);
-}
-
bool ChunkDemuxer::IsValidId(const std::string& source_id) const {
lock_.AssertAcquired();
return source_state_map_.count(source_id) > 0u;
diff --git a/chromium/media/filters/chunk_demuxer.h b/chromium/media/filters/chunk_demuxer.h
index 9253d2d50a5..d4fdfb4c7e3 100644
--- a/chromium/media/filters/chunk_demuxer.h
+++ b/chromium/media/filters/chunk_demuxer.h
@@ -103,10 +103,16 @@ class MEDIA_EXPORT ChunkDemuxerStream : public DemuxerStream {
base::TimeDelta start_pts);
// Called when midstream config updates occur.
+ // For audio and video, if the codec is allowed to change, the caller should
+ // set |allow_codec_change| to true.
// Returns true if the new config is accepted.
// Returns false if the new config should trigger an error.
- bool UpdateAudioConfig(const AudioDecoderConfig& config, MediaLog* media_log);
- bool UpdateVideoConfig(const VideoDecoderConfig& config, MediaLog* media_log);
+ bool UpdateAudioConfig(const AudioDecoderConfig& config,
+ bool allow_codec_change,
+ MediaLog* media_log);
+ bool UpdateVideoConfig(const VideoDecoderConfig& config,
+ bool allow_codec_change,
+ MediaLog* media_log);
void UpdateTextConfig(const TextTrackConfig& config, MediaLog* media_log);
void MarkEndOfStream();
@@ -200,9 +206,7 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
// |enable_text| Process inband text tracks in the normal way when true,
// otherwise ignore them.
- void Initialize(DemuxerHost* host,
- const PipelineStatusCB& init_cb,
- bool enable_text_tracks) override;
+ void Initialize(DemuxerHost* host, const PipelineStatusCB& init_cb) override;
void Stop() override;
void Seek(base::TimeDelta time, const PipelineStatusCB& cb) override;
base::Time GetTimelineOffset() const override;
@@ -218,15 +222,16 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
void StartWaitingForSeek(base::TimeDelta seek_time) override;
void CancelPendingSeek(base::TimeDelta seek_time) override;
- // Registers a new |id| to use for AppendData() calls. |type| indicates
- // the MIME type for the data that we intend to append for this ID.
- // kOk is returned if the demuxer has enough resources to support another ID
- // and supports the format indicated by |type|.
- // kNotSupported is returned if |type| is not a supported format.
- // kReachedIdLimit is returned if the demuxer cannot handle another ID right
- // now.
+ // Registers a new |id| to use for AppendData() calls. |content_type|
+ // indicates the MIME type's ContentType and |codecs| indicates the MIME
+ // type's "codecs" parameter string (if any) for the data that we intend to
+ // append for this ID. kOk is returned if the demuxer has enough resources to
+ // support another ID and supports the format indicated by |content_type| and
+ // |codecs|. kReachedIdLimit is returned if the demuxer cannot handle another
+ // ID right now. kNotSupported is returned if |content_type| and |codecs| is
+ // not a supported format.
Status AddId(const std::string& id,
- const std::string& type,
+ const std::string& content_type,
const std::string& codecs);
// Notifies a caller via |tracks_updated_cb| that the set of media tracks
@@ -285,6 +290,26 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
void Remove(const std::string& id, base::TimeDelta start,
base::TimeDelta end);
+ // Returns whether or not the source buffer associated with |id| can change
+ // its parser type to one which parses |content_type| and |codecs|.
+ // |content_type| indicates the ContentType of the MIME type for the data that
+ // we intend to append for this |id|; |codecs| similarly indicates the MIME
+ // type's "codecs" parameter, if any.
+ bool CanChangeType(const std::string& id,
+ const std::string& content_type,
+ const std::string& codecs);
+
+ // For the source buffer associated with |id|, changes its parser type to one
+ // which parses |content_type| and |codecs|. |content_type| indicates the
+ // ContentType of the MIME type for the data that we intend to append for this
+ // |id|; |codecs| similarly indicates the MIME type's "codecs" parameter, if
+ // any. Caller must first ensure CanChangeType() returns true for the same
+ // parameters. Caller must also ensure that ResetParserState() is done before
+ // calling this, to flush any pending frames.
+ void ChangeType(const std::string& id,
+ const std::string& content_type,
+ const std::string& codecs);
+
// If the buffer is full, attempts to try to free up space, as specified in
// the "Coded Frame Eviction Algorithm" in the Media Source Extensions Spec.
// Returns false iff buffer is still full after running eviction.
@@ -310,6 +335,10 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
// a media segment, or false otherwise.
bool IsParsingMediaSegment(const std::string& id);
+ // Returns the 'Generate Timestamps Flag', as described in the MSE Byte Stream
+ // Format Registry, for the source buffer associated with |id|.
+ bool GetGenerateTimestampsFlag(const std::string& id);
+
// Set the append mode to be applied to subsequent buffers appended to the
// source buffer associated with |id|. If |sequence_mode| is true, caller
// is requesting "sequence" mode. Otherwise, caller is requesting "segments"
@@ -430,7 +459,6 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
base::Closure open_cb_;
base::Closure progress_cb_;
EncryptedMediaInitDataCB encrypted_media_init_data_cb_;
- bool enable_text_;
// MediaLog for reporting messages and properties to debug content and engine.
MediaLog* media_log_;
diff --git a/chromium/media/filters/chunk_demuxer_unittest.cc b/chromium/media/filters/chunk_demuxer_unittest.cc
index 53adadf198c..57ed2458fe5 100644
--- a/chromium/media/filters/chunk_demuxer_unittest.cc
+++ b/chromium/media/filters/chunk_demuxer_unittest.cc
@@ -97,7 +97,6 @@ const int kVideoTrackEntryHeaderSize =
const int kVideoTrackNum = 1;
const int kAudioTrackNum = 2;
-const int kTextTrackNum = 3;
const int kAlternateVideoTrackNum = 4;
const int kAlternateAudioTrackNum = 5;
const int kAlternateTextTrackNum = 6;
@@ -209,8 +208,9 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
base::Bind(&ChunkDemuxerTest::DemuxerOpened, base::Unretained(this));
base::Closure progress_cb =
base::Bind(&ChunkDemuxerTest::OnProgress, base::Unretained(this));
- Demuxer::EncryptedMediaInitDataCB encrypted_media_init_data_cb = base::Bind(
- &ChunkDemuxerTest::OnEncryptedMediaInitData, base::Unretained(this));
+ Demuxer::EncryptedMediaInitDataCB encrypted_media_init_data_cb =
+ base::BindRepeating(&ChunkDemuxerTest::OnEncryptedMediaInitData,
+ base::Unretained(this));
EXPECT_MEDIA_LOG(
BufferingByPtsDts(buffering_api_ == BufferingApi::kNewByPts));
demuxer_.reset(new ChunkDemuxer(open_cb, progress_cb,
@@ -228,7 +228,6 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
int* size) {
bool has_audio = (stream_flags & HAS_AUDIO) != 0;
bool has_video = (stream_flags & HAS_VIDEO) != 0;
- bool has_text = (stream_flags & HAS_TEXT) != 0;
scoped_refptr<DecoderBuffer> ebml_header;
scoped_refptr<DecoderBuffer> info;
scoped_refptr<DecoderBuffer> audio_track_entry;
@@ -279,32 +278,6 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
}
}
- if (has_text) {
- // TODO(matthewjheaney): create an abstraction to do
- // this (http://crbug/321454).
- // We need it to also handle the creation of multiple text tracks.
- //
- // This is the track entry for a text track,
- // TrackEntry [AE], size=30
- // TrackNum [D7], size=1, val=3 (or 4 if USE_ALTERNATE_TEXT_TRACK_ID)
- // TrackUID [73] [C5], size=1, value=3 (must remain constant for same
- // track, even if TrackNum changes)
- // TrackType [83], size=1, val=0x11
- // CodecId [86], size=18, val="D_WEBVTT/SUBTITLES"
- char str[] = "\xAE\x9E\xD7\x81\x03\x73\xC5\x81\x03"
- "\x83\x81\x11\x86\x92"
- "D_WEBVTT/SUBTITLES";
- DCHECK_EQ(str[4], kTextTrackNum);
- if (stream_flags & USE_ALTERNATE_TEXT_TRACK_ID)
- str[4] = kAlternateTextTrackNum;
-
- const int len = strlen(str);
- DCHECK_EQ(len, 32);
- const uint8_t* const buf = reinterpret_cast<const uint8_t*>(str);
- text_track_entry = DecoderBuffer::CopyFrom(buf, len);
- tracks_element_size += text_track_entry->data_size();
- }
-
*size = ebml_header->data_size() + info->data_size() +
kTracksHeaderSize + tracks_element_size;
@@ -353,12 +326,6 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
}
buf += audio_track_entry->data_size();
}
-
- if (has_text) {
- memcpy(buf, text_track_entry->data(),
- text_track_entry->data_size());
- buf += text_track_entry->data_size();
- }
}
ChunkDemuxer::Status AddId() {
@@ -433,7 +400,6 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
case kAlternateAudioTrackNum:
block_duration = kAudioBlockDuration;
break;
- case kTextTrackNum: // Fall-through.
case kAlternateTextTrackNum:
block_duration = kTextBlockDuration;
break;
@@ -513,8 +479,7 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
CHECK(base::StringToInt(timestamp_str, &block_info.timestamp_in_ms));
- if (track_number == kTextTrackNum ||
- track_number == kAlternateTextTrackNum) {
+ if (track_number == kAlternateTextTrackNum) {
block_info.duration = kTextBlockDuration;
ASSERT_EQ(kWebMFlagKeyframe, block_info.flags)
<< "Text block with timestamp " << block_info.timestamp_in_ms
@@ -727,7 +692,6 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
enum StreamFlags {
HAS_AUDIO = 1 << 0,
HAS_VIDEO = 1 << 1,
- HAS_TEXT = 1 << 2,
USE_ALTERNATE_AUDIO_TRACK_ID = 1 << 3,
USE_ALTERNATE_VIDEO_TRACK_ID = 1 << 4,
USE_ALTERNATE_TEXT_TRACK_ID = 1 << 5,
@@ -788,8 +752,8 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
EXPECT_MEDIA_LOG(StreamParsingFailed());
}
- demuxer_->Initialize(
- &host_, CreateInitDoneCB(expected_duration, expected_status), true);
+ demuxer_->Initialize(&host_,
+ CreateInitDoneCB(expected_duration, expected_status));
if (AddId(kSourceId, stream_flags) != ChunkDemuxer::kOk)
return false;
@@ -799,11 +763,10 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
}
bool InitDemuxerAudioAndVideoSourcesText(const std::string& audio_id,
- const std::string& video_id,
- bool has_text) {
+ const std::string& video_id) {
EXPECT_CALL(*this, DemuxerOpened());
- demuxer_->Initialize(
- &host_, CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK), true);
+ demuxer_->Initialize(&host_,
+ CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK));
if (AddId(audio_id, HAS_AUDIO) != ChunkDemuxer::kOk)
return false;
@@ -813,11 +776,6 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
int audio_flags = HAS_AUDIO;
int video_flags = HAS_VIDEO;
- if (has_text) {
- audio_flags |= HAS_TEXT;
- video_flags |= HAS_TEXT;
- }
-
// Note: Unlike InitDemuxerWithEncryptionInfo, this method is currently
// incompatible with InSequence tests. Refactoring of the duration
// set expectation to not be added during CreateInitDoneCB() could fix this.
@@ -833,7 +791,7 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
bool InitDemuxerAudioAndVideoSources(const std::string& audio_id,
const std::string& video_id) {
- return InitDemuxerAudioAndVideoSourcesText(audio_id, video_id, false);
+ return InitDemuxerAudioAndVideoSourcesText(audio_id, video_id);
}
// Initializes the demuxer with data from 2 files with different
@@ -864,8 +822,8 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
ExpectInitMediaLogs(HAS_AUDIO | HAS_VIDEO);
EXPECT_CALL(*this, InitSegmentReceivedMock(_));
demuxer_->Initialize(
- &host_, CreateInitDoneCB(base::TimeDelta::FromMilliseconds(2744),
- PIPELINE_OK), true);
+ &host_,
+ CreateInitDoneCB(base::TimeDelta::FromMilliseconds(2744), PIPELINE_OK));
if (AddId(kSourceId, HAS_AUDIO | HAS_VIDEO) != ChunkDemuxer::kOk)
return false;
@@ -874,7 +832,7 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(2)).Times(7);
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2746)));
+ EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2768)));
EXPECT_TRUE(AppendData(bear1->data(), bear1->data_size()));
// Last audio frame has timestamp 2721 and duration 24 (estimated from max
// seen so far for audio track).
@@ -901,7 +859,7 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
// segment.
EXPECT_CALL(*this, InitSegmentReceivedMock(_));
EXPECT_TRUE(AppendData(bear1->data(), 4370));
- EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(23));
+ EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(24));
EXPECT_MEDIA_LOG(TrimmedSpliceOverlap(779000, 759000, 3000));
EXPECT_TRUE(AppendData(bear1->data() + 72737, 28183));
CheckExpectedRanges("{ [0,2736) }");
@@ -1235,8 +1193,7 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
const base::TimeDelta& duration,
int stream_flags) {
EXPECT_CALL(*this, DemuxerOpened());
- demuxer_->Initialize(
- &host_, CreateInitDoneCB(duration, PIPELINE_OK), true);
+ demuxer_->Initialize(&host_, CreateInitDoneCB(duration, PIPELINE_OK));
if (AddId(kSourceId, stream_flags) != ChunkDemuxer::kOk)
return false;
@@ -1244,6 +1201,7 @@ class ChunkDemuxerTest : public ::testing::TestWithParam<BufferingApi> {
// Read a WebM file into memory and send the data to the demuxer.
scoped_refptr<DecoderBuffer> buffer = ReadTestDataFile(filename);
EXPECT_CALL(*this, InitSegmentReceivedMock(_));
+
EXPECT_TRUE(AppendDataInPieces(buffer->data(), buffer->data_size(), 512));
// Verify that the timestamps on the first few packets match what we
@@ -1428,123 +1386,6 @@ TEST_P(ChunkDemuxerTest, Init) {
}
}
-// TODO(acolwell): Fold this test into Init tests since the tests are
-// almost identical.
-TEST_P(ChunkDemuxerTest, InitText) {
- // Test with 1 video stream and 1 text streams, and 0 or 1 audio streams.
- // No encryption cases handled here.
- bool has_video = true;
- bool is_audio_encrypted = false;
- bool is_video_encrypted = false;
- for (int i = 0; i < 2; i++) {
- bool has_audio = (i & 0x1) != 0;
-
- CreateNewDemuxer();
-
- DemuxerStream* text_stream = NULL;
- TextTrackConfig text_config;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(DoAll(SaveArg<0>(&text_stream),
- SaveArg<1>(&text_config)));
-
- int stream_flags = HAS_TEXT;
- if (has_audio)
- stream_flags |= HAS_AUDIO;
-
- if (has_video)
- stream_flags |= HAS_VIDEO;
-
- ASSERT_TRUE(InitDemuxerWithEncryptionInfo(
- stream_flags, is_audio_encrypted, is_video_encrypted));
- ASSERT_TRUE(text_stream);
- EXPECT_EQ(DemuxerStream::TEXT, text_stream->type());
- EXPECT_EQ(kTextSubtitles, text_config.kind());
- EXPECT_FALSE(static_cast<ChunkDemuxerStream*>(text_stream)
- ->supports_partial_append_window_trimming());
-
- DemuxerStream* audio_stream = GetStream(DemuxerStream::AUDIO);
- if (has_audio) {
- ASSERT_TRUE(audio_stream);
-
- const AudioDecoderConfig& config = audio_stream->audio_decoder_config();
- EXPECT_EQ(kCodecVorbis, config.codec());
- EXPECT_EQ(32, config.bits_per_channel());
- EXPECT_EQ(CHANNEL_LAYOUT_STEREO, config.channel_layout());
- EXPECT_EQ(44100, config.samples_per_second());
- EXPECT_GT(config.extra_data().size(), 0u);
- EXPECT_EQ(kSampleFormatPlanarF32, config.sample_format());
- EXPECT_EQ(is_audio_encrypted,
- audio_stream->audio_decoder_config().is_encrypted());
- EXPECT_TRUE(static_cast<ChunkDemuxerStream*>(audio_stream)
- ->supports_partial_append_window_trimming());
- } else {
- EXPECT_FALSE(audio_stream);
- }
-
- DemuxerStream* video_stream = GetStream(DemuxerStream::VIDEO);
- if (has_video) {
- EXPECT_TRUE(video_stream);
- EXPECT_EQ(is_video_encrypted,
- video_stream->video_decoder_config().is_encrypted());
- EXPECT_FALSE(static_cast<ChunkDemuxerStream*>(video_stream)
- ->supports_partial_append_window_trimming());
- } else {
- EXPECT_FALSE(video_stream);
- }
-
- ShutdownDemuxer();
- demuxer_.reset();
- }
-}
-
-TEST_P(ChunkDemuxerTest, SingleTextTrackIdChange) {
- // Test with 1 video stream, 1 audio, and 1 text stream. Send a second init
- // segment in which the text track ID changes. Verify appended buffers before
- // and after the second init segment map to the same underlying track buffers.
- CreateNewDemuxer();
- DemuxerStream* text_stream = NULL;
- TextTrackConfig text_config;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(DoAll(SaveArg<0>(&text_stream),
- SaveArg<1>(&text_config)));
- ASSERT_TRUE(InitDemuxerWithEncryptionInfo(
- HAS_TEXT | HAS_AUDIO | HAS_VIDEO, false, false));
- DemuxerStream* audio_stream = GetStream(DemuxerStream::AUDIO);
- DemuxerStream* video_stream = GetStream(DemuxerStream::VIDEO);
- ASSERT_TRUE(audio_stream);
- ASSERT_TRUE(video_stream);
- ASSERT_TRUE(text_stream);
-
- AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "0K 23K", 23),
- MuxedStreamInfo(kVideoTrackNum, "0K 30", 30),
- MuxedStreamInfo(kTextTrackNum, "10K"));
- CheckExpectedRanges("{ [0,46) }");
-
- std::unique_ptr<uint8_t[]> info_tracks;
- int info_tracks_size = 0;
- CreateInitSegment(
- HAS_TEXT | HAS_AUDIO | HAS_VIDEO | USE_ALTERNATE_TEXT_TRACK_ID, false,
- false, &info_tracks, &info_tracks_size);
- EXPECT_CALL(*this, InitSegmentReceivedMock(_));
- ASSERT_TRUE(demuxer_->AppendData(
- kSourceId, info_tracks.get(), info_tracks_size,
- append_window_start_for_next_append_, append_window_end_for_next_append_,
- &timestamp_offset_map_[kSourceId]));
-
- AppendMuxedCluster(
- MuxedStreamInfo(kAudioTrackNum, "46K 69K", 23),
- MuxedStreamInfo(kVideoTrackNum, "60K",
- WebMClusterParser::kDefaultVideoBufferDurationInMs),
- MuxedStreamInfo(kAlternateTextTrackNum, "45K"));
-
- CheckExpectedRanges("{ [0,92) }");
- CheckExpectedBuffers(audio_stream, "0K 23K 46K 69K");
- CheckExpectedBuffers(video_stream, "0K 30 60K");
- CheckExpectedBuffers(text_stream, "10K 45K");
-
- ShutdownDemuxer();
-}
-
TEST_P(ChunkDemuxerTest, AudioVideoTrackIdsChange) {
// Test with 1 audio and 1 video stream. Send a second init segment in which
// the audio and video track IDs change. Verify that appended buffers before
@@ -1582,39 +1423,32 @@ TEST_P(ChunkDemuxerTest, InitSegmentSetsNeedRandomAccessPointFlag) {
// to fully test this since needs random access point flag initializes to
// true.
CreateNewDemuxer();
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- ASSERT_TRUE(InitDemuxerWithEncryptionInfo(
- HAS_TEXT | HAS_AUDIO | HAS_VIDEO, false, false));
+ ASSERT_TRUE(
+ InitDemuxerWithEncryptionInfo(HAS_AUDIO | HAS_VIDEO, false, false));
DemuxerStream* audio_stream = GetStream(DemuxerStream::AUDIO);
DemuxerStream* video_stream = GetStream(DemuxerStream::VIDEO);
- ASSERT_TRUE(audio_stream && video_stream && text_stream);
+ ASSERT_TRUE(audio_stream && video_stream);
AppendMuxedCluster(
MuxedStreamInfo(kAudioTrackNum, "23K",
WebMClusterParser::kDefaultAudioBufferDurationInMs),
- MuxedStreamInfo(kVideoTrackNum, "0 30K", 30),
- MuxedStreamInfo(kTextTrackNum, "25K 40K"));
+ MuxedStreamInfo(kVideoTrackNum, "0 30K", 30));
CheckExpectedRanges("{ [23,46) }");
EXPECT_CALL(*this, InitSegmentReceivedMock(_));
- ASSERT_TRUE(AppendInitSegment(HAS_TEXT | HAS_AUDIO | HAS_VIDEO));
+ ASSERT_TRUE(AppendInitSegment(HAS_AUDIO | HAS_VIDEO));
AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "46K 69K", 23),
- MuxedStreamInfo(kVideoTrackNum, "60 90K", 30),
- MuxedStreamInfo(kTextTrackNum, "80K 90K"));
+ MuxedStreamInfo(kVideoTrackNum, "60 90K", 30));
CheckExpectedRanges("{ [23,92) }");
CheckExpectedBuffers(audio_stream, "23K 46K 69K");
CheckExpectedBuffers(video_stream, "30K 90K");
- CheckExpectedBuffers(text_stream, "25K 40K 80K 90K");
}
TEST_P(ChunkDemuxerTest, Shutdown_BeforeAllInitSegmentsAppended) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(&host_, base::Bind(&ChunkDemuxerTest::DemuxerInitialized,
- base::Unretained(this)),
- true);
+ base::Unretained(this)));
EXPECT_EQ(AddId("audio", HAS_AUDIO), ChunkDemuxer::kOk);
EXPECT_EQ(AddId("video", HAS_VIDEO), ChunkDemuxer::kOk);
@@ -1626,54 +1460,27 @@ TEST_P(ChunkDemuxerTest, Shutdown_BeforeAllInitSegmentsAppended) {
ShutdownDemuxer();
}
-TEST_P(ChunkDemuxerTest, Shutdown_BeforeAllInitSegmentsAppendedText) {
- EXPECT_CALL(*this, DemuxerOpened());
- demuxer_->Initialize(&host_, base::Bind(&ChunkDemuxerTest::DemuxerInitialized,
- base::Unretained(this)),
- true);
-
- EXPECT_EQ(AddId("audio", HAS_AUDIO), ChunkDemuxer::kOk);
- EXPECT_EQ(AddId("video_and_text", HAS_VIDEO), ChunkDemuxer::kOk);
-
- EXPECT_CALL(host_, AddTextStream(_, _))
- .Times(Exactly(1));
-
- ExpectInitMediaLogs(HAS_VIDEO);
- EXPECT_CALL(*this, InitSegmentReceivedMock(_));
- ASSERT_TRUE(
- AppendInitSegmentWithSourceId("video_and_text", HAS_VIDEO | HAS_TEXT));
-
- ShutdownDemuxer();
-}
-
// Verifies that all streams waiting for data receive an end of stream
// buffer when Shutdown() is called.
TEST_P(ChunkDemuxerTest, Shutdown_EndOfStreamWhileWaitingForData) {
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO | HAS_TEXT));
+ ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
DemuxerStream* audio_stream = GetStream(DemuxerStream::AUDIO);
DemuxerStream* video_stream = GetStream(DemuxerStream::VIDEO);
bool audio_read_done = false;
bool video_read_done = false;
- bool text_read_done = false;
audio_stream->Read(base::Bind(&OnReadDone_EOSExpected, &audio_read_done));
video_stream->Read(base::Bind(&OnReadDone_EOSExpected, &video_read_done));
- text_stream->Read(base::Bind(&OnReadDone_EOSExpected, &text_read_done));
base::RunLoop().RunUntilIdle();
EXPECT_FALSE(audio_read_done);
EXPECT_FALSE(video_read_done);
- EXPECT_FALSE(text_read_done);
ShutdownDemuxer();
EXPECT_TRUE(audio_read_done);
EXPECT_TRUE(video_read_done);
- EXPECT_TRUE(text_read_done);
}
// Test that Seek() completes successfully when the first cluster
@@ -1887,8 +1694,8 @@ TEST_P(ChunkDemuxerTest, PerStreamMonotonicallyIncreasingTimestamps) {
// INFO & TRACKS data.
TEST_P(ChunkDemuxerTest, ClusterBeforeInitSegment) {
EXPECT_CALL(*this, DemuxerOpened());
- demuxer_->Initialize(
- &host_, NewExpectedStatusCB(CHUNK_DEMUXER_ERROR_APPEND_FAILED), true);
+ demuxer_->Initialize(&host_,
+ NewExpectedStatusCB(CHUNK_DEMUXER_ERROR_APPEND_FAILED));
ASSERT_EQ(AddId(), ChunkDemuxer::kOk);
@@ -1900,15 +1707,15 @@ TEST_P(ChunkDemuxerTest, ClusterBeforeInitSegment) {
// Test cases where we get an MarkEndOfStream() call during initialization.
TEST_P(ChunkDemuxerTest, EOSDuringInit) {
EXPECT_CALL(*this, DemuxerOpened());
- demuxer_->Initialize(
- &host_, NewExpectedStatusCB(DEMUXER_ERROR_COULD_NOT_OPEN), true);
+ demuxer_->Initialize(&host_,
+ NewExpectedStatusCB(DEMUXER_ERROR_COULD_NOT_OPEN));
MarkEndOfStream(PIPELINE_OK);
}
TEST_P(ChunkDemuxerTest, EndOfStreamWithNoAppend) {
EXPECT_CALL(*this, DemuxerOpened());
- demuxer_->Initialize(
- &host_, NewExpectedStatusCB(DEMUXER_ERROR_COULD_NOT_OPEN), true);
+ demuxer_->Initialize(&host_,
+ NewExpectedStatusCB(DEMUXER_ERROR_COULD_NOT_OPEN));
ASSERT_EQ(AddId(), ChunkDemuxer::kOk);
@@ -2108,50 +1915,24 @@ TEST_P(ChunkDemuxerTest, EndOfStreamDuringCanceledSeek) {
// Verify buffered range change behavior for audio/video/text tracks.
TEST_P(ChunkDemuxerTest, EndOfStreamRangeChanges) {
- DemuxerStream* text_stream = NULL;
-
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO | HAS_TEXT));
+ ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
AppendMuxedCluster(MuxedStreamInfo(kVideoTrackNum, "0K 33", 33),
MuxedStreamInfo(kAudioTrackNum, "0K 23K", 23));
- // Check expected ranges and verify that an empty text track does not
- // affect the expected ranges.
CheckExpectedRanges("{ [0,46) }");
EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(66)));
MarkEndOfStream(PIPELINE_OK);
- // Check expected ranges and verify that an empty text track does not
- // affect the expected ranges.
CheckExpectedRanges("{ [0,66) }");
-
- // Unmark end of stream state and verify that the ranges return to
- // their pre-"end of stream" values.
- demuxer_->UnmarkEndOfStream();
- CheckExpectedRanges("{ [0,46) }");
-
- // Add text track data and verify that the buffered ranges don't change
- // since the intersection of all the tracks doesn't change.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(200)));
- AppendMuxedCluster(MuxedStreamInfo(kVideoTrackNum, "0K 33", 33),
- MuxedStreamInfo(kAudioTrackNum, "0K 23K", 23),
- MuxedStreamInfo(kTextTrackNum, "0K 100K"));
- CheckExpectedRanges("{ [0,46) }");
-
- // Mark end of stream and verify that text track data is reflected in
- // the new range.
- MarkEndOfStream(PIPELINE_OK);
- CheckExpectedRanges("{ [0,200) }");
}
// Make sure AppendData() will accept elements that span multiple calls.
TEST_P(ChunkDemuxerTest, AppendingInPieces) {
EXPECT_CALL(*this, DemuxerOpened());
- demuxer_->Initialize(
- &host_, CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK), true);
+ demuxer_->Initialize(&host_,
+ CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK));
ASSERT_EQ(AddId(), ChunkDemuxer::kOk);
@@ -2199,7 +1980,7 @@ TEST_P(ChunkDemuxerTest, WebMFile_AudioAndVideo) {
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2746)));
+ EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2768)));
ASSERT_TRUE(ParseWebMFile("bear-320x240.webm", buffer_timestamps,
base::TimeDelta::FromMilliseconds(2744)));
@@ -2243,7 +2024,7 @@ TEST_P(ChunkDemuxerTest, WebMFile_AudioOnly) {
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2746)));
+ EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2768)));
ASSERT_TRUE(ParseWebMFile("bear-320x240-audio-only.webm", buffer_timestamps,
base::TimeDelta::FromMilliseconds(2744),
@@ -2283,6 +2064,10 @@ TEST_P(ChunkDemuxerTest, WebMFile_AltRefFrames) {
{kSkip, kSkip},
};
+ // Expect duration adjustment since actual duration differs slightly from
+ // duration in the init segment.
+ EXPECT_CALL(host_, SetDuration(base::TimeDelta::FromMilliseconds(2768)));
+
ExpectInitMediaLogs(HAS_AUDIO | HAS_VIDEO);
EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(2));
ASSERT_TRUE(ParseWebMFile("bear-320x240-altref.webm", buffer_timestamps,
@@ -2345,8 +2130,8 @@ TEST_P(ChunkDemuxerTest, IncrementalClusterParsing) {
TEST_P(ChunkDemuxerTest, ParseErrorDuringInit) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
- &host_, CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED),
- true);
+ &host_,
+ CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED));
ASSERT_EQ(AddId(), ChunkDemuxer::kOk);
@@ -2360,8 +2145,8 @@ TEST_P(ChunkDemuxerTest, ParseErrorDuringInit) {
TEST_P(ChunkDemuxerTest, AVHeadersWithAudioOnlyType) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
- &host_, CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED),
- true);
+ &host_,
+ CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED));
ASSERT_EQ(AddId(kSourceId, "audio/webm", "vorbis"), ChunkDemuxer::kOk);
@@ -2374,8 +2159,8 @@ TEST_P(ChunkDemuxerTest, AVHeadersWithAudioOnlyType) {
TEST_P(ChunkDemuxerTest, AVHeadersWithVideoOnlyType) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
- &host_, CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED),
- true);
+ &host_,
+ CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED));
ASSERT_EQ(AddId(kSourceId, "video/webm", "vp8"), ChunkDemuxer::kOk);
@@ -2390,8 +2175,8 @@ TEST_P(ChunkDemuxerTest, AVHeadersWithVideoOnlyType) {
TEST_P(ChunkDemuxerTest, AudioOnlyHeaderWithAVType) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
- &host_, CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED),
- true);
+ &host_,
+ CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED));
ASSERT_EQ(AddId(kSourceId, "video/webm", "vorbis,vp8"), ChunkDemuxer::kOk);
@@ -2406,8 +2191,8 @@ TEST_P(ChunkDemuxerTest, AudioOnlyHeaderWithAVType) {
TEST_P(ChunkDemuxerTest, VideoOnlyHeaderWithAVType) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
- &host_, CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED),
- true);
+ &host_,
+ CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED));
ASSERT_EQ(AddId(kSourceId, "video/webm", "vorbis,vp8"), ChunkDemuxer::kOk);
@@ -2450,15 +2235,10 @@ TEST_P(ChunkDemuxerTest, AddSeparateSourcesForAudioAndVideo) {
}
TEST_P(ChunkDemuxerTest, AddSeparateSourcesForAudioAndVideoText) {
- // TODO(matthewjheaney): Here and elsewhere, we need more tests
- // for inband text tracks (http://crbug/321455).
-
std::string audio_id = "audio1";
std::string video_id = "video1";
- EXPECT_CALL(host_, AddTextStream(_, _))
- .Times(Exactly(2));
- ASSERT_TRUE(InitDemuxerAudioAndVideoSourcesText(audio_id, video_id, true));
+ ASSERT_TRUE(InitDemuxerAudioAndVideoSourcesText(audio_id, video_id));
// Append audio and video data into separate source ids.
ASSERT_TRUE(AppendCluster(
@@ -2473,8 +2253,8 @@ TEST_P(ChunkDemuxerTest, AddSeparateSourcesForAudioAndVideoText) {
TEST_P(ChunkDemuxerTest, AddIdFailures) {
EXPECT_CALL(*this, DemuxerOpened());
- demuxer_->Initialize(
- &host_, CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK), true);
+ demuxer_->Initialize(&host_,
+ CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK));
std::string audio_id = "audio1";
std::string video_id = "video1";
@@ -2527,10 +2307,8 @@ TEST_P(ChunkDemuxerTest, RemoveId) {
// quota for new IDs in the future.
TEST_P(ChunkDemuxerTest, RemoveAndAddId) {
demuxer_->Initialize(
- &host_,
- base::BindRepeating(&ChunkDemuxerTest::DemuxerInitialized,
- base::Unretained(this)),
- true);
+ &host_, base::BindRepeating(&ChunkDemuxerTest::DemuxerInitialized,
+ base::Unretained(this)));
std::string audio_id_1 = "audio1";
ASSERT_TRUE(AddId(audio_id_1, HAS_AUDIO) == ChunkDemuxer::kOk);
@@ -2940,32 +2718,6 @@ TEST_P(ChunkDemuxerTest, GetBufferedRanges_AudioVideo) {
CheckExpectedRanges("{ [0,23) [300,400) [600,670) [900,950) [1200,1250) }");
}
-TEST_P(ChunkDemuxerTest, GetBufferedRanges_AudioVideoText) {
- EXPECT_CALL(host_, AddTextStream(_, _));
- ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO | HAS_TEXT));
-
- // Append audio & video data
- AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "0K 23K", 23),
- MuxedStreamInfo(kVideoTrackNum, "0K 33", 33));
-
- // Verify that a text track with no cues does not result in an empty buffered
- // range.
- CheckExpectedRanges("{ [0,46) }");
-
- // Add some text cues.
- AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "100K 123K", 23),
- MuxedStreamInfo(kVideoTrackNum, "100K 133", 33),
- MuxedStreamInfo(kTextTrackNum, "100K 200K"));
-
- // Verify that the text cues are not reflected in the buffered ranges.
- CheckExpectedRanges("{ [0,46) [100,146) }");
-
- // Remove the buffered ranges.
- demuxer_->Remove(kSourceId, base::TimeDelta(),
- base::TimeDelta::FromMilliseconds(250));
- CheckExpectedRanges("{ }");
-}
-
// Once MarkEndOfStream() is called, GetBufferedRanges should not cut off any
// over-hanging tails at the end of the ranges as this is likely due to block
// duration differences.
@@ -3092,10 +2844,8 @@ TEST_P(ChunkDemuxerTest, DifferentStreamTimecodesOutOfRange) {
TEST_P(ChunkDemuxerTest, CodecPrefixMatching) {
demuxer_->Initialize(
- &host_,
- base::BindRepeating(&ChunkDemuxerTest::DemuxerInitialized,
- base::Unretained(this)),
- true);
+ &host_, base::BindRepeating(&ChunkDemuxerTest::DemuxerInitialized,
+ base::Unretained(this)));
ChunkDemuxer::Status expected = ChunkDemuxer::kNotSupported;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -3126,10 +2876,8 @@ TEST_P(ChunkDemuxerTest, CodecIDsThatAreNotRFC6381Compliant) {
};
demuxer_->Initialize(
- &host_,
- base::BindRepeating(&ChunkDemuxerTest::DemuxerInitialized,
- base::Unretained(this)),
- true);
+ &host_, base::BindRepeating(&ChunkDemuxerTest::DemuxerInitialized,
+ base::Unretained(this)));
for (size_t i = 0; i < arraysize(codec_ids); ++i) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -3188,8 +2936,7 @@ TEST_P(ChunkDemuxerTest, EndOfStreamStillSetAfterSeek) {
TEST_P(ChunkDemuxerTest, GetBufferedRangesBeforeInitSegment) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(&host_, base::Bind(&ChunkDemuxerTest::DemuxerInitialized,
- base::Unretained(this)),
- true);
+ base::Unretained(this)));
ASSERT_EQ(AddId("audio", HAS_AUDIO), ChunkDemuxer::kOk);
ASSERT_EQ(AddId("video", HAS_VIDEO), ChunkDemuxer::kOk);
@@ -3453,8 +3200,8 @@ TEST_P(ChunkDemuxerTest, EmitBuffersDuringAbort) {
EXPECT_MEDIA_LOG(CodecName("audio", "aac"));
EXPECT_MEDIA_LOG(FoundStream("video"));
EXPECT_MEDIA_LOG(CodecName("video", "h264"));
- demuxer_->Initialize(&host_, CreateInitDoneCB(kInfiniteDuration, PIPELINE_OK),
- true);
+ demuxer_->Initialize(&host_,
+ CreateInitDoneCB(kInfiniteDuration, PIPELINE_OK));
EXPECT_EQ(ChunkDemuxer::kOk, AddId(kSourceId, kMp2tMimeType, kMp2tCodecs));
// For info:
@@ -3523,8 +3270,8 @@ TEST_P(ChunkDemuxerTest, SeekCompleteDuringAbort) {
EXPECT_MEDIA_LOG(CodecName("audio", "aac"));
EXPECT_MEDIA_LOG(FoundStream("video"));
EXPECT_MEDIA_LOG(CodecName("video", "h264"));
- demuxer_->Initialize(&host_, CreateInitDoneCB(kInfiniteDuration, PIPELINE_OK),
- true);
+ demuxer_->Initialize(&host_,
+ CreateInitDoneCB(kInfiniteDuration, PIPELINE_OK));
EXPECT_EQ(ChunkDemuxer::kOk, AddId(kSourceId, kMp2tMimeType, kMp2tCodecs));
// For info:
@@ -3655,7 +3402,6 @@ TEST_P(ChunkDemuxerTest, WebMIsParsingMediaSegmentDetection) {
ASSERT_TRUE(InitDemuxer(HAS_AUDIO));
EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(23)).Times(2);
- EXPECT_MEDIA_LOG(TrimmedSpliceOverlap(2000, 1000, 22000));
for (size_t i = 0; i < sizeof(kBuffer); i++) {
DVLOG(3) << "Appending and testing index " << i;
ASSERT_TRUE(AppendData(kBuffer + i, 1));
@@ -3745,8 +3491,7 @@ TEST_P(ChunkDemuxerTest, AppendAfterEndOfStream) {
// the pipeline has a chance to initialize the demuxer.
TEST_P(ChunkDemuxerTest, Shutdown_BeforeInitialize) {
demuxer_->Shutdown();
- demuxer_->Initialize(
- &host_, CreateInitDoneCB(DEMUXER_ERROR_COULD_NOT_OPEN), true);
+ demuxer_->Initialize(&host_, CreateInitDoneCB(DEMUXER_ERROR_COULD_NOT_OPEN));
base::RunLoop().RunUntilIdle();
}
@@ -4184,8 +3929,7 @@ TEST_P(ChunkDemuxerTest, AppendWindow_WebMFile_AudioOnly) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
&host_,
- CreateInitDoneCB(base::TimeDelta::FromMilliseconds(2744), PIPELINE_OK),
- true);
+ CreateInitDoneCB(base::TimeDelta::FromMilliseconds(2744), PIPELINE_OK));
ASSERT_EQ(ChunkDemuxer::kOk, AddId(kSourceId, HAS_AUDIO));
// Set the append window to [50,150).
@@ -4218,15 +3962,14 @@ TEST_P(ChunkDemuxerTest, AppendWindow_AudioConfigUpdateRemovesPreroll) {
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(
&host_,
- CreateInitDoneCB(base::TimeDelta::FromMilliseconds(2744), PIPELINE_OK),
- true);
+ CreateInitDoneCB(base::TimeDelta::FromMilliseconds(2744), PIPELINE_OK));
ASSERT_EQ(ChunkDemuxer::kOk, AddId(kSourceId, HAS_AUDIO));
// Set the append window such that the first file is completely before the
// append window.
// Expect duration adjustment since actual duration differs slightly from
// duration in the init segment.
- const base::TimeDelta duration_1 = base::TimeDelta::FromMilliseconds(2746);
+ const base::TimeDelta duration_1 = base::TimeDelta::FromMilliseconds(2768);
append_window_start_for_next_append_ = duration_1;
EXPECT_MEDIA_LOG(DroppedFrameCheckAppendWindow(
@@ -4251,69 +3994,16 @@ TEST_P(ChunkDemuxerTest, AppendWindow_AudioConfigUpdateRemovesPreroll) {
scoped_refptr<DecoderBuffer> buffer2 =
ReadTestDataFile("bear-320x240-audio-only-48khz.webm");
EXPECT_CALL(*this, InitSegmentReceivedMock(_));
- EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(21));
+ EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimated(22));
EXPECT_CALL(host_, SetDuration(_)).Times(AnyNumber());
ASSERT_TRUE(SetTimestampOffset(kSourceId, duration_1));
ASSERT_TRUE(AppendDataInPieces(buffer2->data(), buffer2->data_size(), 512));
- CheckExpectedRanges("{ [2746,5519) }");
+ CheckExpectedRanges("{ [2768,5542) }");
Seek(duration_1);
ExpectConfigChanged(DemuxerStream::AUDIO);
ASSERT_FALSE(config_1.Matches(stream->audio_decoder_config()));
- CheckExpectedBuffers(stream, "2746K 2767K 2789K 2810K");
-}
-
-TEST_P(ChunkDemuxerTest, AppendWindow_Text) {
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- ASSERT_TRUE(InitDemuxer(HAS_VIDEO | HAS_TEXT));
- DemuxerStream* video_stream = GetStream(DemuxerStream::VIDEO);
-
- // Set the append window to [20,280).
- append_window_start_for_next_append_ = base::TimeDelta::FromMilliseconds(20);
- append_window_end_for_next_append_ = base::TimeDelta::FromMilliseconds(280);
-
- EXPECT_MEDIA_LOG(DroppedFrame("video", 0));
- EXPECT_MEDIA_LOG(DroppedFrame("text", 0));
- EXPECT_MEDIA_LOG(DroppedFrame("text", 200000));
- EXPECT_MEDIA_LOG(DroppedFrame("video", 270000));
- EXPECT_MEDIA_LOG(DroppedFrame("video", 300000));
- EXPECT_MEDIA_LOG(DroppedFrame("text", 300000));
- EXPECT_MEDIA_LOG(DroppedFrame("video", 330000));
-
- // Append a cluster that starts before and ends after the append
- // window.
- AppendMuxedCluster(
- MuxedStreamInfo(kVideoTrackNum,
- "0K 30 60 90 120K 150 180 210 240K 270 300 330K", 30),
- MuxedStreamInfo(kTextTrackNum, "0K 100K 200K 300K"));
-
- // Verify that text cues that start outside the window are not included
- // in the buffer. Also verify that cues that extend beyond the
- // window are not included.
- CheckExpectedRanges("{ [100,270) }");
- CheckExpectedBuffers(video_stream, "120K 150 180 210 240K");
- CheckExpectedBuffers(text_stream, "100K");
-
- // Extend the append window to [20,650).
- append_window_end_for_next_append_ = base::TimeDelta::FromMilliseconds(650);
-
- EXPECT_MEDIA_LOG(DroppedFrame("text", 600000));
- EXPECT_MEDIA_LOG(DroppedFrame("video", 630000));
- EXPECT_MEDIA_LOG(DroppedFrame("text", 700000));
-
- // Append more data and verify that a new range is created.
- AppendMuxedCluster(
- MuxedStreamInfo(kVideoTrackNum,
- "360 390 420K 450 480 510 540K 570 600 630K", 30),
- MuxedStreamInfo(kTextTrackNum, "400K 500K 600K 700K"));
- CheckExpectedRanges("{ [100,270) [400,630) }");
-
- // Seek to the new range and verify that the expected buffers are returned.
- Seek(base::TimeDelta::FromMilliseconds(420));
- CheckExpectedBuffers(video_stream, "420K 450 480 510 540K 570 600");
- CheckExpectedBuffers(text_stream, "400K 500K");
+ CheckExpectedBuffers(stream, "2768K 2789K 2811K 2832K");
}
TEST_P(ChunkDemuxerTest, StartWaitingForSeekAfterParseError) {
@@ -4326,22 +4016,17 @@ TEST_P(ChunkDemuxerTest, StartWaitingForSeekAfterParseError) {
}
TEST_P(ChunkDemuxerTest, Remove_AudioVideoText) {
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO | HAS_TEXT));
+ ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
DemuxerStream* audio_stream = GetStream(DemuxerStream::AUDIO);
DemuxerStream* video_stream = GetStream(DemuxerStream::VIDEO);
AppendMuxedCluster(
MuxedStreamInfo(kAudioTrackNum, "0K 20K 40K 60K 80K 100K 120K 140K", 20),
- MuxedStreamInfo(kVideoTrackNum, "0K 30 60 90 120K 150 180", 30),
- MuxedStreamInfo(kTextTrackNum, "0K 100K 200K"));
+ MuxedStreamInfo(kVideoTrackNum, "0K 30 60 90 120K 150 180", 30));
CheckExpectedBuffers(audio_stream, "0K 20K 40K 60K 80K 100K 120K 140K");
CheckExpectedBuffers(video_stream, "0K 30 60 90 120K 150 180");
- CheckExpectedBuffers(text_stream, "0K 100K 200K");
// Remove the buffers that were added.
demuxer_->Remove(kSourceId, base::TimeDelta(),
@@ -4354,13 +4039,11 @@ TEST_P(ChunkDemuxerTest, Remove_AudioVideoText) {
// ones and verify that only the new buffers are returned.
AppendMuxedCluster(
MuxedStreamInfo(kAudioTrackNum, "1K 21K 41K 61K 81K 101K 121K 141K", 20),
- MuxedStreamInfo(kVideoTrackNum, "1K 31 61 91 121K 151 181", 30),
- MuxedStreamInfo(kTextTrackNum, "1K 101K 201K"));
+ MuxedStreamInfo(kVideoTrackNum, "1K 31 61 91 121K 151 181", 30));
Seek(base::TimeDelta());
CheckExpectedBuffers(audio_stream, "1K 21K 41K 61K 81K 101K 121K 141K");
CheckExpectedBuffers(video_stream, "1K 31 61 91 121K 151 181");
- CheckExpectedBuffers(text_stream, "1K 101K 201K");
}
TEST_P(ChunkDemuxerTest, Remove_StartAtDuration) {
@@ -4394,10 +4077,7 @@ TEST_P(ChunkDemuxerTest, Remove_StartAtDuration) {
// the seek point and will return cues after the seek position
// when they are eventually appended.
TEST_P(ChunkDemuxerTest, SeekCompletesWithoutTextCues) {
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO | HAS_TEXT));
+ ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
DemuxerStream* audio_stream = GetStream(DemuxerStream::AUDIO);
DemuxerStream* video_stream = GetStream(DemuxerStream::VIDEO);
@@ -4411,11 +4091,6 @@ TEST_P(ChunkDemuxerTest, SeekCompletesWithoutTextCues) {
EXPECT_FALSE(seek_cb_was_called);
- bool text_read_done = false;
- text_stream->Read(base::Bind(&OnReadDone,
- base::TimeDelta::FromMilliseconds(225),
- &text_read_done));
-
// Append audio & video data so the seek completes.
AppendMuxedCluster(
MuxedStreamInfo(kAudioTrackNum,
@@ -4424,26 +4099,17 @@ TEST_P(ChunkDemuxerTest, SeekCompletesWithoutTextCues) {
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(seek_cb_was_called);
- EXPECT_FALSE(text_read_done);
// Read some audio & video buffers to further verify seek completion.
CheckExpectedBuffers(audio_stream, "120K 140K");
CheckExpectedBuffers(video_stream, "120K 150");
- EXPECT_FALSE(text_read_done);
-
// Append text cues that start after the seek point and verify that
// they are returned by Read() calls.
AppendMuxedCluster(MuxedStreamInfo(kAudioTrackNum, "220K 240K 260K 280K", 20),
- MuxedStreamInfo(kVideoTrackNum, "240K 270 300 330", 30),
- MuxedStreamInfo(kTextTrackNum, "225K 275K 325K"));
+ MuxedStreamInfo(kVideoTrackNum, "240K 270 300 330", 30));
base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(text_read_done);
-
- // NOTE: we start at 275 here because the buffer at 225 was returned
- // to the pending read initiated above.
- CheckExpectedBuffers(text_stream, "275K 325K");
// Verify that audio & video streams continue to return expected values.
CheckExpectedBuffers(audio_stream, "160K 180K");
@@ -4845,8 +4511,7 @@ TEST_P(ChunkDemuxerTest, MultipleIds) {
CreateNewDemuxer();
EXPECT_CALL(*this, DemuxerOpened());
EXPECT_CALL(host_, SetDuration(_)).Times(2);
- demuxer_->Initialize(&host_, CreateInitDoneCB(kNoTimestamp, PIPELINE_OK),
- true);
+ demuxer_->Initialize(&host_, CreateInitDoneCB(kNoTimestamp, PIPELINE_OK));
const char* kId1 = "id1";
const char* kId2 = "id2";
@@ -4872,7 +4537,7 @@ TEST_P(ChunkDemuxerTest, CompleteInitAfterIdRemoved) {
CreateNewDemuxer();
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(&host_,
- CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK), true);
+ CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK));
// Add two ids, then remove one of the ids and verify that adding init segment
// only for the remaining id still triggers the InitDoneCB.
@@ -4895,7 +4560,7 @@ TEST_P(ChunkDemuxerTest, RemovingIdMustRemoveStreams) {
CreateNewDemuxer();
EXPECT_CALL(*this, DemuxerOpened());
demuxer_->Initialize(&host_,
- CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK), true);
+ CreateInitDoneCB(kDefaultDuration(), PIPELINE_OK));
const char* kId1 = "id1";
EXPECT_EQ(AddId(kId1, "video/webm", "vorbis,vp8"), ChunkDemuxer::kOk);
@@ -4952,10 +4617,8 @@ TEST_P(ChunkDemuxerTest, SequenceModeSingleTrackNoWarning) {
TEST_P(ChunkDemuxerTest, Mp4Vp9CodecSupport) {
demuxer_->Initialize(
- &host_,
- base::BindRepeating(&ChunkDemuxerTest::DemuxerInitialized,
- base::Unretained(this)),
- true);
+ &host_, base::BindRepeating(&ChunkDemuxerTest::DemuxerInitialized,
+ base::Unretained(this)));
ChunkDemuxer::Status expected = ChunkDemuxer::kOk;
EXPECT_EQ(AddId("source_id", "video/mp4", "vp09.00.10.08"), expected);
}
@@ -4966,8 +4629,8 @@ TEST_P(ChunkDemuxerTest, UnmarkEOSRetainsParseErrorState_BeforeInit) {
EXPECT_CALL(*this, DemuxerOpened());
EXPECT_MEDIA_LOG(StreamParsingFailed());
demuxer_->Initialize(
- &host_, CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED),
- true);
+ &host_,
+ CreateInitDoneCB(kNoTimestamp, CHUNK_DEMUXER_ERROR_APPEND_FAILED));
ASSERT_EQ(AddId(kSourceId, HAS_AUDIO | HAS_VIDEO), ChunkDemuxer::kOk);
AppendGarbage();
diff --git a/chromium/media/filters/decoder_stream.cc b/chromium/media/filters/decoder_stream.cc
index c4ecd423fbc..09d61733315 100644
--- a/chromium/media/filters/decoder_stream.cc
+++ b/chromium/media/filters/decoder_stream.cc
@@ -73,13 +73,13 @@ DecoderStream<StreamType>::~DecoderStream() {
DCHECK(task_runner_->BelongsToCurrentThread());
if (init_cb_) {
- task_runner_->PostTask(
- FROM_HERE, base::BindOnce(base::ResetAndReturn(&init_cb_), false));
+ task_runner_->PostTask(FROM_HERE,
+ base::BindOnce(std::move(init_cb_), false));
}
if (read_cb_) {
task_runner_->PostTask(
- FROM_HERE, base::BindOnce(base::ResetAndReturn(&read_cb_), ABORTED,
- scoped_refptr<Output>()));
+ FROM_HERE,
+ base::BindOnce(std::move(read_cb_), ABORTED, scoped_refptr<Output>()));
}
if (reset_cb_)
task_runner_->PostTask(FROM_HERE, base::ResetAndReturn(&reset_cb_));
@@ -96,10 +96,10 @@ std::string DecoderStream<StreamType>::GetStreamTypeString() {
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::Initialize(
DemuxerStream* stream,
- const InitCB& init_cb,
+ InitCB init_cb,
CdmContext* cdm_context,
- const StatisticsCB& statistics_cb,
- const base::Closure& waiting_for_decryption_key_cb) {
+ StatisticsCB statistics_cb,
+ base::RepeatingClosure waiting_for_decryption_key_cb) {
FUNCTION_DVLOG(1);
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_UNINITIALIZED);
@@ -107,10 +107,10 @@ void DecoderStream<StreamType>::Initialize(
DCHECK(init_cb);
stream_ = stream;
- init_cb_ = init_cb;
+ init_cb_ = std::move(init_cb);
cdm_context_ = cdm_context;
- statistics_cb_ = statistics_cb;
- waiting_for_decryption_key_cb_ = waiting_for_decryption_key_cb;
+ statistics_cb_ = std::move(statistics_cb);
+ waiting_for_decryption_key_cb_ = std::move(waiting_for_decryption_key_cb);
traits_->OnStreamReset(stream_);
@@ -119,7 +119,7 @@ void DecoderStream<StreamType>::Initialize(
}
template <DemuxerStream::Type StreamType>
-void DecoderStream<StreamType>::Read(const ReadCB& read_cb) {
+void DecoderStream<StreamType>::Read(ReadCB read_cb) {
FUNCTION_DVLOG(3);
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(state_ != STATE_UNINITIALIZED && state_ != STATE_INITIALIZING)
@@ -130,26 +130,27 @@ void DecoderStream<StreamType>::Read(const ReadCB& read_cb) {
DCHECK(!reset_cb_);
if (state_ == STATE_ERROR) {
- task_runner_->PostTask(FROM_HERE, base::BindOnce(read_cb, DECODE_ERROR,
- scoped_refptr<Output>()));
+ task_runner_->PostTask(FROM_HERE,
+ base::BindOnce(std::move(read_cb), DECODE_ERROR,
+ scoped_refptr<Output>()));
return;
}
if (state_ == STATE_END_OF_STREAM && ready_outputs_.empty() &&
unprepared_outputs_.empty()) {
- task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(read_cb, OK, StreamTraits::CreateEOSOutput()));
+ task_runner_->PostTask(FROM_HERE,
+ base::BindOnce(std::move(read_cb), OK,
+ StreamTraits::CreateEOSOutput()));
return;
}
if (!ready_outputs_.empty()) {
- task_runner_->PostTask(FROM_HERE,
- base::BindOnce(read_cb, OK, ready_outputs_.front()));
+ task_runner_->PostTask(FROM_HERE, base::BindOnce(std::move(read_cb), OK,
+ ready_outputs_.front()));
ready_outputs_.pop_front();
MaybePrepareAnotherOutput();
} else {
- read_cb_ = read_cb;
+ read_cb_ = std::move(read_cb);
}
if (state_ == STATE_NORMAL && CanDecodeMore())
@@ -157,18 +158,18 @@ void DecoderStream<StreamType>::Read(const ReadCB& read_cb) {
}
template <DemuxerStream::Type StreamType>
-void DecoderStream<StreamType>::Reset(const base::Closure& closure) {
+void DecoderStream<StreamType>::Reset(base::OnceClosure closure) {
FUNCTION_DVLOG(2);
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_NE(state_, STATE_UNINITIALIZED);
DCHECK(!reset_cb_);
- reset_cb_ = closure;
+ reset_cb_ = std::move(closure);
if (read_cb_) {
task_runner_->PostTask(
- FROM_HERE, base::BindOnce(base::ResetAndReturn(&read_cb_), ABORTED,
- scoped_refptr<Output>()));
+ FROM_HERE,
+ base::BindOnce(std::move(read_cb_), ABORTED, scoped_refptr<Output>()));
}
ClearOutputs();
@@ -328,7 +329,7 @@ void DecoderStream<StreamType>::OnDecoderSelected(
state_ = STATE_UNINITIALIZED;
MEDIA_LOG(ERROR, media_log_)
<< GetStreamTypeString() << " decoder initialization failed";
- base::ResetAndReturn(&init_cb_).Run(false);
+ std::move(init_cb_).Run(false);
} else {
CompleteDecoderReinitialization(false);
}
@@ -341,6 +342,9 @@ void DecoderStream<StreamType>::OnDecoderSelected(
!!decrypting_demuxer_stream_);
media_log_->SetStringProperty(GetStreamTypeString() + "_decoder",
decoder_->GetDisplayName());
+ media_log_->SetBooleanProperty(
+ "is_platform_" + GetStreamTypeString() + "_decoder",
+ decoder_->IsPlatformDecoder());
MEDIA_LOG(INFO, media_log_)
<< "Selected " << decoder_->GetDisplayName() << " for "
@@ -356,7 +360,7 @@ void DecoderStream<StreamType>::OnDecoderSelected(
state_ = STATE_NORMAL;
if (StreamTraits::NeedsBitstreamConversion(decoder_.get()))
stream_->EnableBitstreamConverter();
- base::ResetAndReturn(&init_cb_).Run(true);
+ std::move(init_cb_).Run(true);
}
template <DemuxerStream::Type StreamType>
@@ -364,7 +368,7 @@ void DecoderStream<StreamType>::SatisfyRead(
Status status,
const scoped_refptr<Output>& output) {
DCHECK(read_cb_);
- base::ResetAndReturn(&read_cb_).Run(status, output);
+ std::move(read_cb_).Run(status, output);
}
template <DemuxerStream::Type StreamType>
diff --git a/chromium/media/filters/decoder_stream.h b/chromium/media/filters/decoder_stream.h
index 95bd78f81eb..50707c37a26 100644
--- a/chromium/media/filters/decoder_stream.h
+++ b/chromium/media/filters/decoder_stream.h
@@ -56,11 +56,10 @@ class MEDIA_EXPORT DecoderStream {
base::RepeatingCallback<std::vector<std::unique_ptr<Decoder>>()>;
// Indicates completion of a DecoderStream initialization.
- using InitCB = base::RepeatingCallback<void(bool success)>;
+ using InitCB = base::OnceCallback<void(bool success)>;
// Indicates completion of a DecoderStream read.
- using ReadCB =
- base::RepeatingCallback<void(Status, const scoped_refptr<Output>&)>;
+ using ReadCB = base::OnceCallback<void(Status, const scoped_refptr<Output>&)>;
DecoderStream(std::unique_ptr<DecoderStreamTraits<StreamType>> traits,
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
@@ -76,16 +75,16 @@ class MEDIA_EXPORT DecoderStream {
// |cdm_context| can be used to handle encrypted stream. Can be null if the
// stream is not encrypted.
void Initialize(DemuxerStream* stream,
- const InitCB& init_cb,
+ InitCB init_cb,
CdmContext* cdm_context,
- const StatisticsCB& statistics_cb,
- const base::Closure& waiting_for_decryption_key_cb);
+ StatisticsCB statistics_cb,
+ base::RepeatingClosure waiting_for_decryption_key_cb);
// Reads a decoded Output and returns it via the |read_cb|. Note that
// |read_cb| is always called asynchronously. This method should only be
// called after initialization has succeeded and must not be called during
// pending Reset().
- void Read(const ReadCB& read_cb);
+ void Read(ReadCB read_cb);
// Resets the decoder, flushes all decoded outputs and/or internal buffers,
// fires any existing pending read callback and calls |closure| on completion.
@@ -94,7 +93,7 @@ class MEDIA_EXPORT DecoderStream {
// during pending Reset().
// N.B: If the decoder stream has run into an error, calling this method does
// not 'reset' it to a normal state.
- void Reset(const base::Closure& closure);
+ void Reset(base::OnceClosure closure);
// Returns true if the decoder currently has the ability to decode and return
// an Output.
@@ -224,10 +223,10 @@ class MEDIA_EXPORT DecoderStream {
StatisticsCB statistics_cb_;
InitCB init_cb_;
- base::Closure waiting_for_decryption_key_cb_;
+ base::RepeatingClosure waiting_for_decryption_key_cb_;
ReadCB read_cb_;
- base::Closure reset_cb_;
+ base::OnceClosure reset_cb_;
DemuxerStream* stream_;
diff --git a/chromium/media/filters/decrypting_audio_decoder.cc b/chromium/media/filters/decrypting_audio_decoder.cc
index e09411ed90e..767b405ef18 100644
--- a/chromium/media/filters/decrypting_audio_decoder.cc
+++ b/chromium/media/filters/decrypting_audio_decoder.cc
@@ -94,7 +94,7 @@ void DecryptingAudioDecoder::Initialize(
if (state_ == kUninitialized) {
if (!cdm_context->GetDecryptor()) {
- MEDIA_LOG(DEBUG, media_log_) << GetDisplayName() << ": no decryptor";
+ DVLOG(1) << __func__ << ": no decryptor";
base::ResetAndReturn(&init_cb_).Run(false);
return;
}
@@ -141,10 +141,9 @@ void DecryptingAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
void DecryptingAudioDecoder::Reset(const base::Closure& closure) {
DVLOG(2) << "Reset() - state: " << state_;
DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK(state_ == kIdle ||
- state_ == kPendingDecode ||
- state_ == kWaitingForKey ||
- state_ == kDecodeFinished) << state_;
+ DCHECK(state_ == kIdle || state_ == kPendingDecode ||
+ state_ == kWaitingForKey || state_ == kDecodeFinished)
+ << state_;
DCHECK(init_cb_.is_null()); // No Reset() during pending initialization.
DCHECK(reset_cb_.is_null());
@@ -194,9 +193,8 @@ DecryptingAudioDecoder::~DecryptingAudioDecoder() {
void DecryptingAudioDecoder::InitializeDecoder() {
state_ = kPendingDecoderInit;
decryptor_->InitializeAudioDecoder(
- config_,
- BindToCurrentLoop(base::Bind(
- &DecryptingAudioDecoder::FinishInitialization, weak_this_)));
+ config_, BindToCurrentLoop(base::Bind(
+ &DecryptingAudioDecoder::FinishInitialization, weak_this_)));
}
void DecryptingAudioDecoder::FinishInitialization(bool success) {
@@ -204,12 +202,11 @@ void DecryptingAudioDecoder::FinishInitialization(bool success) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(state_ == kPendingDecoderInit) << state_;
DCHECK(!init_cb_.is_null());
- DCHECK(reset_cb_.is_null()); // No Reset() before initialization finished.
+ DCHECK(reset_cb_.is_null()); // No Reset() before initialization finished.
DCHECK(decode_cb_.is_null()); // No Decode() before initialization finished.
if (!success) {
- MEDIA_LOG(DEBUG, media_log_) << GetDisplayName()
- << ": failed to init decoder on decryptor";
+ DVLOG(1) << __func__ << ": failed to init audio decoder on decryptor";
base::ResetAndReturn(&init_cb_).Run(false);
decryptor_ = NULL;
state_ = kError;
@@ -221,9 +218,8 @@ void DecryptingAudioDecoder::FinishInitialization(bool success) {
new AudioTimestampHelper(config_.samples_per_second()));
decryptor_->RegisterNewKeyCB(
- Decryptor::kAudio,
- BindToCurrentLoop(
- base::Bind(&DecryptingAudioDecoder::OnKeyAdded, weak_this_)));
+ Decryptor::kAudio, BindToCurrentLoop(base::Bind(
+ &DecryptingAudioDecoder::OnKeyAdded, weak_this_)));
state_ = kIdle;
base::ResetAndReturn(&init_cb_).Run(true);
@@ -240,8 +236,8 @@ void DecryptingAudioDecoder::DecodePendingBuffer() {
decryptor_->DecryptAndDecodeAudio(
pending_buffer_to_decode_,
- BindToCurrentLoop(base::Bind(
- &DecryptingAudioDecoder::DeliverFrame, weak_this_, buffer_size)));
+ BindToCurrentLoop(base::Bind(&DecryptingAudioDecoder::DeliverFrame,
+ weak_this_, buffer_size)));
}
void DecryptingAudioDecoder::DeliverFrame(
@@ -271,7 +267,7 @@ void DecryptingAudioDecoder::DeliverFrame(
if (status == Decryptor::kError) {
DVLOG(2) << "DeliverFrame() - kError";
MEDIA_LOG(ERROR, media_log_) << GetDisplayName() << ": decode error";
- state_ = kDecodeFinished; // TODO add kError state
+ state_ = kDecodeFinished; // TODO add kError state
base::ResetAndReturn(&decode_cb_).Run(DecodeStatus::DECODE_ERROR);
return;
}
@@ -279,10 +275,11 @@ void DecryptingAudioDecoder::DeliverFrame(
if (status == Decryptor::kNoKey) {
std::string key_id =
scoped_pending_buffer_to_decode->decrypt_config()->key_id();
- std::string missing_key_id = base::HexEncode(key_id.data(), key_id.size());
- DVLOG(1) << "DeliverFrame() - no key for key ID " << missing_key_id;
- MEDIA_LOG(DEBUG, media_log_) << GetDisplayName() << ": no key for key ID "
- << missing_key_id;
+ std::string log_message =
+ "no key for key ID " + base::HexEncode(key_id.data(), key_id.size()) +
+ "; will resume decoding after new usable key is available";
+ DVLOG(1) << __func__ << ": " << log_message;
+ MEDIA_LOG(INFO, media_log_) << GetDisplayName() << ": " << log_message;
// Set |pending_buffer_to_decode_| back as we need to try decoding the
// pending buffer again when new key is added to the decryptor.
@@ -290,8 +287,8 @@ void DecryptingAudioDecoder::DeliverFrame(
if (need_to_try_again_if_nokey_is_returned) {
// The |state_| is still kPendingDecode.
- MEDIA_LOG(INFO, media_log_) << GetDisplayName()
- << ": key was added, resuming decode";
+ MEDIA_LOG(INFO, media_log_)
+ << GetDisplayName() << ": key was added, resuming decode";
DecodePendingBuffer();
return;
}
@@ -334,8 +331,8 @@ void DecryptingAudioDecoder::OnKeyAdded() {
}
if (state_ == kWaitingForKey) {
- MEDIA_LOG(INFO, media_log_) << GetDisplayName()
- << ": key added, resuming decode";
+ MEDIA_LOG(INFO, media_log_)
+ << GetDisplayName() << ": key added, resuming decode";
state_ = kPendingDecode;
DecodePendingBuffer();
}
@@ -352,8 +349,7 @@ void DecryptingAudioDecoder::DoReset() {
void DecryptingAudioDecoder::ProcessDecodedFrames(
const Decryptor::AudioFrames& frames) {
for (Decryptor::AudioFrames::const_iterator iter = frames.begin();
- iter != frames.end();
- ++iter) {
+ iter != frames.end(); ++iter) {
scoped_refptr<AudioBuffer> frame = *iter;
DCHECK(!frame->end_of_stream()) << "EOS frame returned.";
diff --git a/chromium/media/filters/decrypting_audio_decoder_unittest.cc b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
index f68e048aed9..8a5b4621a37 100644
--- a/chromium/media/filters/decrypting_audio_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
@@ -111,16 +111,14 @@ class DecryptingAudioDecoderTest : public testing::Test {
InitializeAndExpectResult(config_, true);
}
- void Reinitialize() {
- ReinitializeConfigChange(config_);
- }
+ void Reinitialize() { ReinitializeConfigChange(config_); }
void ReinitializeConfigChange(const AudioDecoderConfig& new_config) {
EXPECT_CALL(*decryptor_, DeinitializeDecoder(Decryptor::kAudio));
EXPECT_CALL(*decryptor_, InitializeAudioDecoder(_, _))
.WillOnce(RunCallback<1>(true));
EXPECT_CALL(*decryptor_, RegisterNewKeyCB(Decryptor::kAudio, _))
- .WillOnce(SaveArg<1>(&key_added_cb_));
+ .WillOnce(SaveArg<1>(&key_added_cb_));
decoder_->Initialize(
new_config, cdm_context_.get(), NewExpectedBoolCB(true),
base::Bind(&DecryptingAudioDecoderTest::FrameReady,
@@ -133,9 +131,8 @@ class DecryptingAudioDecoderTest : public testing::Test {
void DecodeAndExpect(scoped_refptr<DecoderBuffer> buffer,
DecodeStatus status) {
EXPECT_CALL(*this, DecodeDone(status));
- decoder_->Decode(buffer,
- base::Bind(&DecryptingAudioDecoderTest::DecodeDone,
- base::Unretained(this)));
+ decoder_->Decode(buffer, base::Bind(&DecryptingAudioDecoderTest::DecodeDone,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
@@ -161,8 +158,9 @@ class DecryptingAudioDecoderTest : public testing::Test {
// Sets up expectations and actions to put DecryptingAudioDecoder in an
// active normal decoding state.
void EnterNormalDecodingState() {
- EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _)).WillRepeatedly(
- Invoke(this, &DecryptingAudioDecoderTest::DecryptAndDecodeAudio));
+ EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _))
+ .WillRepeatedly(
+ Invoke(this, &DecryptingAudioDecoderTest::DecryptAndDecodeAudio));
EXPECT_CALL(*this, FrameReady(decoded_frame_));
for (int i = 0; i < kDecodingDelay + 1; ++i)
DecodeAndExpect(encrypted_buffer_, DecodeStatus::OK);
@@ -173,8 +171,7 @@ class DecryptingAudioDecoderTest : public testing::Test {
// EnterNormalDecodingState() to work.
void EnterEndOfStreamState() {
// The codec in the |decryptor_| will be flushed.
- EXPECT_CALL(*this, FrameReady(decoded_frame_))
- .Times(kDecodingDelay);
+ EXPECT_CALL(*this, FrameReady(decoded_frame_)).Times(kDecodingDelay);
DecodeAndExpect(DecoderBuffer::CreateEOSBuffer(), DecodeStatus::OK);
EXPECT_EQ(0, num_frames_in_decryptor_);
}
@@ -196,8 +193,8 @@ class DecryptingAudioDecoderTest : public testing::Test {
void EnterWaitingForKeyState() {
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(encrypted_buffer_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kNoKey,
- Decryptor::AudioFrames()));
+ .WillRepeatedly(
+ RunCallback<1>(Decryptor::kNoKey, Decryptor::AudioFrames()));
EXPECT_CALL(*this, OnWaitingForDecryptionKey());
decoder_->Decode(encrypted_buffer_,
base::Bind(&DecryptingAudioDecoderTest::DecodeDone,
@@ -208,8 +205,8 @@ class DecryptingAudioDecoderTest : public testing::Test {
void AbortPendingAudioDecodeCB() {
if (!pending_audio_decode_cb_.is_null()) {
- base::ResetAndReturn(&pending_audio_decode_cb_).Run(
- Decryptor::kSuccess, Decryptor::AudioFrames());
+ base::ResetAndReturn(&pending_audio_decode_cb_)
+ .Run(Decryptor::kSuccess, Decryptor::AudioFrames());
}
}
@@ -315,8 +312,8 @@ TEST_F(DecryptingAudioDecoderTest, DecryptAndDecode_DecodeError) {
Initialize();
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kError,
- Decryptor::AudioFrames()));
+ .WillRepeatedly(
+ RunCallback<1>(Decryptor::kError, Decryptor::AudioFrames()));
DecodeAndExpect(encrypted_buffer_, DecodeStatus::DECODE_ERROR);
}
@@ -423,8 +420,8 @@ TEST_F(DecryptingAudioDecoderTest, KeyAdded_DruingPendingDecode) {
// The audio decode callback is returned after the correct decryption key is
// added.
key_added_cb_.Run();
- base::ResetAndReturn(&pending_audio_decode_cb_).Run(
- Decryptor::kNoKey, Decryptor::AudioFrames());
+ base::ResetAndReturn(&pending_audio_decode_cb_)
+ .Run(Decryptor::kNoKey, Decryptor::AudioFrames());
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/decrypting_demuxer_stream.cc b/chromium/media/filters/decrypting_demuxer_stream.cc
index 06009f7a947..12de15c5369 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream.cc
@@ -58,7 +58,7 @@ void DecryptingDemuxerStream::Initialize(DemuxerStream* stream,
InitializeDecoderConfig();
if (!cdm_context->GetDecryptor()) {
- DVLOG(2) << __func__ << ": no decryptor";
+ DVLOG(1) << __func__ << ": no decryptor";
state_ = kUninitialized;
base::ResetAndReturn(&init_cb_).Run(DECODER_ERROR_NOT_SUPPORTED);
return;
@@ -235,8 +235,7 @@ void DecryptingDemuxerStream::DecryptPendingBuffer() {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kPendingDecrypt) << state_;
decryptor_->Decrypt(
- GetDecryptorStreamType(),
- pending_buffer_to_decrypt_,
+ GetDecryptorStreamType(), pending_buffer_to_decrypt_,
BindToCurrentLoop(
base::Bind(&DecryptingDemuxerStream::DeliverBuffer, weak_this_)));
}
@@ -274,15 +273,17 @@ void DecryptingDemuxerStream::DeliverBuffer(
if (status == Decryptor::kNoKey) {
std::string key_id = pending_buffer_to_decrypt_->decrypt_config()->key_id();
- std::string missing_key_id = base::HexEncode(key_id.data(), key_id.size());
- DVLOG(1) << "DeliverBuffer() - no key for key ID " << missing_key_id;
- MEDIA_LOG(INFO, media_log_) << GetDisplayName() << ": no key for key ID "
- << missing_key_id;
+
+ std::string log_message =
+ "no key for key ID " + base::HexEncode(key_id.data(), key_id.size()) +
+ "; will resume decrypting after new usable key is available";
+ DVLOG(1) << __func__ << ": " << log_message;
+ MEDIA_LOG(INFO, media_log_) << GetDisplayName() << ": " << log_message;
if (need_to_try_again_if_nokey) {
// The |state_| is still kPendingDecrypt.
- MEDIA_LOG(INFO, media_log_) << GetDisplayName()
- << ": key was added, resuming decrypt";
+ MEDIA_LOG(INFO, media_log_)
+ << GetDisplayName() << ": key was added, resuming decrypt";
DecryptPendingBuffer();
return;
}
diff --git a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
index 2650a45a168..d156adc807f 100644
--- a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
@@ -41,8 +41,10 @@ static const uint8_t kFakeIv[DecryptConfig::kDecryptionKeySize] = {0};
static scoped_refptr<DecoderBuffer> CreateFakeEncryptedStreamBuffer(
bool is_clear) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(kFakeBufferSize));
- std::string iv = is_clear ? std::string() :
- std::string(reinterpret_cast<const char*>(kFakeIv), arraysize(kFakeIv));
+ std::string iv = is_clear
+ ? std::string()
+ : std::string(reinterpret_cast<const char*>(kFakeIv),
+ arraysize(kFakeIv));
if (!is_clear) {
buffer->set_decrypt_config(DecryptConfig::CreateCencConfig(
std::string(reinterpret_cast<const char*>(kFakeKeyId),
@@ -227,8 +229,8 @@ class DecryptingDemuxerStreamTest : public testing::Test {
EXPECT_CALL(*input_audio_stream_, Read(_))
.WillRepeatedly(ReturnBuffer(encrypted_buffer_));
EXPECT_CALL(*decryptor_, Decrypt(_, encrypted_buffer_, _))
- .WillRepeatedly(RunCallback<2>(Decryptor::kNoKey,
- scoped_refptr<DecoderBuffer>()));
+ .WillRepeatedly(
+ RunCallback<2>(Decryptor::kNoKey, scoped_refptr<DecoderBuffer>()));
EXPECT_MEDIA_LOG(HasSubstr("DecryptingDemuxerStream: no key for key ID"));
EXPECT_CALL(*this, OnWaitingForDecryptionKey());
demuxer_stream_->Read(base::Bind(&DecryptingDemuxerStreamTest::BufferReady,
@@ -343,8 +345,8 @@ TEST_F(DecryptingDemuxerStreamTest, Read_DecryptError) {
EXPECT_CALL(*input_audio_stream_, Read(_))
.WillRepeatedly(ReturnBuffer(encrypted_buffer_));
EXPECT_CALL(*decryptor_, Decrypt(_, encrypted_buffer_, _))
- .WillRepeatedly(RunCallback<2>(Decryptor::kError,
- scoped_refptr<DecoderBuffer>()));
+ .WillRepeatedly(
+ RunCallback<2>(Decryptor::kError, scoped_refptr<DecoderBuffer>()));
EXPECT_MEDIA_LOG(HasSubstr("DecryptingDemuxerStream: decrypt error"));
ReadAndExpectBufferReadyWith(DemuxerStream::kError, nullptr);
}
diff --git a/chromium/media/filters/decrypting_video_decoder.cc b/chromium/media/filters/decrypting_video_decoder.cc
index 1f4968d5926..132e58e1dd3 100644
--- a/chromium/media/filters/decrypting_video_decoder.cc
+++ b/chromium/media/filters/decrypting_video_decoder.cc
@@ -47,9 +47,9 @@ void DecryptingVideoDecoder::Initialize(
DVLOG(2) << __func__ << ": " << config.AsHumanReadableString();
DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK(state_ == kUninitialized ||
- state_ == kIdle ||
- state_ == kDecodeFinished) << state_;
+ DCHECK(state_ == kUninitialized || state_ == kIdle ||
+ state_ == kDecodeFinished)
+ << state_;
DCHECK(decode_cb_.is_null());
DCHECK(reset_cb_.is_null());
DCHECK(config.IsValidConfig());
@@ -80,7 +80,7 @@ void DecryptingVideoDecoder::Initialize(
if (state_ == kUninitialized) {
if (!cdm_context->GetDecryptor()) {
- MEDIA_LOG(DEBUG, media_log_) << GetDisplayName() << ": no decryptor";
+ DVLOG(1) << __func__ << ": no decryptor";
base::ResetAndReturn(&init_cb_).Run(false);
return;
}
@@ -102,9 +102,8 @@ void DecryptingVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
const DecodeCB& decode_cb) {
DVLOG(3) << "Decode()";
DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK(state_ == kIdle ||
- state_ == kDecodeFinished ||
- state_ == kError) << state_;
+ DCHECK(state_ == kIdle || state_ == kDecodeFinished || state_ == kError)
+ << state_;
DCHECK(!decode_cb.is_null());
CHECK(decode_cb_.is_null()) << "Overlapping decodes are not supported.";
@@ -129,11 +128,10 @@ void DecryptingVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
void DecryptingVideoDecoder::Reset(const base::Closure& closure) {
DVLOG(2) << "Reset() - state: " << state_;
DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK(state_ == kIdle ||
- state_ == kPendingDecode ||
- state_ == kWaitingForKey ||
- state_ == kDecodeFinished ||
- state_ == kError) << state_;
+ DCHECK(state_ == kIdle || state_ == kPendingDecode ||
+ state_ == kWaitingForKey || state_ == kDecodeFinished ||
+ state_ == kError)
+ << state_;
DCHECK(init_cb_.is_null()); // No Reset() during pending initialization.
DCHECK(reset_cb_.is_null());
@@ -184,12 +182,11 @@ void DecryptingVideoDecoder::FinishInitialization(bool success) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kPendingDecoderInit) << state_;
DCHECK(!init_cb_.is_null());
- DCHECK(reset_cb_.is_null()); // No Reset() before initialization finished.
+ DCHECK(reset_cb_.is_null()); // No Reset() before initialization finished.
DCHECK(decode_cb_.is_null()); // No Decode() before initialization finished.
if (!success) {
- MEDIA_LOG(DEBUG, media_log_) << GetDisplayName()
- << ": failed to init decoder on decryptor";
+ DVLOG(1) << __func__ << ": failed to init video decoder on decryptor";
base::ResetAndReturn(&init_cb_).Run(false);
decryptor_ = NULL;
state_ = kError;
@@ -197,16 +194,14 @@ void DecryptingVideoDecoder::FinishInitialization(bool success) {
}
decryptor_->RegisterNewKeyCB(
- Decryptor::kVideo,
- BindToCurrentLoop(
- base::Bind(&DecryptingVideoDecoder::OnKeyAdded, weak_this_)));
+ Decryptor::kVideo, BindToCurrentLoop(base::Bind(
+ &DecryptingVideoDecoder::OnKeyAdded, weak_this_)));
// Success!
state_ = kIdle;
base::ResetAndReturn(&init_cb_).Run(true);
}
-
void DecryptingVideoDecoder::DecodePendingBuffer() {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kPendingDecode) << state_;
@@ -219,8 +214,9 @@ void DecryptingVideoDecoder::DecodePendingBuffer() {
}
decryptor_->DecryptAndDecodeVideo(
- pending_buffer_to_decode_, BindToCurrentLoop(base::Bind(
- &DecryptingVideoDecoder::DeliverFrame, weak_this_, buffer_size)));
+ pending_buffer_to_decode_,
+ BindToCurrentLoop(base::Bind(&DecryptingVideoDecoder::DeliverFrame,
+ weak_this_, buffer_size)));
}
void DecryptingVideoDecoder::DeliverFrame(
@@ -233,9 +229,9 @@ void DecryptingVideoDecoder::DeliverFrame(
DCHECK(!decode_cb_.is_null());
DCHECK(pending_buffer_to_decode_.get());
- TRACE_EVENT_ASYNC_END2(
- "media", "DecryptingVideoDecoder::DecodePendingBuffer", trace_id_,
- "buffer_size", buffer_size, "status", status);
+ TRACE_EVENT_ASYNC_END2("media", "DecryptingVideoDecoder::DecodePendingBuffer",
+ trace_id_, "buffer_size", buffer_size, "status",
+ status);
bool need_to_try_again_if_nokey_is_returned = key_added_while_decode_pending_;
key_added_while_decode_pending_ = false;
@@ -262,10 +258,11 @@ void DecryptingVideoDecoder::DeliverFrame(
if (status == Decryptor::kNoKey) {
std::string key_id =
scoped_pending_buffer_to_decode->decrypt_config()->key_id();
- std::string missing_key_id = base::HexEncode(key_id.data(), key_id.size());
- DVLOG(1) << "DeliverFrame() - no key for key ID " << missing_key_id;
- MEDIA_LOG(INFO, media_log_) << GetDisplayName() << ": no key for key ID "
- << missing_key_id;
+ std::string log_message =
+ "no key for key ID " + base::HexEncode(key_id.data(), key_id.size()) +
+ "; will resume decoding after new usable key is available";
+ DVLOG(1) << __func__ << ": " << log_message;
+ MEDIA_LOG(INFO, media_log_) << GetDisplayName() << ": " << log_message;
// Set |pending_buffer_to_decode_| back as we need to try decoding the
// pending buffer again when new key is added to the decryptor.
@@ -273,8 +270,8 @@ void DecryptingVideoDecoder::DeliverFrame(
if (need_to_try_again_if_nokey_is_returned) {
// The |state_| is still kPendingDecode.
- MEDIA_LOG(INFO, media_log_) << GetDisplayName()
- << ": key was added, resuming decode";
+ MEDIA_LOG(INFO, media_log_)
+ << GetDisplayName() << ": key was added, resuming decode";
DecodePendingBuffer();
return;
}
@@ -331,8 +328,8 @@ void DecryptingVideoDecoder::OnKeyAdded() {
}
if (state_ == kWaitingForKey) {
- MEDIA_LOG(INFO, media_log_) << GetDisplayName()
- << ": key added, resuming decode";
+ MEDIA_LOG(INFO, media_log_)
+ << GetDisplayName() << ": key added, resuming decode";
state_ = kPendingDecode;
DecodePendingBuffer();
}
diff --git a/chromium/media/filters/decrypting_video_decoder_unittest.cc b/chromium/media/filters/decrypting_video_decoder_unittest.cc
index 095dc3acb54..90d131fe88d 100644
--- a/chromium/media/filters/decrypting_video_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_video_decoder_unittest.cc
@@ -108,9 +108,8 @@ class DecryptingVideoDecoderTest : public testing::Test {
void DecodeAndExpect(scoped_refptr<DecoderBuffer> buffer,
DecodeStatus status) {
EXPECT_CALL(*this, DecodeDone(status));
- decoder_->Decode(buffer,
- base::Bind(&DecryptingVideoDecoderTest::DecodeDone,
- base::Unretained(this)));
+ decoder_->Decode(buffer, base::Bind(&DecryptingVideoDecoderTest::DecodeDone,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
@@ -136,8 +135,9 @@ class DecryptingVideoDecoderTest : public testing::Test {
// Sets up expectations and actions to put DecryptingVideoDecoder in an
// active normal decoding state.
void EnterNormalDecodingState() {
- EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _)).WillRepeatedly(
- Invoke(this, &DecryptingVideoDecoderTest::DecryptAndDecodeVideo));
+ EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _))
+ .WillRepeatedly(
+ Invoke(this, &DecryptingVideoDecoderTest::DecryptAndDecodeVideo));
EXPECT_CALL(*this, FrameReady(decoded_video_frame_));
for (int i = 0; i < kDecodingDelay + 1; ++i)
DecodeAndExpect(encrypted_buffer_, DecodeStatus::OK);
@@ -148,8 +148,7 @@ class DecryptingVideoDecoderTest : public testing::Test {
// EnterNormalDecodingState() to work.
void EnterEndOfStreamState() {
// The codec in the |decryptor_| will be flushed.
- EXPECT_CALL(*this, FrameReady(decoded_video_frame_))
- .Times(kDecodingDelay);
+ EXPECT_CALL(*this, FrameReady(decoded_video_frame_)).Times(kDecodingDelay);
DecodeAndExpect(DecoderBuffer::CreateEOSBuffer(), DecodeStatus::OK);
EXPECT_EQ(0, num_frames_in_decryptor_);
}
@@ -181,8 +180,8 @@ class DecryptingVideoDecoderTest : public testing::Test {
void AbortPendingVideoDecodeCB() {
if (!pending_video_decode_cb_.is_null()) {
- base::ResetAndReturn(&pending_video_decode_cb_).Run(
- Decryptor::kSuccess, scoped_refptr<VideoFrame>(NULL));
+ base::ResetAndReturn(&pending_video_decode_cb_)
+ .Run(Decryptor::kSuccess, scoped_refptr<VideoFrame>(NULL));
}
}
@@ -299,8 +298,8 @@ TEST_F(DecryptingVideoDecoderTest, DecryptAndDecode_DecodeError) {
Initialize();
EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kError,
- scoped_refptr<VideoFrame>(NULL)));
+ .WillRepeatedly(
+ RunCallback<1>(Decryptor::kError, scoped_refptr<VideoFrame>(NULL)));
DecodeAndExpect(encrypted_buffer_, DecodeStatus::DECODE_ERROR);
@@ -322,8 +321,8 @@ TEST_F(DecryptingVideoDecoderTest, KeyAdded_DuringWaitingForKey) {
EnterWaitingForKeyState();
EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kSuccess,
- decoded_video_frame_));
+ .WillRepeatedly(
+ RunCallback<1>(Decryptor::kSuccess, decoded_video_frame_));
EXPECT_CALL(*this, FrameReady(decoded_video_frame_));
EXPECT_CALL(*this, DecodeDone(DecodeStatus::OK));
key_added_cb_.Run();
@@ -337,15 +336,15 @@ TEST_F(DecryptingVideoDecoderTest, KeyAdded_DuringPendingDecode) {
EnterPendingDecodeState();
EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kSuccess,
- decoded_video_frame_));
+ .WillRepeatedly(
+ RunCallback<1>(Decryptor::kSuccess, decoded_video_frame_));
EXPECT_CALL(*this, FrameReady(decoded_video_frame_));
EXPECT_CALL(*this, DecodeDone(DecodeStatus::OK));
// The video decode callback is returned after the correct decryption key is
// added.
key_added_cb_.Run();
- base::ResetAndReturn(&pending_video_decode_cb_).Run(Decryptor::kNoKey,
- null_video_frame_);
+ base::ResetAndReturn(&pending_video_decode_cb_)
+ .Run(Decryptor::kNoKey, null_video_frame_);
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/demuxer_perftest.cc b/chromium/media/filters/demuxer_perftest.cc
index bacffe980b3..d1582abc523 100644
--- a/chromium/media/filters/demuxer_perftest.cc
+++ b/chromium/media/filters/demuxer_perftest.cc
@@ -40,9 +40,6 @@ class DemuxerHostImpl : public media::DemuxerHost {
const Ranges<base::TimeDelta>& ranges) override {}
void SetDuration(base::TimeDelta duration) override {}
void OnDemuxerError(media::PipelineStatus error) override {}
- void AddTextStream(media::DemuxerStream* text_stream,
- const media::TextTrackConfig& config) override {}
- void RemoveTextStream(media::DemuxerStream* text_stream) override {}
private:
DISALLOW_COPY_AND_ASSIGN(DemuxerHostImpl);
@@ -185,7 +182,7 @@ static void RunDemuxerBenchmark(const std::string& filename) {
ASSERT_TRUE(data_source.Initialize(file_path));
Demuxer::EncryptedMediaInitDataCB encrypted_media_init_data_cb =
- base::Bind(&OnEncryptedMediaInitData);
+ base::BindRepeating(&OnEncryptedMediaInitData);
Demuxer::MediaTracksUpdatedCB tracks_updated_cb =
base::Bind(&OnMediaTracksUpdated);
FFmpegDemuxer demuxer(base::ThreadTaskRunnerHandle::Get(), &data_source,
@@ -194,9 +191,8 @@ static void RunDemuxerBenchmark(const std::string& filename) {
{
base::RunLoop run_loop;
- demuxer.Initialize(
- &demuxer_host,
- base::Bind(&QuitLoopWithStatus, run_loop.QuitClosure()), false);
+ demuxer.Initialize(&demuxer_host, base::Bind(&QuitLoopWithStatus,
+ run_loop.QuitClosure()));
run_loop.Run();
}
diff --git a/chromium/media/filters/ffmpeg_demuxer.cc b/chromium/media/filters/ffmpeg_demuxer.cc
index 7402ce16ab5..a81098549b6 100644
--- a/chromium/media/filters/ffmpeg_demuxer.cc
+++ b/chromium/media/filters/ffmpeg_demuxer.cc
@@ -536,7 +536,12 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
buffer->set_timestamp(stream_timestamp - start_time);
- if (packet->flags & AV_PKT_FLAG_DISCARD) {
+ // If the packet is marked for complete discard and it doesn't already have
+ // any discard padding set, mark the DecoderBuffer for complete discard. We
+ // don't want to overwrite any existing discard padding since the discard
+ // padding may refer to frames beyond this packet.
+ if (packet->flags & AV_PKT_FLAG_DISCARD &&
+ buffer->discard_padding() == DecoderBuffer::DiscardPadding()) {
buffer->set_discard_padding(
std::make_pair(kInfiniteDuration, base::TimeDelta()));
if (buffer->timestamp() < base::TimeDelta()) {
@@ -835,21 +840,6 @@ size_t FFmpegDemuxerStream::MemoryUsage() const {
return buffer_queue_.data_size();
}
-TextKind FFmpegDemuxerStream::GetTextKind() const {
- DCHECK_EQ(type_, DemuxerStream::TEXT);
-
- if (stream_->disposition & AV_DISPOSITION_CAPTIONS)
- return kTextCaptions;
-
- if (stream_->disposition & AV_DISPOSITION_DESCRIPTIONS)
- return kTextDescriptions;
-
- if (stream_->disposition & AV_DISPOSITION_METADATA)
- return kTextMetadata;
-
- return kTextSubtitles;
-}
-
std::string FFmpegDemuxerStream::GetMetadata(const char* key) const {
const AVDictionaryEntry* entry =
av_dict_get(stream_->metadata, key, NULL, 0);
@@ -888,7 +878,6 @@ FFmpegDemuxer::FFmpegDemuxer(
media_log_(media_log),
bitrate_(0),
start_time_(kNoTimestamp),
- text_enabled_(false),
duration_known_(false),
encrypted_media_init_data_cb_(encrypted_media_init_data_cb),
media_tracks_updated_cb_(media_tracks_updated_cb),
@@ -916,11 +905,9 @@ std::string FFmpegDemuxer::GetDisplayName() const {
}
void FFmpegDemuxer::Initialize(DemuxerHost* host,
- const PipelineStatusCB& status_cb,
- bool enable_text_tracks) {
+ const PipelineStatusCB& status_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
host_ = host;
- text_enabled_ = enable_text_tracks;
weak_this_ = cancel_pending_seek_factory_.GetWeakPtr();
// Give a WeakPtr to BlockingUrlProtocol since we'll need to release it on the
@@ -1106,24 +1093,6 @@ base::TimeDelta FFmpegDemuxer::GetStartTime() const {
return std::max(start_time_, base::TimeDelta());
}
-void FFmpegDemuxer::AddTextStreams() {
- DCHECK(task_runner_->BelongsToCurrentThread());
-
- for (const auto& stream : streams_) {
- if (!stream || stream->type() != DemuxerStream::TEXT)
- continue;
-
- TextKind kind = stream->GetTextKind();
- std::string title = stream->GetMetadata("title");
- std::string language = stream->GetMetadata("language");
-
- // TODO: Implement "id" metadata in FFMPEG.
- // See: http://crbug.com/323183
- host_->AddTextStream(stream.get(),
- TextTrackConfig(kind, title, language, std::string()));
- }
-}
-
int64_t FFmpegDemuxer::GetMemoryUsage() const {
int64_t allocation_size = 0;
for (const auto& stream : streams_) {
@@ -1308,10 +1277,8 @@ void FFmpegDemuxer::OnFindStreamInfoDone(const PipelineStatusCB& status_cb,
#endif
} else if (codec_type == AVMEDIA_TYPE_SUBTITLE) {
detected_text_track_count++;
- if (codec_id != AV_CODEC_ID_WEBVTT || !text_enabled_) {
- stream->discard = AVDISCARD_ALL;
- continue;
- }
+ stream->discard = AVDISCARD_ALL;
+ continue;
} else {
stream->discard = AVDISCARD_ALL;
continue;
@@ -1436,9 +1403,6 @@ void FFmpegDemuxer::OnFindStreamInfoDone(const PipelineStatusCB& status_cb,
return;
}
- if (text_enabled_)
- AddTextStreams();
-
if (format_context->duration != kNoFFmpegTimestamp) {
// If there is a duration value in the container use that to find the
// maximum between it and the duration from A/V streams.
diff --git a/chromium/media/filters/ffmpeg_demuxer.h b/chromium/media/filters/ffmpeg_demuxer.h
index 425958d0244..01579ab0763 100644
--- a/chromium/media/filters/ffmpeg_demuxer.h
+++ b/chromium/media/filters/ffmpeg_demuxer.h
@@ -214,8 +214,7 @@ class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
// Demuxer implementation.
std::string GetDisplayName() const override;
void Initialize(DemuxerHost* host,
- const PipelineStatusCB& status_cb,
- bool enable_text_tracks) override;
+ const PipelineStatusCB& status_cb) override;
void AbortPendingReads() override;
void Stop() override;
void StartWaitingForSeek(base::TimeDelta seek_time) override;
@@ -370,9 +369,6 @@ class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
// time if the file doesn't have an association to Time.
base::Time timeline_offset_;
- // Whether text streams have been enabled for this demuxer.
- bool text_enabled_;
-
// Set if we know duration of the audio stream. Used when processing end of
// stream -- at this moment we definitely know duration.
bool duration_known_;
diff --git a/chromium/media/filters/ffmpeg_demuxer_unittest.cc b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
index b09816a3ba3..24abaff1294 100644
--- a/chromium/media/filters/ffmpeg_demuxer_unittest.cc
+++ b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
@@ -136,34 +136,27 @@ class FFmpegDemuxerTest : public testing::Test {
MOCK_METHOD1(CheckPoint, void(int v));
- void InitializeDemuxerInternal(bool enable_text,
- media::PipelineStatus expected_pipeline_status,
+ void InitializeDemuxerInternal(media::PipelineStatus expected_pipeline_status,
base::Time timeline_offset) {
if (expected_pipeline_status == PIPELINE_OK)
EXPECT_CALL(host_, SetDuration(_)).Times(AnyNumber());
WaitableMessageLoopEvent event;
- demuxer_->Initialize(&host_, event.GetPipelineStatusCB(), enable_text);
+ demuxer_->Initialize(&host_, event.GetPipelineStatusCB());
demuxer_->timeline_offset_ = timeline_offset;
event.RunAndWaitForStatus(expected_pipeline_status);
}
void InitializeDemuxer() {
- InitializeDemuxerInternal(/*enable_text=*/false, PIPELINE_OK, base::Time());
- }
-
- void InitializeDemuxerWithText() {
- InitializeDemuxerInternal(/*enable_text=*/true, PIPELINE_OK, base::Time());
+ InitializeDemuxerInternal(PIPELINE_OK, base::Time());
}
void InitializeDemuxerWithTimelineOffset(base::Time timeline_offset) {
- InitializeDemuxerInternal(/*enable_text=*/false, PIPELINE_OK,
- timeline_offset);
+ InitializeDemuxerInternal(PIPELINE_OK, timeline_offset);
}
void InitializeDemuxerAndExpectPipelineStatus(
media::PipelineStatus expected_pipeline_status) {
- InitializeDemuxerInternal(/*enable_text=*/false, expected_pipeline_status,
- base::Time());
+ InitializeDemuxerInternal(expected_pipeline_status, base::Time());
}
MOCK_METHOD2(OnReadDoneCalled, void(int, int64_t));
@@ -304,8 +297,9 @@ class FFmpegDemuxerTest : public testing::Test {
CreateDataSource(name);
- Demuxer::EncryptedMediaInitDataCB encrypted_media_init_data_cb = base::Bind(
- &FFmpegDemuxerTest::OnEncryptedMediaInitData, base::Unretained(this));
+ Demuxer::EncryptedMediaInitDataCB encrypted_media_init_data_cb =
+ base::BindRepeating(&FFmpegDemuxerTest::OnEncryptedMediaInitData,
+ base::Unretained(this));
Demuxer::MediaTracksUpdatedCB tracks_updated_cb = base::Bind(
&FFmpegDemuxerTest::OnMediaTracksUpdated, base::Unretained(this));
@@ -337,7 +331,7 @@ TEST_F(FFmpegDemuxerTest, Initialize_OpenFails) {
// Simulate avformat_open_input() failing.
CreateDemuxer("ten_byte_file");
WaitableMessageLoopEvent event;
- demuxer_->Initialize(&host_, event.GetPipelineStatusCB(), true);
+ demuxer_->Initialize(&host_, event.GetPipelineStatusCB());
event.RunAndWaitForStatus(DEMUXER_ERROR_COULD_NOT_OPEN);
}
@@ -345,7 +339,7 @@ TEST_F(FFmpegDemuxerTest, Initialize_NoStreams) {
// Open a file with no streams whatsoever.
CreateDemuxer("no_streams.webm");
WaitableMessageLoopEvent event;
- demuxer_->Initialize(&host_, event.GetPipelineStatusCB(), true);
+ demuxer_->Initialize(&host_, event.GetPipelineStatusCB());
event.RunAndWaitForStatus(DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
}
@@ -353,7 +347,7 @@ TEST_F(FFmpegDemuxerTest, Initialize_NoAudioVideo) {
// Open a file containing streams but none of which are audio/video streams.
CreateDemuxer("no_audio_video.webm");
WaitableMessageLoopEvent event;
- demuxer_->Initialize(&host_, event.GetPipelineStatusCB(), true);
+ demuxer_->Initialize(&host_, event.GetPipelineStatusCB());
event.RunAndWaitForStatus(DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
}
@@ -438,35 +432,6 @@ TEST_F(FFmpegDemuxerTest, Initialize_Multitrack) {
}
#endif
-TEST_F(FFmpegDemuxerTest, Initialize_MultitrackText) {
- // Open a file containing the following streams:
- // Stream #0: Video (VP8)
- // Stream #1: Audio (Vorbis)
- // Stream #2: Text (WebVTT)
-
- CreateDemuxer("bear-vp8-webvtt.webm");
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- InitializeDemuxerWithText();
- ASSERT_TRUE(text_stream);
- EXPECT_EQ(DemuxerStream::TEXT, text_stream->type());
-
- // Video stream should be VP8.
- DemuxerStream* stream = GetStream(DemuxerStream::VIDEO);
- ASSERT_TRUE(stream);
- EXPECT_EQ(DemuxerStream::VIDEO, stream->type());
- EXPECT_EQ(kCodecVP8, stream->video_decoder_config().codec());
-
- // Audio stream should be Vorbis.
- stream = GetStream(DemuxerStream::AUDIO);
- ASSERT_TRUE(stream);
- EXPECT_EQ(DemuxerStream::AUDIO, stream->type());
- EXPECT_EQ(kCodecVorbis, stream->audio_decoder_config().codec());
-
- EXPECT_EQ(3u, demuxer_->GetAllStreams().size());
-}
-
TEST_F(FFmpegDemuxerTest, Initialize_Encrypted) {
EXPECT_CALL(*this,
OnEncryptedMediaInitData(
@@ -556,23 +521,6 @@ TEST_F(FFmpegDemuxerTest, Read_Video) {
EXPECT_EQ(148778, demuxer_->GetMemoryUsage());
}
-TEST_F(FFmpegDemuxerTest, Read_Text) {
- // We test that on a successful text packet read.
- CreateDemuxer("bear-vp8-webvtt.webm");
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- InitializeDemuxerWithText();
- ASSERT_TRUE(text_stream);
- EXPECT_EQ(DemuxerStream::TEXT, text_stream->type());
-
- text_stream->Read(NewReadCB(FROM_HERE, 31, 0, true));
- base::RunLoop().Run();
-
- text_stream->Read(NewReadCB(FROM_HERE, 19, 500000, true));
- base::RunLoop().Run();
-}
-
TEST_F(FFmpegDemuxerTest, SeekInitialized_NoVideoStartTime) {
CreateDemuxer("audio-start-time-only.webm");
InitializeDemuxer();
@@ -951,39 +899,6 @@ TEST_F(FFmpegDemuxerTest, Read_DiscardDisabledVideoStream) {
EXPECT_LT(bytes_read_with_video_disabled, bytes_read_with_video_enabled);
}
-// WebM text track discarding doesn't work in ffmpeg. http://crbug.com/681886.
-TEST_F(FFmpegDemuxerTest, DISABLED_Read_DiscardDisabledTextStream) {
- // This test case reads the same video frame twice, first with the text track
- // enabled, then with the text track disabled. When the text track is
- // disabled, FFmpegDemuxer sets the AVDISCARD_ALL flag on the corresponding
- // stream, which allows FFmpeg to choose the initial reading position closer
- // to the requested video frame (i.e. closer to seek_target), since it doesn't
- // need to consider key frames for the text stream. This results in less data
- // being read compared to the case with enabled text track.
- const base::TimeDelta seek_target = base::TimeDelta::FromMilliseconds(805);
-
- CreateDemuxer("bear-vp8-webvtt.webm");
- EXPECT_CALL(host_, AddTextStream(_, _));
- InitializeDemuxerWithText();
- Seek(seek_target);
- GetStream(DemuxerStream::VIDEO)
- ->Read(NewReadCB(FROM_HERE, 5425, 801000, true));
- base::RunLoop().Run();
- auto bytes_read_with_text_enabled = data_source_->bytes_read_for_testing();
-
- Shutdown();
-
- CreateDemuxer("bear-vp8-webvtt.webm");
- InitializeDemuxer();
- Seek(seek_target);
- GetStream(DemuxerStream::VIDEO)
- ->Read(NewReadCB(FROM_HERE, 5425, 801000, true));
- base::RunLoop().Run();
- auto bytes_read_with_text_disabled = data_source_->bytes_read_for_testing();
-
- EXPECT_LT(bytes_read_with_text_disabled, bytes_read_with_text_enabled);
-}
-
TEST_F(FFmpegDemuxerTest, Read_EndOfStream) {
// Verify that end of stream buffers are created.
CreateDemuxer("bear-320x240.webm");
@@ -991,26 +906,6 @@ TEST_F(FFmpegDemuxerTest, Read_EndOfStream) {
ReadUntilEndOfStream(GetStream(DemuxerStream::AUDIO));
}
-TEST_F(FFmpegDemuxerTest, Read_EndOfStreamText) {
- // Verify that end of stream buffers are created.
- CreateDemuxer("bear-vp8-webvtt.webm");
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- InitializeDemuxerWithText();
- ASSERT_TRUE(text_stream);
- EXPECT_EQ(DemuxerStream::TEXT, text_stream->type());
-
- bool got_eos_buffer = false;
- const int kMaxBuffers = 10;
- for (int i = 0; !got_eos_buffer && i < kMaxBuffers; i++) {
- text_stream->Read(base::Bind(&EosOnReadDone, &got_eos_buffer));
- base::RunLoop().Run();
- }
-
- EXPECT_TRUE(got_eos_buffer);
-}
-
TEST_F(FFmpegDemuxerTest, Read_EndOfStream_NoDuration) {
// Verify that end of stream buffers are created.
CreateDemuxer("bear-320x240.webm");
@@ -1111,58 +1006,6 @@ TEST_F(FFmpegDemuxerTest, CancelledSeek) {
event.RunAndWaitForStatus(PIPELINE_OK);
}
-TEST_F(FFmpegDemuxerTest, SeekText) {
- // We're testing that the demuxer frees all queued packets when it receives
- // a Seek().
- CreateDemuxer("bear-vp8-webvtt.webm");
- DemuxerStream* text_stream = NULL;
- EXPECT_CALL(host_, AddTextStream(_, _))
- .WillOnce(SaveArg<0>(&text_stream));
- InitializeDemuxerWithText();
- ASSERT_TRUE(text_stream);
- EXPECT_EQ(DemuxerStream::TEXT, text_stream->type());
-
- // Get our streams.
- DemuxerStream* video = GetStream(DemuxerStream::VIDEO);
- DemuxerStream* audio = GetStream(DemuxerStream::AUDIO);
- ASSERT_TRUE(video);
- ASSERT_TRUE(audio);
-
- // Read a text packet and release it.
- text_stream->Read(NewReadCB(FROM_HERE, 31, 0, true));
- base::RunLoop().Run();
-
- // Issue a simple forward seek, which should discard queued packets.
- WaitableMessageLoopEvent event;
- demuxer_->Seek(base::TimeDelta::FromMicroseconds(1000000),
- event.GetPipelineStatusCB());
- event.RunAndWaitForStatus(PIPELINE_OK);
-
- // Audio read #1.
- audio->Read(NewReadCB(FROM_HERE, 145, 803000, true));
- base::RunLoop().Run();
-
- // Audio read #2.
- audio->Read(NewReadCB(FROM_HERE, 148, 826000, true));
- base::RunLoop().Run();
-
- // Video read #1.
- video->Read(NewReadCB(FROM_HERE, 5425, 801000, true));
- base::RunLoop().Run();
-
- // Video read #2.
- video->Read(NewReadCB(FROM_HERE, 1906, 834000, false));
- base::RunLoop().Run();
-
- // Text read #1.
- text_stream->Read(NewReadCB(FROM_HERE, 19, 1000000, true));
- base::RunLoop().Run();
-
- // Text read #2.
- text_stream->Read(NewReadCB(FROM_HERE, 19, 1500000, true));
- base::RunLoop().Run();
-}
-
TEST_F(FFmpegDemuxerTest, Stop) {
// Tests that calling Read() on a stopped demuxer stream immediately deletes
// the callback.
diff --git a/chromium/media/filters/ffmpeg_glue_unittest.cc b/chromium/media/filters/ffmpeg_glue_unittest.cc
index e51019cd661..660b4d4654a 100644
--- a/chromium/media/filters/ffmpeg_glue_unittest.cc
+++ b/chromium/media/filters/ffmpeg_glue_unittest.cc
@@ -10,7 +10,7 @@
#include "base/logging.h"
#include "base/macros.h"
-#include "base/test/histogram_tester.h"
+#include "base/test/metrics/histogram_tester.h"
#include "media/base/container_names.h"
#include "media/base/mock_filters.h"
#include "media/base/test_data_util.h"
diff --git a/chromium/media/filters/frame_buffer_pool.cc b/chromium/media/filters/frame_buffer_pool.cc
index 84c09973a1d..1bcbc8df261 100644
--- a/chromium/media/filters/frame_buffer_pool.cc
+++ b/chromium/media/filters/frame_buffer_pool.cc
@@ -19,8 +19,12 @@
namespace media {
struct FrameBufferPool::FrameBuffer {
- std::vector<uint8_t> data;
- std::vector<uint8_t> alpha_data;
+ // Not using std::vector<uint8_t> as resize() calls take a really long time
+ // for large buffers.
+ std::unique_ptr<uint8_t[]> data;
+ size_t data_size = 0u;
+ std::unique_ptr<uint8_t[]> alpha_data;
+ size_t alpha_data_size = 0u;
bool held_by_library = false;
// Needs to be a counter since a frame buffer might be used multiple times.
int held_by_frame = 0;
@@ -63,12 +67,17 @@ uint8_t* FrameBufferPool::GetFrameBuffer(size_t min_size, void** fb_priv) {
// Resize the frame buffer if necessary.
frame_buffer->held_by_library = true;
- if (frame_buffer->data.size() < min_size)
- frame_buffer->data.resize(min_size);
+ if (frame_buffer->data_size < min_size) {
+ // Free the existing |data| first so that the memory can be reused,
+ // if possible. Note that the new array is purposely not initialized.
+ frame_buffer->data.reset();
+ frame_buffer->data.reset(new uint8_t[min_size]);
+ frame_buffer->data_size = min_size;
+ }
// Provide the client with a private identifier.
*fb_priv = frame_buffer.get();
- return frame_buffer->data.data();
+ return frame_buffer->data.get();
}
void FrameBufferPool::ReleaseFrameBuffer(void* fb_priv) {
@@ -89,9 +98,14 @@ uint8_t* FrameBufferPool::AllocateAlphaPlaneForFrameBuffer(size_t min_size,
auto* frame_buffer = static_cast<FrameBuffer*>(fb_priv);
DCHECK(IsUsed(frame_buffer));
- if (frame_buffer->alpha_data.size() < min_size)
- frame_buffer->alpha_data.resize(min_size);
- return frame_buffer->alpha_data.data();
+ if (frame_buffer->alpha_data_size < min_size) {
+ // Free the existing |alpha_data| first so that the memory can be reused,
+ // if possible. Note that the new array is purposely not initialized.
+ frame_buffer->alpha_data.reset();
+ frame_buffer->alpha_data.reset(new uint8_t[min_size]);
+ frame_buffer->alpha_data_size = min_size;
+ }
+ return frame_buffer->alpha_data.get();
}
base::Closure FrameBufferPool::CreateFrameCallback(void* fb_priv) {
@@ -121,8 +135,8 @@ bool FrameBufferPool::OnMemoryDump(
size_t bytes_reserved = 0;
for (const auto& frame_buffer : frame_buffers_) {
if (IsUsed(frame_buffer.get()))
- bytes_used += frame_buffer->data.size();
- bytes_reserved += frame_buffer->data.size();
+ bytes_used += frame_buffer->data_size + frame_buffer->alpha_data_size;
+ bytes_reserved += frame_buffer->data_size + frame_buffer->alpha_data_size;
}
memory_dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
diff --git a/chromium/media/filters/frame_processor.cc b/chromium/media/filters/frame_processor.cc
index c58ef7e0236..dc6c65f77a2 100644
--- a/chromium/media/filters/frame_processor.cc
+++ b/chromium/media/filters/frame_processor.cc
@@ -653,8 +653,9 @@ bool FrameProcessor::ProcessFrame(scoped_refptr<StreamParserBuffer> frame,
// index.html#sourcebuffer-coded-frame-processing
while (true) {
// 1. Loop Top:
- // Otherwise case: (See SourceBufferState's |auto_update_timestamp_offset_|,
- // too).
+ // Otherwise case: (See also SourceBufferState::OnNewBuffer's conditional
+ // modification of timestamp_offset after frame processing returns, when
+ // generate_timestamps_flag is true).
// 1.1. Let presentation timestamp be a double precision floating point
// representation of the coded frame's presentation timestamp in
// seconds.
@@ -1020,8 +1021,10 @@ bool FrameProcessor::ProcessFrame(scoped_refptr<StreamParserBuffer> frame,
group_end_timestamp_ = frame_end_timestamp;
DCHECK(group_end_timestamp_ >= base::TimeDelta());
- // Step 21 is currently handled differently. See SourceBufferState's
- // |auto_update_timestamp_offset_|.
+ // TODO(wolenetz): Step 21 is currently approximated by predicted
+ // frame_end_time by SourceBufferState::OnNewBuffers(). See
+ // https://crbug.com/850316.
+
return true;
}
diff --git a/chromium/media/filters/frame_processor_unittest.cc b/chromium/media/filters/frame_processor_unittest.cc
index 837973c02ee..0989f0ea76d 100644
--- a/chromium/media/filters/frame_processor_unittest.cc
+++ b/chromium/media/filters/frame_processor_unittest.cc
@@ -352,15 +352,16 @@ class FrameProcessorTest
CHANNEL_LAYOUT_STEREO, 1000,
EmptyExtraData(), Unencrypted());
frame_processor_->OnPossibleAudioConfigUpdate(decoder_config);
- ASSERT_TRUE(audio_->UpdateAudioConfig(decoder_config, &media_log_));
+ ASSERT_TRUE(
+ audio_->UpdateAudioConfig(decoder_config, false, &media_log_));
break;
}
case DemuxerStream::VIDEO: {
ASSERT_FALSE(video_);
video_.reset(
new ChunkDemuxerStream(DemuxerStream::VIDEO, "2", range_api_));
- ASSERT_TRUE(
- video_->UpdateVideoConfig(TestVideoConfig::Normal(), &media_log_));
+ ASSERT_TRUE(video_->UpdateVideoConfig(TestVideoConfig::Normal(), false,
+ &media_log_));
break;
}
// TODO(wolenetz): Test text coded frame processing.
diff --git a/chromium/media/filters/gpu_video_decoder.cc b/chromium/media/filters/gpu_video_decoder.cc
index b16ed0a110e..a5783c95b67 100644
--- a/chromium/media/filters/gpu_video_decoder.cc
+++ b/chromium/media/filters/gpu_video_decoder.cc
@@ -27,7 +27,6 @@
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/pipeline_status.h"
-#include "media/base/surface_manager.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_util.h"
#include "media/media_buildflags.h"
@@ -146,6 +145,10 @@ static void ReportGpuVideoDecoderInitializeStatusToUMAAndRunCB(
cb.Run(success);
}
+bool GpuVideoDecoder::IsPlatformDecoder() const {
+ return true;
+}
+
std::string GpuVideoDecoder::GetDisplayName() const {
return kDecoderName;
}
diff --git a/chromium/media/filters/gpu_video_decoder.h b/chromium/media/filters/gpu_video_decoder.h
index 65a86c9f29d..494cc021671 100644
--- a/chromium/media/filters/gpu_video_decoder.h
+++ b/chromium/media/filters/gpu_video_decoder.h
@@ -20,7 +20,6 @@
#include "gpu/command_buffer/common/sync_token.h"
#include "media/base/overlay_info.h"
#include "media/base/pipeline_status.h"
-#include "media/base/surface_manager.h"
#include "media/base/video_decoder.h"
#include "media/video/video_decode_accelerator.h"
@@ -56,6 +55,7 @@ class MEDIA_EXPORT GpuVideoDecoder
// VideoDecoder implementation.
std::string GetDisplayName() const override;
+ bool IsPlatformDecoder() const override;
void Initialize(
const VideoDecoderConfig& config,
bool low_delay,
diff --git a/chromium/media/filters/pipeline_controller.cc b/chromium/media/filters/pipeline_controller.cc
index 706b58138af..e204dc931b8 100644
--- a/chromium/media/filters/pipeline_controller.cc
+++ b/chromium/media/filters/pipeline_controller.cc
@@ -72,6 +72,7 @@ void PipelineController::Seek(base::TimeDelta time, bool time_updated) {
if (time_updated)
pending_time_updated_ = true;
pending_seeked_cb_ = true;
+ pending_seek_except_start_ = true;
// If we are already seeking to |time|, and the media is static, elide the
// seek.
@@ -111,6 +112,11 @@ bool PipelineController::IsStable() {
return state_ == State::PLAYING;
}
+bool PipelineController::IsPendingSeek() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ return pending_seek_except_start_;
+}
+
bool PipelineController::IsSuspended() {
DCHECK(thread_checker_.CalledOnValidThread());
return (pending_suspend_ || state_ == State::SUSPENDING ||
@@ -287,6 +293,7 @@ void PipelineController::Dispatch() {
// immediately.
pending_startup_ = false;
pending_seeked_cb_ = false;
+ pending_seek_except_start_ = false;
bool was_pending_time_updated = pending_time_updated_;
pending_time_updated_ = false;
seeked_cb_.Run(was_pending_time_updated);
@@ -301,6 +308,7 @@ void PipelineController::Stop() {
demuxer_ = nullptr;
waiting_for_seek_ = false;
pending_seeked_cb_ = false;
+ pending_seek_except_start_ = false;
pending_time_updated_ = false;
pending_seek_ = false;
pending_suspend_ = false;
diff --git a/chromium/media/filters/pipeline_controller.h b/chromium/media/filters/pipeline_controller.h
index bf80dbb66a9..92a420a079f 100644
--- a/chromium/media/filters/pipeline_controller.h
+++ b/chromium/media/filters/pipeline_controller.h
@@ -114,6 +114,10 @@ class MEDIA_EXPORT PipelineController {
// Returns true if the current target state is suspended.
bool IsSuspended();
+ // Returns true if Seek() was called and there is a seek operation which has
+ // not yet completed.
+ bool IsPendingSeek();
+
// Returns true if |pipeline_| is suspended.
bool IsPipelineSuspended();
@@ -189,6 +193,9 @@ class MEDIA_EXPORT PipelineController {
// issued at the next stable state.
bool pending_seeked_cb_ = false;
+ // Indicates that a seek has occurred from an explicit call to Seek().
+ bool pending_seek_except_start_ = false;
+
// Indicates that time has been changed by a seek, which will be reported at
// the next seeked callback.
bool pending_time_updated_ = false;
diff --git a/chromium/media/filters/source_buffer_state.cc b/chromium/media/filters/source_buffer_state.cc
index aa7d625c7b2..2c86688c04d 100644
--- a/chromium/media/filters/source_buffer_state.cc
+++ b/chromium/media/filters/source_buffer_state.cc
@@ -55,6 +55,17 @@ bool CheckBytestreamTrackIds(
return true;
}
+unsigned GetMSEBufferSizeLimitIfExists(base::StringPiece switch_string) {
+ auto* command_line = base::CommandLine::ForCurrentProcess();
+ unsigned memory_limit;
+ if (command_line->HasSwitch(switch_string) &&
+ base::StringToUint(command_line->GetSwitchValueASCII(switch_string),
+ &memory_limit)) {
+ return memory_limit * 1024 * 1024;
+ }
+ return 0;
+}
+
} // namespace
// List of time ranges for each SourceBuffer.
@@ -126,8 +137,7 @@ SourceBufferState::SourceBufferState(
frame_processor_(frame_processor.release()),
create_demuxer_stream_cb_(create_demuxer_stream_cb),
media_log_(media_log),
- state_(UNINITIALIZED),
- auto_update_timestamp_offset_(false) {
+ state_(UNINITIALIZED) {
DCHECK(!create_demuxer_stream_cb_.is_null());
DCHECK(frame_processor_);
}
@@ -137,47 +147,32 @@ SourceBufferState::~SourceBufferState() {
}
void SourceBufferState::Init(
- const StreamParser::InitCB& init_cb,
+ StreamParser::InitCB init_cb,
const std::string& expected_codecs,
const StreamParser::EncryptedMediaInitDataCB& encrypted_media_init_data_cb,
const NewTextTrackCB& new_text_track_cb) {
DCHECK_EQ(state_, UNINITIALIZED);
- init_cb_ = init_cb;
+ init_cb_ = std::move(init_cb);
encrypted_media_init_data_cb_ = encrypted_media_init_data_cb;
new_text_track_cb_ = new_text_track_cb;
+ state_ = PENDING_PARSER_CONFIG;
+ InitializeParser(expected_codecs);
+}
- std::vector<std::string> expected_codecs_parsed;
- SplitCodecsToVector(expected_codecs, &expected_codecs_parsed, false);
+void SourceBufferState::ChangeType(
+ std::unique_ptr<StreamParser> new_stream_parser,
+ const std::string& new_expected_codecs) {
+ DCHECK_GE(state_, PENDING_PARSER_CONFIG);
+ DCHECK_NE(state_, PENDING_PARSER_INIT);
+ DCHECK(!parsing_media_segment_);
- std::vector<AudioCodec> expected_acodecs;
- std::vector<VideoCodec> expected_vcodecs;
- for (const auto& codec_id : expected_codecs_parsed) {
- AudioCodec acodec = StringToAudioCodec(codec_id);
- if (acodec != kUnknownAudioCodec) {
- expected_audio_codecs_.push_back(acodec);
- continue;
- }
- VideoCodec vcodec = StringToVideoCodec(codec_id);
- if (vcodec != kUnknownVideoCodec) {
- expected_video_codecs_.push_back(vcodec);
- continue;
- }
- MEDIA_LOG(INFO, media_log_) << "Unrecognized media codec: " << codec_id;
- }
+ // If this source buffer has already handled an initialization segment, avoid
+ // running |init_cb_| again later.
+ if (state_ == PARSER_INITIALIZED)
+ state_ = PENDING_PARSER_RECONFIG;
- state_ = PENDING_PARSER_CONFIG;
- stream_parser_->Init(
- base::Bind(&SourceBufferState::OnSourceInitDone, base::Unretained(this)),
- base::Bind(&SourceBufferState::OnNewConfigs, base::Unretained(this),
- expected_codecs),
- base::Bind(&SourceBufferState::OnNewBuffers, base::Unretained(this)),
- new_text_track_cb_.is_null(),
- base::Bind(&SourceBufferState::OnEncryptedMediaInitData,
- base::Unretained(this)),
- base::Bind(&SourceBufferState::OnNewMediaSegment, base::Unretained(this)),
- base::Bind(&SourceBufferState::OnEndOfMediaSegment,
- base::Unretained(this)),
- media_log_);
+ stream_parser_ = std::move(new_stream_parser);
+ InitializeParser(new_expected_codecs);
}
void SourceBufferState::SetSequenceMode(bool sequence_mode) {
@@ -545,6 +540,46 @@ bool SourceBufferState::IsSeekWaitingForData() const {
return false;
}
+void SourceBufferState::InitializeParser(const std::string& expected_codecs) {
+ expected_audio_codecs_.clear();
+ expected_video_codecs_.clear();
+
+ std::vector<std::string> expected_codecs_parsed;
+ SplitCodecsToVector(expected_codecs, &expected_codecs_parsed, false);
+
+ std::vector<AudioCodec> expected_acodecs;
+ std::vector<VideoCodec> expected_vcodecs;
+ for (const auto& codec_id : expected_codecs_parsed) {
+ AudioCodec acodec = StringToAudioCodec(codec_id);
+ if (acodec != kUnknownAudioCodec) {
+ expected_audio_codecs_.push_back(acodec);
+ continue;
+ }
+ VideoCodec vcodec = StringToVideoCodec(codec_id);
+ if (vcodec != kUnknownVideoCodec) {
+ expected_video_codecs_.push_back(vcodec);
+ continue;
+ }
+ MEDIA_LOG(INFO, media_log_) << "Unrecognized media codec: " << codec_id;
+ }
+
+ stream_parser_->Init(
+ base::BindOnce(&SourceBufferState::OnSourceInitDone,
+ base::Unretained(this)),
+ base::BindRepeating(&SourceBufferState::OnNewConfigs,
+ base::Unretained(this), expected_codecs),
+ base::BindRepeating(&SourceBufferState::OnNewBuffers,
+ base::Unretained(this)),
+ new_text_track_cb_.is_null(),
+ base::BindRepeating(&SourceBufferState::OnEncryptedMediaInitData,
+ base::Unretained(this)),
+ base::BindRepeating(&SourceBufferState::OnNewMediaSegment,
+ base::Unretained(this)),
+ base::BindRepeating(&SourceBufferState::OnEndOfMediaSegment,
+ base::Unretained(this)),
+ media_log_);
+}
+
bool SourceBufferState::OnNewConfigs(
std::string expected_codecs,
std::unique_ptr<MediaTracks> tracks,
@@ -576,6 +611,11 @@ bool SourceBufferState::OnNewConfigs(
std::vector<AudioCodec> expected_acodecs = expected_audio_codecs_;
std::vector<VideoCodec> expected_vcodecs = expected_video_codecs_;
+ // TODO(wolenetz): Once codec strictness is relaxed, we can change
+ // |allow_codec_changes| to always be true. Until then, we only allow codec
+ // changes on explicit ChangeType().
+ const bool allow_codec_changes = state_ == PENDING_PARSER_RECONFIG;
+
FrameProcessor::TrackIdChanges track_id_changes;
for (const auto& track : tracks->tracks()) {
const auto& track_id = track->bytestream_track_id();
@@ -635,7 +675,8 @@ bool SourceBufferState::OnNewConfigs(
track->set_id(stream->media_track_id());
frame_processor_->OnPossibleAudioConfigUpdate(audio_config);
- success &= stream->UpdateAudioConfig(audio_config, media_log_);
+ success &= stream->UpdateAudioConfig(audio_config, allow_codec_changes,
+ media_log_);
} else if (track->type() == MediaTrack::Video) {
VideoDecoderConfig video_config = tracks->getVideoConfig(track_id);
DVLOG(1) << "Video track_id=" << track_id
@@ -690,7 +731,8 @@ bool SourceBufferState::OnNewConfigs(
}
track->set_id(stream->media_track_id());
- success &= stream->UpdateVideoConfig(video_config, media_log_);
+ success &= stream->UpdateVideoConfig(video_config, allow_codec_changes,
+ media_log_);
} else {
MEDIA_LOG(ERROR, media_log_) << "Error: unsupported media track type "
<< track->type();
@@ -798,6 +840,8 @@ bool SourceBufferState::OnNewConfigs(
if (success) {
if (state_ == PENDING_PARSER_CONFIG)
state_ = PENDING_PARSER_INIT;
+ if (state_ == PENDING_PARSER_RECONFIG)
+ state_ = PENDING_PARSER_REINIT;
DCHECK(!init_segment_received_cb_.is_null());
init_segment_received_cb_.Run(std::move(tracks));
}
@@ -806,32 +850,24 @@ bool SourceBufferState::OnNewConfigs(
}
void SourceBufferState::SetStreamMemoryLimits() {
- auto* cmd_line = base::CommandLine::ForCurrentProcess();
-
- std::string audio_buf_limit_switch =
- cmd_line->GetSwitchValueASCII(switches::kMSEAudioBufferSizeLimit);
- unsigned audio_buf_size_limit = 0;
- if (base::StringToUint(audio_buf_limit_switch, &audio_buf_size_limit) &&
- audio_buf_size_limit > 0) {
+ size_t audio_buf_size_limit =
+ GetMSEBufferSizeLimitIfExists(switches::kMSEAudioBufferSizeLimitMb);
+ if (audio_buf_size_limit) {
MEDIA_LOG(INFO, media_log_)
<< "Custom audio per-track SourceBuffer size limit="
<< audio_buf_size_limit;
- for (const auto& it : audio_streams_) {
+ for (const auto& it : audio_streams_)
it.second->SetStreamMemoryLimit(audio_buf_size_limit);
- }
}
- std::string video_buf_limit_switch =
- cmd_line->GetSwitchValueASCII(switches::kMSEVideoBufferSizeLimit);
- unsigned video_buf_size_limit = 0;
- if (base::StringToUint(video_buf_limit_switch, &video_buf_size_limit) &&
- video_buf_size_limit > 0) {
+ size_t video_buf_size_limit =
+ GetMSEBufferSizeLimitIfExists(switches::kMSEVideoBufferSizeLimitMb);
+ if (video_buf_size_limit) {
MEDIA_LOG(INFO, media_log_)
<< "Custom video per-track SourceBuffer size limit="
<< video_buf_size_limit;
- for (const auto& it : video_streams_) {
+ for (const auto& it : video_streams_)
it.second->SetStreamMemoryLimit(video_buf_size_limit);
- }
}
}
@@ -886,9 +922,10 @@ bool SourceBufferState::OnNewBuffers(
*timestamp_offset_during_append_;
// Calculate the new timestamp offset for audio/video tracks if the stream
- // parser has requested automatic updates.
- TimeDelta new_timestamp_offset = timestamp_offset_before_processing;
- if (auto_update_timestamp_offset_) {
+ // parser corresponds to MSE MIME type with 'Generate Timestamps Flag' set
+ // true.
+ TimeDelta predicted_timestamp_offset = timestamp_offset_before_processing;
+ if (generate_timestamps_flag()) {
TimeDelta min_end_timestamp = kNoTimestamp;
for (const auto& it : buffer_queue_map) {
const StreamParser::BufferQueue& bufq = it.second;
@@ -900,7 +937,7 @@ bool SourceBufferState::OnNewBuffers(
}
}
if (min_end_timestamp != kNoTimestamp)
- new_timestamp_offset += min_end_timestamp;
+ predicted_timestamp_offset += min_end_timestamp;
}
if (!frame_processor_->ProcessFrames(
@@ -910,9 +947,11 @@ bool SourceBufferState::OnNewBuffers(
}
// Only update the timestamp offset if the frame processor hasn't already.
- if (auto_update_timestamp_offset_ &&
+ if (generate_timestamps_flag() &&
timestamp_offset_before_processing == *timestamp_offset_during_append_) {
- *timestamp_offset_during_append_ = new_timestamp_offset;
+ // TODO(wolenetz): This prediction assumes the last frame in each track
+ // isn't dropped by append window trimming. See https://crbug.com/850316.
+ *timestamp_offset_during_append_ = predicted_timestamp_offset;
}
return true;
@@ -927,10 +966,15 @@ void SourceBufferState::OnEncryptedMediaInitData(
void SourceBufferState::OnSourceInitDone(
const StreamParser::InitParameters& params) {
- DCHECK_EQ(state_, PENDING_PARSER_INIT);
+ // We've either yet-to-run |init_cb_| if pending init, or we've previously
+ // run it if pending reinit.
+ DCHECK((!init_cb_.is_null() && state_ == PENDING_PARSER_INIT) ||
+ (init_cb_.is_null() && state_ == PENDING_PARSER_REINIT));
+ State old_state = state_;
state_ = PARSER_INITIALIZED;
- auto_update_timestamp_offset_ = params.auto_update_timestamp_offset;
- base::ResetAndReturn(&init_cb_).Run(params);
+
+ if (old_state == PENDING_PARSER_INIT)
+ std::move(init_cb_).Run(params);
}
} // namespace media
diff --git a/chromium/media/filters/source_buffer_state.h b/chromium/media/filters/source_buffer_state.h
index c487344eecd..7bac0c5616d 100644
--- a/chromium/media/filters/source_buffer_state.h
+++ b/chromium/media/filters/source_buffer_state.h
@@ -44,12 +44,18 @@ class MEDIA_EXPORT SourceBufferState {
~SourceBufferState();
- void Init(const StreamParser::InitCB& init_cb,
+ void Init(StreamParser::InitCB init_cb,
const std::string& expected_codecs,
const StreamParser::EncryptedMediaInitDataCB&
encrypted_media_init_data_cb,
const NewTextTrackCB& new_text_track_cb);
+ // Reconfigures this source buffer to use |new_stream_parser|. Caller must
+ // first ensure that ResetParserState() was done to flush any pending frames
+ // from the old stream parser.
+ void ChangeType(std::unique_ptr<StreamParser> new_stream_parser,
+ const std::string& new_expected_codecs);
+
// Appends new data to the StreamParser.
// Returns true if the data was successfully appended. Returns false if an
// error occurred. |*timestamp_offset| is used and possibly updated by the
@@ -91,6 +97,12 @@ class MEDIA_EXPORT SourceBufferState {
// Returns true if currently parsing a media segment, or false otherwise.
bool parsing_media_segment() const { return parsing_media_segment_; }
+ // Returns the 'Generate Timestamps Flag' for this SourceBuffer's byte stream
+ // format parser as described in the MSE Byte Stream Format Registry.
+ bool generate_timestamps_flag() const {
+ return stream_parser_->GetGenerateTimestampsFlag();
+ }
+
// Sets |frame_processor_|'s sequence mode to |sequence_mode|.
void SetSequenceMode(bool sequence_mode);
@@ -139,16 +151,30 @@ class MEDIA_EXPORT SourceBufferState {
const SourceBufferParseWarningCB& parse_warning_cb);
private:
- // State advances through this list. The intent is to ensure at least one
- // config is received prior to parser calling initialization callback, and
- // that such initialization callback occurs at most once per parser.
+ // State advances through this list to PARSER_INITIALIZED.
+ // The intent is to ensure at least one config is received prior to parser
+ // calling initialization callback, and that such initialization callback
+ // occurs at most once per parser.
+ // PENDING_PARSER_RECONFIG occurs if State had reached PARSER_INITIALIZED
+ // before changing to a new StreamParser in ChangeType(). In such case, State
+ // would then advance to PENDING_PARSER_REINIT, then PARSER_INITIALIZED upon
+ // the next initialization segment parsed, but would not run the
+ // initialization callback in this case (since such would already have
+ // occurred on the initial transition from PENDING_PARSER_INIT to
+ // PARSER_INITIALIZED.)
enum State {
UNINITIALIZED = 0,
PENDING_PARSER_CONFIG,
PENDING_PARSER_INIT,
- PARSER_INITIALIZED
+ PARSER_INITIALIZED,
+ PENDING_PARSER_RECONFIG,
+ PENDING_PARSER_REINIT
};
+ // Initializes |stream_parser_|. Also, updates |expected_audio_codecs| and
+ // |expected_video_codecs|.
+ void InitializeParser(const std::string& expected_codecs);
+
// Called by the |stream_parser_| when a new initialization segment is
// encountered.
// Returns true on a successful call. Returns false if an error occurred while
@@ -237,12 +263,6 @@ class MEDIA_EXPORT SourceBufferState {
std::vector<AudioCodec> expected_audio_codecs_;
std::vector<VideoCodec> expected_video_codecs_;
- // Indicates that timestampOffset should be updated automatically during
- // OnNewBuffers() based on the earliest end timestamp of the buffers provided.
- // TODO(wolenetz): Refactor this function while integrating April 29, 2014
- // changes to MSE spec. See http://crbug.com/371499.
- bool auto_update_timestamp_offset_;
-
DISALLOW_COPY_AND_ASSIGN(SourceBufferState);
};
diff --git a/chromium/media/filters/source_buffer_state_unittest.cc b/chromium/media/filters/source_buffer_state_unittest.cc
index caa4dd25c19..8448fef6970 100644
--- a/chromium/media/filters/source_buffer_state_unittest.cc
+++ b/chromium/media/filters/source_buffer_state_unittest.cc
@@ -78,13 +78,20 @@ class SourceBufferStateTest
std::unique_ptr<SourceBufferState> CreateAndInitSourceBufferState(
const std::string& expected_codecs) {
std::unique_ptr<SourceBufferState> sbs = CreateSourceBufferState();
+ // Instead of using SaveArg<> to update |new_config_cb_| when mocked Init is
+ // called, we use a lambda because SaveArg<> doesn't work if any of the
+ // mocked method's arguments are move-only type.
EXPECT_CALL(*mock_stream_parser_, Init(_, _, _, _, _, _, _, _))
- .WillOnce(SaveArg<1>(&new_config_cb_));
- sbs->Init(base::Bind(&SourceBufferStateTest::SourceInitDone,
- base::Unretained(this)),
+ .WillOnce([&](auto init_cb, auto config_cb, auto new_buffers_cb,
+ auto ignore_text_track, auto encrypted_media_init_data_cb,
+ auto new_segment_cb, auto end_of_segment_cb,
+ auto media_log) { new_config_cb_ = config_cb; });
+ sbs->Init(base::BindOnce(&SourceBufferStateTest::SourceInitDone,
+ base::Unretained(this)),
expected_codecs,
- base::Bind(&SourceBufferStateTest::StreamParserEncryptedInitData,
- base::Unretained(this)),
+ base::BindRepeating(
+ &SourceBufferStateTest::StreamParserEncryptedInitData,
+ base::Unretained(this)),
base::Bind(&SourceBufferStateTest::StreamParserNewTextTrack,
base::Unretained(this)));
diff --git a/chromium/media/filters/source_buffer_stream.cc b/chromium/media/filters/source_buffer_stream.cc
index ec616d4b0a5..8b0173117cf 100644
--- a/chromium/media/filters/source_buffer_stream.cc
+++ b/chromium/media/filters/source_buffer_stream.cc
@@ -147,7 +147,7 @@ std::string BufferQueueBuffersToLogString(
for (const auto& buf : buffers) {
result << "\tdts=" << buf->GetDecodeTimestamp().InMicroseconds() << " "
<< buf->AsHumanReadableString()
- << ", duration_type=" << static_cast<int>(buf->duration_type())
+ << ", is_duration_estimated=" << buf->is_duration_estimated()
<< "\n";
}
@@ -1250,7 +1250,7 @@ void SourceBufferStream<RangeClass>::TrimSpliceOverlap(
" (bad content) at time "
<< splice_timestamp.InMicroseconds();
- MEDIA_LOG(ERROR, media_log_)
+ MEDIA_LOG(WARNING, media_log_)
<< "Media is badly muxed. Detected " << overlapped_buffers.size()
<< " overlapping audio buffers at time "
<< splice_timestamp.InMicroseconds();
@@ -1267,6 +1267,16 @@ void SourceBufferStream<RangeClass>::TrimSpliceOverlap(
return;
}
+ // Trimming a buffer with estimated duration is too risky. Estimates are rough
+ // and what appears to be overlap may really just be a bad estimate. Imprecise
+ // trimming may lead to loss of AV sync.
+ if (overlapped_buffer->is_duration_estimated()) {
+ DVLOG(3) << __func__ << " Skipping audio splice trimming at PTS="
+ << splice_timestamp.InMicroseconds() << ". Overlapped buffer has "
+ << "estimated duration.";
+ return;
+ }
+
// Determine the duration of overlap.
base::TimeDelta overlapped_end_time =
overlapped_buffer->timestamp() + overlapped_buffer->duration();
@@ -1290,14 +1300,6 @@ void SourceBufferStream<RangeClass>::TrimSpliceOverlap(
return;
}
- // At this point, trimming will go ahead. Log UMAs about the type of duration
- // in the original overlapped buffer. The hope is that splicing on
- // rough-estimated durations is rare enough that we can disable it outright.
- // This would allow more liberal estimates of audio durations.
- UMA_HISTOGRAM_ENUMERATION(
- "Media.MSE.AudioSpliceDurationType", overlapped_buffer->duration_type(),
- static_cast<int>(DurationType::kDurationTypeMax) + 1);
-
// Trim overlap from the existing buffer.
DecoderBuffer::DiscardPadding discard_padding =
overlapped_buffer->discard_padding();
@@ -1974,13 +1976,21 @@ base::TimeDelta SourceBufferStream<RangeClass>::GetMaxInterbufferDistance()
template <typename RangeClass>
bool SourceBufferStream<RangeClass>::UpdateAudioConfig(
- const AudioDecoderConfig& config) {
+ const AudioDecoderConfig& config,
+ bool allow_codec_change) {
DCHECK(!audio_configs_.empty());
DCHECK(video_configs_.empty());
DVLOG(3) << "UpdateAudioConfig.";
- if (audio_configs_[0].codec() != config.codec()) {
- MEDIA_LOG(ERROR, media_log_) << "Audio codec changes not allowed.";
+ if (!allow_codec_change &&
+ audio_configs_[append_config_index_].codec() != config.codec()) {
+ // TODO(wolenetz): When we relax addSourceBuffer() and changeType() codec
+ // strictness, codec changes should be allowed even without changing the
+ // bytestream.
+ // TODO(wolenetz): Remove "experimental" from this error message when
+ // changeType() ships without needing experimental blink flag.
+ MEDIA_LOG(ERROR, media_log_) << "Audio codec changes not allowed unless "
+ "using experimental changeType().";
return false;
}
@@ -2002,13 +2012,21 @@ bool SourceBufferStream<RangeClass>::UpdateAudioConfig(
template <typename RangeClass>
bool SourceBufferStream<RangeClass>::UpdateVideoConfig(
- const VideoDecoderConfig& config) {
+ const VideoDecoderConfig& config,
+ bool allow_codec_change) {
DCHECK(!video_configs_.empty());
DCHECK(audio_configs_.empty());
DVLOG(3) << "UpdateVideoConfig.";
- if (video_configs_[0].codec() != config.codec()) {
- MEDIA_LOG(ERROR, media_log_) << "Video codec changes not allowed.";
+ if (!allow_codec_change &&
+ video_configs_[append_config_index_].codec() != config.codec()) {
+ // TODO(wolenetz): When we relax addSourceBuffer() and changeType() codec
+ // strictness, codec changes should be allowed even without changing the
+ // bytestream.
+ // TODO(wolenetz): Remove "experimental" from this error message when
+ // changeType() ships without needing experimental blink flag.
+ MEDIA_LOG(ERROR, media_log_) << "Video codec changes not allowed unless "
+ "using experimental changeType()";
return false;
}
diff --git a/chromium/media/filters/source_buffer_stream.h b/chromium/media/filters/source_buffer_stream.h
index a50e7c36df4..590d88d85cb 100644
--- a/chromium/media/filters/source_buffer_stream.h
+++ b/chromium/media/filters/source_buffer_stream.h
@@ -164,11 +164,17 @@ class MEDIA_EXPORT SourceBufferStream {
// Notifies this object that the audio config has changed and buffers in
// future Append() calls should be associated with this new config.
- bool UpdateAudioConfig(const AudioDecoderConfig& config);
+ // If the codec is allowed to change, the caller should set
+ // |allow_codec_change| to true.
+ bool UpdateAudioConfig(const AudioDecoderConfig& config,
+ bool allow_codec_change);
// Notifies this object that the video config has changed and buffers in
// future Append() calls should be associated with this new config.
- bool UpdateVideoConfig(const VideoDecoderConfig& config);
+ // If the codec is allowed to change, the caller should set
+ // |allow_codec_change| to true.
+ bool UpdateVideoConfig(const VideoDecoderConfig& config,
+ bool allow_codec_change);
// Returns the largest distance between two adjacent buffers in this stream,
// or an estimate if no two adjacent buffers have been appended to the stream
diff --git a/chromium/media/filters/source_buffer_stream_unittest.cc b/chromium/media/filters/source_buffer_stream_unittest.cc
index 83ec07e9820..9b57c0a5808 100644
--- a/chromium/media/filters/source_buffer_stream_unittest.cc
+++ b/chromium/media/filters/source_buffer_stream_unittest.cc
@@ -721,7 +721,7 @@ class SourceBufferStreamTest : public testing::TestWithParam<BufferingApi> {
&kDataA, kDataSize, is_keyframe, GetStreamType(), 0);
buffer->set_timestamp(buffer_timestamps[0]);
if (is_duration_estimated)
- buffer->set_duration_type(DurationType::kRoughEstimate);
+ buffer->set_is_duration_estimated(true);
if (buffer_timestamps[1] != buffer_timestamps[0]) {
buffer->SetDecodeTimestamp(
@@ -3472,7 +3472,7 @@ TEST_P(SourceBufferStreamTest, ConfigChange_Basic) {
CheckVideoConfig(video_config_);
// Signal a config change.
- STREAM_OP(UpdateVideoConfig(new_config));
+ STREAM_OP(UpdateVideoConfig(new_config, false));
// Make sure updating the config doesn't change anything since new_config
// should not be associated with the buffer GetNextBuffer() will return.
@@ -3508,7 +3508,7 @@ TEST_P(SourceBufferStreamTest, ConfigChange_Seek) {
Seek(0);
NewCodedFrameGroupAppend(0, 5, &kDataA);
- STREAM_OP(UpdateVideoConfig(new_config));
+ STREAM_OP(UpdateVideoConfig(new_config, false));
NewCodedFrameGroupAppend(5, 5, &kDataB);
// Seek to the start of the buffers with the new config and make sure a
@@ -4545,6 +4545,27 @@ TEST_P(SourceBufferStreamTest, Audio_NoSpliceForBadOverlap) {
CheckNoNextBuffer();
}
+TEST_P(SourceBufferStreamTest, Audio_NoSpliceForEstimatedDuration) {
+ SetAudioStream();
+ Seek(0);
+
+ // Append two buffers, the latter having estimated duration.
+ NewCodedFrameGroupAppend("0D10K 10D10EK");
+ CheckExpectedRangesByTimestamp("{ [0,20) }");
+ CheckExpectedBuffers("0D10K 10D10EK");
+ CheckNoNextBuffer();
+
+ Seek(0);
+
+ // Add a new frame in a separate coded frame group that falls in the middle of
+ // the second buffer. In spite of the overlap, no splice should be performed
+ // due to the overlapped buffer having estimated duration.
+ NewCodedFrameGroupAppend("15D10K");
+ CheckExpectedRangesByTimestamp("{ [0,25) }");
+ CheckExpectedBuffers("0D10K 10D10EK 15D10K");
+ CheckNoNextBuffer();
+}
+
TEST_P(SourceBufferStreamTest, Audio_SpliceTrimming_ExistingTrimming) {
const base::TimeDelta kDuration = base::TimeDelta::FromMilliseconds(4);
const base::TimeDelta kNoDiscard = base::TimeDelta();
@@ -4688,7 +4709,7 @@ TEST_P(SourceBufferStreamTest, Audio_ConfigChangeWithPreroll) {
NewCodedFrameGroupAppend("0K 3K 6K");
// Update the configuration.
- STREAM_OP(UpdateAudioConfig(new_config));
+ STREAM_OP(UpdateAudioConfig(new_config, false));
// We haven't read any buffers at this point, so the config for the next
// buffer at time 0 should still be the original config.
@@ -4948,7 +4969,7 @@ TEST_P(SourceBufferStreamTest, ConfigChange_ReSeek) {
// Append a few buffers, with a config change in the middle.
VideoDecoderConfig new_config = TestVideoConfig::Large();
NewCodedFrameGroupAppend("2000K 2010 2020D10");
- STREAM_OP(UpdateVideoConfig(new_config));
+ STREAM_OP(UpdateVideoConfig(new_config, false));
NewCodedFrameGroupAppend("2030K 2040 2050D10");
CheckExpectedRangesByTimestamp("{ [2000,2060) }");
diff --git a/chromium/media/filters/stream_parser_factory.cc b/chromium/media/filters/stream_parser_factory.cc
index 8b9ff97eab9..d515e2b0ce7 100644
--- a/chromium/media/filters/stream_parser_factory.cc
+++ b/chromium/media/filters/stream_parser_factory.cc
@@ -16,6 +16,7 @@
#include "build/build_config.h"
#include "media/base/media.h"
#include "media/base/media_switches.h"
+#include "media/base/video_codecs.h"
#include "media/formats/mp4/mp4_stream_parser.h"
#include "media/formats/mpeg/adts_stream_parser.h"
#include "media/formats/mpeg/mpeg1_audio_stream_parser.h"
@@ -92,9 +93,8 @@ static const CodecInfo kOpusCodecInfo = {"opus", CodecInfo::AUDIO, nullptr,
CodecInfo::HISTOGRAM_OPUS};
#if BUILDFLAG(ENABLE_AV1_DECODER)
-// TODO(dalecurtis): This is not the correct final string. Fix before enabling
-// by default. http://crbug.com/784607
-static const CodecInfo kAV1CodecInfo = {"av1", CodecInfo::VIDEO, nullptr,
+// Note: Validation of the codec string is handled by the caller.
+static const CodecInfo kAV1CodecInfo = {"av01.*", CodecInfo::VIDEO, nullptr,
CodecInfo::HISTOGRAM_AV1};
#endif
diff --git a/chromium/media/filters/video_frame_stream_unittest.cc b/chromium/media/filters/video_frame_stream_unittest.cc
index 6c3f7f3006f..ee116463f87 100644
--- a/chromium/media/filters/video_frame_stream_unittest.cc
+++ b/chromium/media/filters/video_frame_stream_unittest.cc
@@ -85,8 +85,8 @@ class VideoFrameStreamTest
video_frame_stream_.reset(new VideoFrameStream(
std::make_unique<VideoFrameStream::StreamTraits>(&media_log_),
message_loop_.task_runner(),
- base::Bind(&VideoFrameStreamTest::CreateVideoDecodersForTest,
- base::Unretained(this)),
+ base::BindRepeating(&VideoFrameStreamTest::CreateVideoDecodersForTest,
+ base::Unretained(this)),
&media_log_));
video_frame_stream_->set_decoder_change_observer_for_testing(base::Bind(
&VideoFrameStreamTest::OnDecoderChanged, base::Unretained(this)));
@@ -259,12 +259,14 @@ class VideoFrameStreamTest
void Initialize() {
pending_initialize_ = true;
video_frame_stream_->Initialize(
- demuxer_stream_.get(), base::Bind(&VideoFrameStreamTest::OnInitialized,
- base::Unretained(this)),
+ demuxer_stream_.get(),
+ base::BindOnce(&VideoFrameStreamTest::OnInitialized,
+ base::Unretained(this)),
cdm_context_.get(),
- base::Bind(&VideoFrameStreamTest::OnStatistics, base::Unretained(this)),
- base::Bind(&VideoFrameStreamTest::OnWaitingForDecryptionKey,
- base::Unretained(this)));
+ base::BindRepeating(&VideoFrameStreamTest::OnStatistics,
+ base::Unretained(this)),
+ base::BindRepeating(&VideoFrameStreamTest::OnWaitingForDecryptionKey,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/video_renderer_algorithm.cc b/chromium/media/filters/video_renderer_algorithm.cc
index aa98ee6cc22..68847a95d14 100644
--- a/chromium/media/filters/video_renderer_algorithm.cc
+++ b/chromium/media/filters/video_renderer_algorithm.cc
@@ -165,14 +165,22 @@ scoped_refptr<VideoFrame> VideoRendererAlgorithm::Render(
}
// Step 7: Drop frames which occur prior to the frame to be rendered. If any
- // frame has a zero render count it should be reported as dropped.
+ // frame unexpectedly has a zero render count it should be reported as
+ // dropped. When using cadence some frames may be expected to be skipped and
+ // should not be counted as dropped.
if (frame_to_render > 0) {
if (frames_dropped) {
for (int i = 0; i < frame_to_render; ++i) {
const ReadyFrame& frame = frame_queue_[i];
+
+ // If a frame was ever rendered, don't count it as dropped.
if (frame.render_count != frame.drop_count)
continue;
+ // If we expected to never render the frame, don't count it as dropped.
+ if (cadence_estimator_.has_cadence() && !frame.ideal_render_count)
+ continue;
+
// If frame dropping is disabled, ignore the results of the algorithm
// and return the earliest unrendered frame.
if (frame_dropping_disabled_) {
diff --git a/chromium/media/filters/video_renderer_algorithm_unittest.cc b/chromium/media/filters/video_renderer_algorithm_unittest.cc
index ec93d0ec9a9..1dc9e1f34e8 100644
--- a/chromium/media/filters/video_renderer_algorithm_unittest.cc
+++ b/chromium/media/filters/video_renderer_algorithm_unittest.cc
@@ -1122,22 +1122,15 @@ TEST_F(VideoRendererAlgorithmTest, BestFrameByFractionalCadence) {
TickGenerator frame_tg(base::TimeTicks(), test_rate[0]);
TickGenerator display_tg(tick_clock_->NowTicks(), test_rate[1]);
- const size_t desired_drop_pattern = test_rate[0] / test_rate[1] - 1;
scoped_refptr<VideoFrame> current_frame;
RunFramePumpTest(
true, &frame_tg, &display_tg,
- [&current_frame, desired_drop_pattern, this](
- const scoped_refptr<VideoFrame>& frame, size_t frames_dropped) {
+ [&current_frame, this](const scoped_refptr<VideoFrame>& frame,
+ size_t frames_dropped) {
ASSERT_TRUE(frame);
- // The first frame should have zero dropped frames, but each Render()
- // call after should drop the same number of frames based on the
- // fractional cadence.
- if (!current_frame)
- ASSERT_EQ(0u, frames_dropped);
- else
- ASSERT_EQ(desired_drop_pattern, frames_dropped);
-
+ // We don't count frames dropped that cadence says we should skip.
+ ASSERT_EQ(0u, frames_dropped);
ASSERT_NE(current_frame, frame);
ASSERT_TRUE(is_using_cadence());
current_frame = frame;
diff --git a/chromium/media/filters/vpx_video_decoder_unittest.cc b/chromium/media/filters/vpx_video_decoder_unittest.cc
index 273687d27a3..611b6cc86b9 100644
--- a/chromium/media/filters/vpx_video_decoder_unittest.cc
+++ b/chromium/media/filters/vpx_video_decoder_unittest.cc
@@ -297,7 +297,9 @@ TEST_F(VpxVideoDecoderTest, FrameValidAfterPoolDestruction) {
}
// The test stream uses profile 2, which needs high bit depth support in libvpx.
-#if !defined(LIBVPX_NO_HIGH_BIT_DEPTH)
+// On ARM we fail to decode the final, duplicate frame, so there is no point in
+// running this test (https://crbug.com/864458).
+#if !defined(LIBVPX_NO_HIGH_BIT_DEPTH) && !defined(ARCH_CPU_ARM_FAMILY)
TEST_F(VpxVideoDecoderTest, MemoryPoolAllowsMultipleDisplay) {
// Initialize with dummy data, we could read it from the test clip, but it's
// not necessary for this test.
@@ -309,36 +311,27 @@ TEST_F(VpxVideoDecoderTest, MemoryPoolAllowsMultipleDisplay) {
FFmpegGlue glue(&protocol);
ASSERT_TRUE(glue.OpenContext());
- AVPacket packet;
+ AVPacket packet = {};
while (av_read_frame(glue.format_context(), &packet) >= 0) {
- if (Decode(DecoderBuffer::CopyFrom(packet.data, packet.size)) !=
- DecodeStatus::OK) {
- av_packet_unref(&packet);
- break;
- }
+ DecodeStatus decode_status =
+ Decode(DecoderBuffer::CopyFrom(packet.data, packet.size));
av_packet_unref(&packet);
+ if (decode_status != DecodeStatus::OK)
+ break;
}
- // Android returns 25 frames while other platforms return 26 for some reason;
- // we don't really care about the exact number.
- ASSERT_GE(output_frames_.size(), 25u);
-
- scoped_refptr<VideoFrame> last_frame = output_frames_.back();
+ ASSERT_EQ(output_frames_.size(), 26u);
- // Duplicate frame is actually two before the last in this bitstream.
- scoped_refptr<VideoFrame> dupe_frame =
- output_frames_[output_frames_.size() - 3];
+ // The final frame is a duplicate of the third-from-final one.
+ scoped_refptr<VideoFrame> last_frame = output_frames_[25];
+ scoped_refptr<VideoFrame> dupe_frame = output_frames_[23];
-#if !defined(OS_ANDROID)
- // Android doesn't seem to expose this bug, but the rest of the test is still
- // reasonable to complete even on Android.
EXPECT_EQ(last_frame->data(VideoFrame::kYPlane),
dupe_frame->data(VideoFrame::kYPlane));
EXPECT_EQ(last_frame->data(VideoFrame::kUPlane),
dupe_frame->data(VideoFrame::kUPlane));
EXPECT_EQ(last_frame->data(VideoFrame::kVPlane),
dupe_frame->data(VideoFrame::kVPlane));
-#endif
// This will release all frames held by the memory pool, but should not
// release |last_frame| since we still have a ref despite sharing the same
@@ -351,6 +344,6 @@ TEST_F(VpxVideoDecoderTest, MemoryPoolAllowsMultipleDisplay) {
memset(last_frame->data(VideoFrame::kYPlane), 0,
last_frame->row_bytes(VideoFrame::kYPlane));
}
-#endif // !defined(LIBVPX_NO_HIGH_BIT_DEPTH)
+#endif // !defined(LIBVPX_NO_HIGH_BIT_DEPTH) && !defined(ARCH_CPU_ARM_FAMILY)
} // namespace media
diff --git a/chromium/media/formats/common/stream_parser_test_base.cc b/chromium/media/formats/common/stream_parser_test_base.cc
index 42665927a88..24cc64cb877 100644
--- a/chromium/media/formats/common/stream_parser_test_base.cc
+++ b/chromium/media/formats/common/stream_parser_test_base.cc
@@ -37,13 +37,18 @@ StreamParserTestBase::StreamParserTestBase(
std::unique_ptr<StreamParser> stream_parser)
: parser_(std::move(stream_parser)) {
parser_->Init(
- base::Bind(&StreamParserTestBase::OnInitDone, base::Unretained(this)),
- base::Bind(&StreamParserTestBase::OnNewConfig, base::Unretained(this)),
- base::Bind(&StreamParserTestBase::OnNewBuffers, base::Unretained(this)),
+ base::BindOnce(&StreamParserTestBase::OnInitDone, base::Unretained(this)),
+ base::BindRepeating(&StreamParserTestBase::OnNewConfig,
+ base::Unretained(this)),
+ base::BindRepeating(&StreamParserTestBase::OnNewBuffers,
+ base::Unretained(this)),
true,
- base::Bind(&StreamParserTestBase::OnKeyNeeded, base::Unretained(this)),
- base::Bind(&StreamParserTestBase::OnNewSegment, base::Unretained(this)),
- base::Bind(&StreamParserTestBase::OnEndOfSegment, base::Unretained(this)),
+ base::BindRepeating(&StreamParserTestBase::OnKeyNeeded,
+ base::Unretained(this)),
+ base::BindRepeating(&StreamParserTestBase::OnNewSegment,
+ base::Unretained(this)),
+ base::BindRepeating(&StreamParserTestBase::OnEndOfSegment,
+ base::Unretained(this)),
&media_log_);
}
@@ -81,9 +86,7 @@ bool StreamParserTestBase::AppendDataInPieces(const uint8_t* data,
void StreamParserTestBase::OnInitDone(
const StreamParser::InitParameters& params) {
- EXPECT_TRUE(params.auto_update_timestamp_offset);
- DVLOG(1) << __func__ << "(" << params.duration.InMilliseconds() << ", "
- << params.auto_update_timestamp_offset << ")";
+ DVLOG(1) << __func__ << "(" << params.duration.InMilliseconds() << ")";
}
bool StreamParserTestBase::OnNewConfig(
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser.cc b/chromium/media/formats/mp2t/mp2t_stream_parser.cc
index 60ccf910e2d..b9c4b73344a 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser.cc
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser.cc
@@ -204,7 +204,7 @@ Mp2tStreamParser::~Mp2tStreamParser() {
}
void Mp2tStreamParser::Init(
- const InitCB& init_cb,
+ InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool /* ignore_text_tracks */,
@@ -221,7 +221,7 @@ void Mp2tStreamParser::Init(
DCHECK(!new_segment_cb.is_null());
DCHECK(!end_of_segment_cb.is_null());
- init_cb_ = init_cb;
+ init_cb_ = std::move(init_cb);
config_cb_ = config_cb;
new_buffers_cb_ = new_buffers_cb;
encrypted_media_init_data_cb_ = encrypted_media_init_data_cb;
@@ -278,6 +278,10 @@ void Mp2tStreamParser::Flush() {
timestamp_unroller_.Reset();
}
+bool Mp2tStreamParser::GetGenerateTimestampsFlag() const {
+ return false;
+}
+
bool Mp2tStreamParser::Parse(const uint8_t* buf, int size) {
DVLOG(1) << "Mp2tStreamParser::Parse size=" << size;
@@ -699,7 +703,7 @@ bool Mp2tStreamParser::FinishInitializationIfNeeded() {
queue_with_config.audio_config.IsValidConfig() ? 1 : 0;
params.detected_video_track_count =
queue_with_config.video_config.IsValidConfig() ? 1 : 0;
- base::ResetAndReturn(&init_cb_).Run(params);
+ std::move(init_cb_).Run(params);
is_initialized_ = true;
return true;
@@ -899,17 +903,8 @@ void Mp2tStreamParser::RegisterNewKeyIdAndIv(const std::string& key_id,
decrypt_config_ = DecryptConfig::CreateCencConfig(key_id, iv, {});
break;
case EncryptionScheme::CIPHER_MODE_AES_CBC:
- // MP2 Transport Streams don't always specify the encryption pattern up
- // front. Instead it is determined later by the stream type. So if the
- // pattern is unknown, leave it out.
- EncryptionPattern pattern = initial_scheme_.pattern();
- if (pattern.IsInEffect()) {
- decrypt_config_ =
- DecryptConfig::CreateCbcsConfig(key_id, iv, {}, pattern);
- } else {
- decrypt_config_ =
- DecryptConfig::CreateCbcsConfig(key_id, iv, {}, base::nullopt);
- }
+ decrypt_config_ = DecryptConfig::CreateCbcsConfig(
+ key_id, iv, {}, initial_scheme_.pattern());
break;
}
}
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser.h b/chromium/media/formats/mp2t/mp2t_stream_parser.h
index e46327b723c..994150cd781 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser.h
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser.h
@@ -40,7 +40,7 @@ class MEDIA_EXPORT Mp2tStreamParser : public StreamParser {
~Mp2tStreamParser() override;
// StreamParser implementation.
- void Init(const InitCB& init_cb,
+ void Init(InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool ignore_text_tracks,
@@ -49,6 +49,7 @@ class MEDIA_EXPORT Mp2tStreamParser : public StreamParser {
const EndMediaSegmentCB& end_of_segment_cb,
MediaLog* media_log) override;
void Flush() override;
+ bool GetGenerateTimestampsFlag() const override;
bool Parse(const uint8_t* buf, int size) override;
private:
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc b/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
index a0ae6df897e..f3189e9e176 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
@@ -99,6 +99,7 @@ std::string DecryptSampleAES(const std::string& key,
while (bytes_remaining) {
int unused;
size_t amount_to_decrypt = has_pattern ? 16UL : bytes_remaining;
+ EXPECT_EQ(amount_to_decrypt % 16UL, 0UL);
EXPECT_EQ(EVP_CipherUpdate(ctx.get(), out_ptr, &unused, in_ptr,
amount_to_decrypt),
1);
@@ -106,7 +107,7 @@ std::string DecryptSampleAES(const std::string& key,
if (bytes_remaining) {
out_ptr += amount_to_decrypt;
in_ptr += amount_to_decrypt;
- size_t amount_to_skip = 144UL;
+ size_t amount_to_skip = 144UL; // Skip 9 blocks.
if (amount_to_skip > bytes_remaining)
amount_to_skip = bytes_remaining;
memcpy(out_ptr, in_ptr, amount_to_skip);
@@ -126,8 +127,12 @@ std::string DecryptBuffer(const StreamParserBuffer& buffer,
const EncryptionScheme& scheme) {
EXPECT_TRUE(scheme.is_encrypted());
EXPECT_TRUE(scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CBC);
- bool has_pattern = scheme.pattern().IsInEffect();
- EXPECT_TRUE(!has_pattern || scheme.pattern() == EncryptionPattern(1, 9));
+
+ // Audio streams use whole block full sample encryption (so pattern = {0,0}),
+ // so only the video stream uses pattern decryption. |has_pattern| is only
+ // used by DecryptSampleAES(), which assumes a {1,9} pattern if
+ // |has_pattern| = true.
+ bool has_pattern = scheme.pattern() == EncryptionPattern(1, 9);
std::string key;
EXPECT_TRUE(
@@ -225,8 +230,7 @@ class Mp2tStreamParserTest : public testing::Test {
}
void OnInit(const StreamParser::InitParameters& params) {
- DVLOG(1) << "OnInit: dur=" << params.duration.InMilliseconds()
- << ", autoTimestampOffset=" << params.auto_update_timestamp_offset;
+ DVLOG(1) << "OnInit: dur=" << params.duration.InMilliseconds();
}
bool OnNewConfig(std::unique_ptr<MediaTracks> tracks,
@@ -351,14 +355,18 @@ class Mp2tStreamParserTest : public testing::Test {
void InitializeParser() {
parser_->Init(
- base::Bind(&Mp2tStreamParserTest::OnInit, base::Unretained(this)),
- base::Bind(&Mp2tStreamParserTest::OnNewConfig, base::Unretained(this)),
- base::Bind(&Mp2tStreamParserTest::OnNewBuffers, base::Unretained(this)),
+ base::BindOnce(&Mp2tStreamParserTest::OnInit, base::Unretained(this)),
+ base::BindRepeating(&Mp2tStreamParserTest::OnNewConfig,
+ base::Unretained(this)),
+ base::BindRepeating(&Mp2tStreamParserTest::OnNewBuffers,
+ base::Unretained(this)),
true,
- base::Bind(&Mp2tStreamParserTest::OnKeyNeeded, base::Unretained(this)),
- base::Bind(&Mp2tStreamParserTest::OnNewSegment, base::Unretained(this)),
- base::Bind(&Mp2tStreamParserTest::OnEndOfSegment,
- base::Unretained(this)),
+ base::BindRepeating(&Mp2tStreamParserTest::OnKeyNeeded,
+ base::Unretained(this)),
+ base::BindRepeating(&Mp2tStreamParserTest::OnNewSegment,
+ base::Unretained(this)),
+ base::BindRepeating(&Mp2tStreamParserTest::OnEndOfSegment,
+ base::Unretained(this)),
&media_log_);
}
diff --git a/chromium/media/formats/mp4/avc.cc b/chromium/media/formats/mp4/avc.cc
index e9cb86c6b1d..509335b07e9 100644
--- a/chromium/media/formats/mp4/avc.cc
+++ b/chromium/media/formats/mp4/avc.cc
@@ -172,11 +172,6 @@ bool AVC::ConvertConfigToAnnexB(const AVCDecoderConfigurationRecord& avc_config,
}
// Verifies AnnexB NALU order according to ISO/IEC 14496-10 Section 7.4.1.2.3
-bool AVC::IsValidAnnexB(const std::vector<uint8_t>& buffer,
- const std::vector<SubsampleEntry>& subsamples) {
- return IsValidAnnexB(&buffer[0], buffer.size(), subsamples);
-}
-
bool AVC::IsValidAnnexB(const uint8_t* buffer,
size_t size,
const std::vector<SubsampleEntry>& subsamples) {
@@ -350,7 +345,7 @@ bool AVCBitstreamConverter::IsValid(
if (disable_validation_)
return true;
#endif // BUILDFLAG(ENABLE_DOLBY_VISION_DEMUXING)
- return AVC::IsValidAnnexB(*frame_buf, *subsamples);
+ return AVC::IsValidAnnexB(frame_buf->data(), frame_buf->size(), *subsamples);
}
} // namespace mp4
diff --git a/chromium/media/formats/mp4/avc.h b/chromium/media/formats/mp4/avc.h
index b9ce1c80230..655aa2f8653 100644
--- a/chromium/media/formats/mp4/avc.h
+++ b/chromium/media/formats/mp4/avc.h
@@ -49,10 +49,6 @@ class MEDIA_EXPORT AVC {
// |subsamples| contains the information about what parts of the buffer are
// encrypted and which parts are clear.
// Returns true if |buffer| contains conformant Annex B data
- // TODO(acolwell): Remove the std::vector version when we can use,
- // C++11's std::vector<T>::data() method.
- static bool IsValidAnnexB(const std::vector<uint8_t>& buffer,
- const std::vector<SubsampleEntry>& subsamples);
static bool IsValidAnnexB(const uint8_t* buffer,
size_t size,
const std::vector<SubsampleEntry>& subsamples);
diff --git a/chromium/media/formats/mp4/avc_unittest.cc b/chromium/media/formats/mp4/avc_unittest.cc
index 26c3fb3d125..0bca2a026fa 100644
--- a/chromium/media/formats/mp4/avc_unittest.cc
+++ b/chromium/media/formats/mp4/avc_unittest.cc
@@ -230,7 +230,7 @@ TEST_P(AVCConversionTest, ParseCorrectly) {
std::vector<SubsampleEntry> subsamples;
MakeInputForLength(GetParam(), &buf);
EXPECT_TRUE(AVC::ConvertFrameToAnnexB(GetParam(), &buf, &subsamples));
- EXPECT_TRUE(AVC::IsValidAnnexB(buf, subsamples));
+ EXPECT_TRUE(AVC::IsValidAnnexB(buf.data(), buf.size(), subsamples));
EXPECT_EQ(buf.size(), sizeof(kExpected));
EXPECT_EQ(0, memcmp(kExpected, &buf[0], sizeof(kExpected)));
EXPECT_EQ("P,SDC", AnnexBToString(buf, subsamples));
@@ -350,7 +350,7 @@ TEST_F(AVCConversionTest, StringConversionFunctions) {
std::vector<uint8_t> buf;
std::vector<SubsampleEntry> subsamples;
StringToAnnexB(str, &buf, &subsamples);
- EXPECT_TRUE(AVC::IsValidAnnexB(buf, subsamples));
+ EXPECT_TRUE(AVC::IsValidAnnexB(buf.data(), buf.size(), subsamples));
EXPECT_EQ(str, AnnexBToString(buf, subsamples));
}
@@ -380,8 +380,8 @@ TEST_F(AVCConversionTest, ValidAnnexBConstructs) {
std::vector<uint8_t> buf;
std::vector<SubsampleEntry> subsamples;
StringToAnnexB(test_cases[i], &buf, NULL);
- EXPECT_TRUE(AVC::IsValidAnnexB(buf, subsamples)) << "'" << test_cases[i]
- << "' failed";
+ EXPECT_TRUE(AVC::IsValidAnnexB(buf.data(), buf.size(), subsamples))
+ << "'" << test_cases[i] << "' failed";
}
}
@@ -405,8 +405,8 @@ TEST_F(AVCConversionTest, InvalidAnnexBConstructs) {
std::vector<uint8_t> buf;
std::vector<SubsampleEntry> subsamples;
StringToAnnexB(test_cases[i], &buf, NULL);
- EXPECT_FALSE(AVC::IsValidAnnexB(buf, subsamples)) << "'" << test_cases[i]
- << "' failed";
+ EXPECT_FALSE(AVC::IsValidAnnexB(buf.data(), buf.size(), subsamples))
+ << "'" << test_cases[i] << "' failed";
}
}
@@ -450,7 +450,7 @@ TEST_F(AVCConversionTest, InsertParamSetsAnnexB) {
EXPECT_TRUE(AVC::InsertParamSetsAnnexB(avc_config, &buf, &subsamples))
<< "'" << test_cases[i].input << "' insert failed.";
- EXPECT_TRUE(AVC::IsValidAnnexB(buf, subsamples))
+ EXPECT_TRUE(AVC::IsValidAnnexB(buf.data(), buf.size(), subsamples))
<< "'" << test_cases[i].input << "' created invalid AnnexB.";
EXPECT_EQ(test_cases[i].expected, AnnexBToString(buf, subsamples))
<< "'" << test_cases[i].input << "' generated unexpected output.";
diff --git a/chromium/media/formats/mp4/box_definitions.cc b/chromium/media/formats/mp4/box_definitions.cc
index 6def180d143..f3761045648 100644
--- a/chromium/media/formats/mp4/box_definitions.cc
+++ b/chromium/media/formats/mp4/box_definitions.cc
@@ -690,6 +690,81 @@ bool VPCodecConfigurationRecord::Parse(BoxReader* reader) {
return true;
}
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+AV1CodecConfigurationRecord::AV1CodecConfigurationRecord()
+ : profile(VIDEO_CODEC_PROFILE_UNKNOWN) {}
+
+AV1CodecConfigurationRecord::AV1CodecConfigurationRecord(
+ const AV1CodecConfigurationRecord& other) = default;
+
+AV1CodecConfigurationRecord::~AV1CodecConfigurationRecord() = default;
+
+FourCC AV1CodecConfigurationRecord::BoxType() const {
+ return FOURCC_AV1C;
+}
+
+// Parse the AV1CodecConfigurationRecord, which has the following format:
+// unsigned int (1) marker = 1;
+// unsigned int (7) version = 1;
+// unsigned int (3) seq_profile;
+// unsigned int (5) seq_level_idx_0;
+// unsigned int (1) seq_tier_0;
+// unsigned int (1) high_bitdepth;
+// unsigned int (1) twelve_bit;
+// unsigned int (1) monochrome;
+// unsigned int (1) chroma_subsampling_x;
+// unsigned int (1) chroma_subsampling_y;
+// unsigned int (2) chroma_sample_position;
+// unsigned int (3) reserved = 0;
+//
+// unsigned int (1) initial_presentation_delay_present;
+// if (initial_presentation_delay_present) {
+// unsigned int (4) initial_presentation_delay_minus_one;
+// } else {
+// unsigned int (4) reserved = 0;
+// }
+//
+// unsigned int (8)[] configOBUs;
+bool AV1CodecConfigurationRecord::Parse(BoxReader* reader) {
+ uint8_t av1c_byte = 0;
+ RCHECK(reader->Read1(&av1c_byte));
+ const uint8_t av1c_marker = av1c_byte >> 7;
+ if (!av1c_marker) {
+ MEDIA_LOG(ERROR, reader->media_log()) << "Unsupported av1C: marker unset.";
+ return false;
+ }
+
+ const uint8_t av1c_version = av1c_byte & 0b01111111;
+ if (av1c_version != 1) {
+ MEDIA_LOG(ERROR, reader->media_log())
+ << "Unsupported av1C: unexpected version number: " << av1c_version;
+ return false;
+ }
+
+ RCHECK(reader->Read1(&av1c_byte));
+ const uint8_t seq_profile = av1c_byte >> 5;
+ switch (seq_profile) {
+ case 0:
+ profile = AV1PROFILE_PROFILE_MAIN;
+ break;
+ case 1:
+ profile = AV1PROFILE_PROFILE_HIGH;
+ break;
+ case 2:
+ profile = AV1PROFILE_PROFILE_PRO;
+ break;
+ default:
+ MEDIA_LOG(ERROR, reader->media_log())
+ << "Unsupported av1C: unknown profile 0x" << std::hex << seq_profile;
+ return false;
+ }
+
+ // The remaining fields are ignored since we don't care about them yet.
+
+ return true;
+}
+#endif // BUILDFLAG(ENABLE_AV1_DECODER)
+
PixelAspectRatioBox::PixelAspectRatioBox() : h_spacing(1), v_spacing(1) {}
PixelAspectRatioBox::PixelAspectRatioBox(const PixelAspectRatioBox& other) =
default;
@@ -842,20 +917,19 @@ bool VideoSampleEntry::Parse(BoxReader* reader) {
#if BUILDFLAG(ENABLE_AV1_DECODER)
case FOURCC_AV01: {
DVLOG(2) << __func__ << " reading AV1 configuration.";
- // TODO(dalecurtis): AV1 profiles are not finalized, this needs updating
- // to read the actual profile and configuration before enabling for
- // release. http://crbug.com/784993
+ AV1CodecConfigurationRecord av1_config;
+ RCHECK(reader->ReadChild(&av1_config));
frame_bitstream_converter = nullptr;
video_codec = kCodecAV1;
- video_codec_profile = AV1PROFILE_PROFILE0;
+ video_codec_profile = av1_config.profile;
break;
}
#endif
default:
// Unknown/unsupported format
- MEDIA_LOG(ERROR, reader->media_log()) << __func__
- << " unsupported video format "
- << FourCCToString(actual_format);
+ MEDIA_LOG(ERROR, reader->media_log())
+ << "Unsupported VisualSampleEntry type "
+ << FourCCToString(actual_format);
return false;
}
diff --git a/chromium/media/formats/mp4/box_definitions.h b/chromium/media/formats/mp4/box_definitions.h
index 847c6b7e86b..d07ebbc8744 100644
--- a/chromium/media/formats/mp4/box_definitions.h
+++ b/chromium/media/formats/mp4/box_definitions.h
@@ -252,6 +252,14 @@ struct MEDIA_EXPORT VPCodecConfigurationRecord : Box {
VideoCodecProfile profile;
};
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+struct MEDIA_EXPORT AV1CodecConfigurationRecord : Box {
+ DECLARE_BOX_METHODS(AV1CodecConfigurationRecord);
+
+ VideoCodecProfile profile;
+};
+#endif
+
struct MEDIA_EXPORT PixelAspectRatioBox : Box {
DECLARE_BOX_METHODS(PixelAspectRatioBox);
diff --git a/chromium/media/formats/mp4/fourccs.h b/chromium/media/formats/mp4/fourccs.h
index 6d028ac788a..1b73d00c8d6 100644
--- a/chromium/media/formats/mp4/fourccs.h
+++ b/chromium/media/formats/mp4/fourccs.h
@@ -21,6 +21,7 @@ enum FourCC {
#endif
#if BUILDFLAG(ENABLE_AV1_DECODER)
FOURCC_AV01 = 0x61763031, // "av01"
+ FOURCC_AV1C = 0x61763143, // "av1C"
#endif
FOURCC_AVC1 = 0x61766331,
FOURCC_AVC3 = 0x61766333,
diff --git a/chromium/media/formats/mp4/hevc.cc b/chromium/media/formats/mp4/hevc.cc
index 275b02be7ee..ecf813d1a7a 100644
--- a/chromium/media/formats/mp4/hevc.cc
+++ b/chromium/media/formats/mp4/hevc.cc
@@ -141,10 +141,10 @@ bool HEVC::InsertParamSetsAnnexB(
const HEVCDecoderConfigurationRecord& hevc_config,
std::vector<uint8_t>* buffer,
std::vector<SubsampleEntry>* subsamples) {
- DCHECK(HEVC::IsValidAnnexB(*buffer, *subsamples));
+ DCHECK(HEVC::IsValidAnnexB(buffer->data(), buffer->size(), *subsamples));
std::unique_ptr<H265Parser> parser(new H265Parser());
- const uint8_t* start = &(*buffer)[0];
+ const uint8_t* start = buffer->data();
parser->SetEncryptedStream(start, buffer->size(), *subsamples);
H265NALU nalu;
@@ -179,7 +179,7 @@ bool HEVC::InsertParamSetsAnnexB(
buffer->insert(config_insert_point,
param_sets.begin(), param_sets.end());
- DCHECK(HEVC::IsValidAnnexB(*buffer, *subsamples));
+ DCHECK(HEVC::IsValidAnnexB(buffer->data(), buffer->size(), *subsamples));
return true;
}
@@ -205,11 +205,6 @@ bool HEVC::ConvertConfigToAnnexB(
}
// Verifies AnnexB NALU order according to section 7.4.2.4.4 of ISO/IEC 23008-2.
-bool HEVC::IsValidAnnexB(const std::vector<uint8_t>& buffer,
- const std::vector<SubsampleEntry>& subsamples) {
- return IsValidAnnexB(&buffer[0], buffer.size(), subsamples);
-}
-
bool HEVC::IsValidAnnexB(const uint8_t* buffer,
size_t size,
const std::vector<SubsampleEntry>& subsamples) {
@@ -251,7 +246,7 @@ bool HEVCBitstreamConverter::ConvertFrame(
bool HEVCBitstreamConverter::IsValid(
std::vector<uint8_t>* frame_buf,
std::vector<SubsampleEntry>* subsamples) const {
- return HEVC::IsValidAnnexB(*frame_buf, *subsamples);
+ return HEVC::IsValidAnnexB(frame_buf->data(), frame_buf->size(), *subsamples);
}
} // namespace mp4
diff --git a/chromium/media/formats/mp4/hevc.h b/chromium/media/formats/mp4/hevc.h
index 0d489570542..f3576f9b9b9 100644
--- a/chromium/media/formats/mp4/hevc.h
+++ b/chromium/media/formats/mp4/hevc.h
@@ -83,10 +83,6 @@ class MEDIA_EXPORT HEVC {
// |subsamples| contains the information about what parts of the buffer are
// encrypted and which parts are clear.
// Returns true if |buffer| contains conformant Annex B data
- // TODO(servolk): Remove the std::vector version when we can use,
- // C++11's std::vector<T>::data() method.
- static bool IsValidAnnexB(const std::vector<uint8_t>& buffer,
- const std::vector<SubsampleEntry>& subsamples);
static bool IsValidAnnexB(const uint8_t* buffer,
size_t size,
const std::vector<SubsampleEntry>& subsamples);
diff --git a/chromium/media/formats/mp4/mp4_stream_parser.cc b/chromium/media/formats/mp4/mp4_stream_parser.cc
index ee14479e26a..dc7645a0b99 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser.cc
@@ -96,7 +96,7 @@ MP4StreamParser::MP4StreamParser(const std::set<int>& audio_object_types,
MP4StreamParser::~MP4StreamParser() = default;
void MP4StreamParser::Init(
- const InitCB& init_cb,
+ InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool /* ignore_text_tracks */,
@@ -114,7 +114,7 @@ void MP4StreamParser::Init(
DCHECK(!end_of_segment_cb.is_null());
ChangeState(kParsingBoxes);
- init_cb_ = init_cb;
+ init_cb_ = std::move(init_cb);
config_cb_ = config_cb;
new_buffers_cb_ = new_buffers_cb;
encrypted_media_init_data_cb_ = encrypted_media_init_data_cb;
@@ -136,6 +136,10 @@ void MP4StreamParser::Flush() {
ChangeState(kParsingBoxes);
}
+bool MP4StreamParser::GetGenerateTimestampsFlag() const {
+ return false;
+}
+
bool MP4StreamParser::Parse(const uint8_t* buf, int size) {
DCHECK_NE(state_, kWaitingForInit);
@@ -628,7 +632,7 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
params.detected_audio_track_count = detected_audio_track_count;
params.detected_video_track_count = detected_video_track_count;
params.detected_text_track_count = detected_text_track_count;
- base::ResetAndReturn(&init_cb_).Run(params);
+ std::move(init_cb_).Run(params);
}
return true;
diff --git a/chromium/media/formats/mp4/mp4_stream_parser.h b/chromium/media/formats/mp4/mp4_stream_parser.h
index 585a789e807..45ee6f9c6e9 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser.h
+++ b/chromium/media/formats/mp4/mp4_stream_parser.h
@@ -39,7 +39,7 @@ class MEDIA_EXPORT MP4StreamParser : public StreamParser {
bool has_flac);
~MP4StreamParser() override;
- void Init(const InitCB& init_cb,
+ void Init(InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool ignore_text_tracks,
@@ -48,6 +48,7 @@ class MEDIA_EXPORT MP4StreamParser : public StreamParser {
const EndMediaSegmentCB& end_of_segment_cb,
MediaLog* media_log) override;
void Flush() override;
+ bool GetGenerateTimestampsFlag() const override;
bool Parse(const uint8_t* buf, int size) override;
// Calculates the rotation value from the track header display matricies.
diff --git a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
index dac384d7c87..5ce3fd89512 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
@@ -96,12 +96,9 @@ class MP4StreamParserTest : public testing::Test {
void InitF(const StreamParser::InitParameters& expected_params,
const StreamParser::InitParameters& params) {
- DVLOG(1) << "InitF: dur=" << params.duration.InMicroseconds()
- << ", autoTimestampOffset=" << params.auto_update_timestamp_offset;
+ DVLOG(1) << "InitF: dur=" << params.duration.InMicroseconds();
EXPECT_EQ(expected_params.duration, params.duration);
EXPECT_EQ(expected_params.timeline_offset, params.timeline_offset);
- EXPECT_EQ(expected_params.auto_update_timestamp_offset,
- params.auto_update_timestamp_offset);
EXPECT_EQ(expected_params.liveness, params.liveness);
EXPECT_EQ(expected_params.detected_audio_track_count,
params.detected_audio_track_count);
@@ -188,15 +185,20 @@ class MP4StreamParserTest : public testing::Test {
void InitializeParserWithInitParametersExpectations(
StreamParser::InitParameters params) {
- parser_->Init(
- base::Bind(&MP4StreamParserTest::InitF, base::Unretained(this), params),
- base::Bind(&MP4StreamParserTest::NewConfigF, base::Unretained(this)),
- base::Bind(&MP4StreamParserTest::NewBuffersF, base::Unretained(this)),
- true,
- base::Bind(&MP4StreamParserTest::KeyNeededF, base::Unretained(this)),
- base::Bind(&MP4StreamParserTest::NewSegmentF, base::Unretained(this)),
- base::Bind(&MP4StreamParserTest::EndOfSegmentF, base::Unretained(this)),
- &media_log_);
+ parser_->Init(base::BindOnce(&MP4StreamParserTest::InitF,
+ base::Unretained(this), params),
+ base::BindRepeating(&MP4StreamParserTest::NewConfigF,
+ base::Unretained(this)),
+ base::BindRepeating(&MP4StreamParserTest::NewBuffersF,
+ base::Unretained(this)),
+ true,
+ base::BindRepeating(&MP4StreamParserTest::KeyNeededF,
+ base::Unretained(this)),
+ base::BindRepeating(&MP4StreamParserTest::NewSegmentF,
+ base::Unretained(this)),
+ base::BindRepeating(&MP4StreamParserTest::EndOfSegmentF,
+ base::Unretained(this)),
+ &media_log_);
}
StreamParser::InitParameters GetDefaultInitParametersExpectations() {
@@ -348,7 +350,7 @@ TEST_F(MP4StreamParserTest, HEVC_in_MP4_container) {
bool expect_success = true;
#else
bool expect_success = false;
- EXPECT_MEDIA_LOG(ErrorLog("Parse unsupported video format hev1"));
+ EXPECT_MEDIA_LOG(ErrorLog("Unsupported VisualSampleEntry type hev1"));
#endif
auto params = GetDefaultInitParametersExpectations();
params.duration = base::TimeDelta::FromMicroseconds(1002000);
diff --git a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
index 8717ac299f6..5d791040d0a 100644
--- a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
+++ b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
@@ -62,7 +62,7 @@ MPEGAudioStreamParserBase::MPEGAudioStreamParserBase(uint32_t start_code_mask,
MPEGAudioStreamParserBase::~MPEGAudioStreamParserBase() = default;
void MPEGAudioStreamParserBase::Init(
- const InitCB& init_cb,
+ InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool ignore_text_tracks,
@@ -72,7 +72,7 @@ void MPEGAudioStreamParserBase::Init(
MediaLog* media_log) {
DVLOG(1) << __func__;
DCHECK_EQ(state_, UNINITIALIZED);
- init_cb_ = init_cb;
+ init_cb_ = std::move(init_cb);
config_cb_ = config_cb;
new_buffers_cb_ = new_buffers_cb;
new_segment_cb_ = new_segment_cb;
@@ -91,6 +91,10 @@ void MPEGAudioStreamParserBase::Flush() {
in_media_segment_ = false;
}
+bool MPEGAudioStreamParserBase::GetGenerateTimestampsFlag() const {
+ return true;
+}
+
bool MPEGAudioStreamParserBase::Parse(const uint8_t* buf, int size) {
DVLOG(1) << __func__ << "(" << size << ")";
DCHECK(buf);
@@ -229,8 +233,7 @@ int MPEGAudioStreamParserBase::ParseFrame(const uint8_t* data,
if (!init_cb_.is_null()) {
InitParameters params(kInfiniteDuration);
params.detected_audio_track_count = 1;
- params.auto_update_timestamp_offset = true;
- base::ResetAndReturn(&init_cb_).Run(params);
+ std::move(init_cb_).Run(params);
}
}
diff --git a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
index f0712380ef4..42495dcc059 100644
--- a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
+++ b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
@@ -34,7 +34,7 @@ class MEDIA_EXPORT MPEGAudioStreamParserBase : public StreamParser {
~MPEGAudioStreamParserBase() override;
// StreamParser implementation.
- void Init(const InitCB& init_cb,
+ void Init(InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool ignore_text_tracks,
@@ -43,6 +43,7 @@ class MEDIA_EXPORT MPEGAudioStreamParserBase : public StreamParser {
const EndMediaSegmentCB& end_of_segment_cb,
MediaLog* media_log) override;
void Flush() override;
+ bool GetGenerateTimestampsFlag() const override;
bool Parse(const uint8_t* buf, int size) override;
protected:
diff --git a/chromium/media/formats/webm/webm_cluster_parser.cc b/chromium/media/formats/webm/webm_cluster_parser.cc
index 1e2bb5a8e63..3b5ba5b7bef 100644
--- a/chromium/media/formats/webm/webm_cluster_parser.cc
+++ b/chromium/media/formats/webm/webm_cluster_parser.cc
@@ -629,7 +629,6 @@ WebMClusterParser::Track::Track(int track_num,
: track_num_(track_num),
track_type_(track_type),
default_duration_(default_duration),
- min_frame_duration_(kNoTimestamp),
max_frame_duration_(kNoTimestamp),
media_log_(media_log) {
DCHECK(default_duration_ == kNoTimestamp ||
@@ -718,19 +717,8 @@ void WebMClusterParser::Track::ApplyDurationEstimateIfNeeded() {
if (!last_added_buffer_missing_duration_)
return;
- bool constant_duration_estimate = false;
- last_added_buffer_missing_duration_->set_duration(
- GetDurationEstimate(&constant_duration_estimate));
-
- // Signal to downstream to make informed decisions about buffer adjacency
- // and splicing.
- if (constant_duration_estimate) {
- last_added_buffer_missing_duration_->set_duration_type(
- DurationType::kConstantEstimate);
- } else {
- last_added_buffer_missing_duration_->set_duration_type(
- DurationType::kRoughEstimate);
- }
+ last_added_buffer_missing_duration_->set_duration(GetDurationEstimate());
+ last_added_buffer_missing_duration_->set_is_duration_estimated(true);
LIMITED_MEDIA_LOG(INFO, media_log_, num_duration_estimates_,
kMaxDurationEstimateLogs)
@@ -784,22 +772,14 @@ bool WebMClusterParser::Track::QueueBuffer(
}
if (duration > base::TimeDelta()) {
- base::TimeDelta orig_min_duration = min_frame_duration_;
base::TimeDelta orig_max_duration = max_frame_duration_;
- if (min_frame_duration_ == kNoTimestamp) {
- DCHECK_EQ(max_frame_duration_, kNoTimestamp);
- min_frame_duration_ = max_frame_duration_ = duration;
+ if (max_frame_duration_ == kNoTimestamp) {
+ max_frame_duration_ = duration;
} else {
- min_frame_duration_ = std::min(min_frame_duration_, duration);
max_frame_duration_ = std::max(max_frame_duration_, duration);
}
- if (min_frame_duration_ != orig_min_duration) {
- DVLOG(3) << "Updated min duration estimate:" << orig_min_duration
- << " -> " << min_frame_duration_ << " at timestamp: "
- << buffer->GetDecodeTimestamp().InSecondsF();
- }
if (max_frame_duration_ != orig_max_duration) {
DVLOG(3) << "Updated max duration estimate:" << orig_max_duration
<< " -> " << max_frame_duration_ << " at timestamp: "
@@ -811,13 +791,10 @@ bool WebMClusterParser::Track::QueueBuffer(
return true;
}
-base::TimeDelta WebMClusterParser::Track::GetDurationEstimate(
- bool* constant_duration_estimate) {
- *constant_duration_estimate = false;
+base::TimeDelta WebMClusterParser::Track::GetDurationEstimate() {
base::TimeDelta duration;
- if (min_frame_duration_ == kNoTimestamp) {
- DCHECK_EQ(max_frame_duration_, kNoTimestamp);
+ if (max_frame_duration_ == kNoTimestamp) {
DVLOG(3) << __func__ << " : using hardcoded default duration";
if (track_type_ == TrackType::AUDIO) {
duration =
@@ -828,18 +805,10 @@ base::TimeDelta WebMClusterParser::Track::GetDurationEstimate(
base::TimeDelta::FromMilliseconds(kDefaultVideoBufferDurationInMs);
}
} else {
- *constant_duration_estimate = min_frame_duration_ == max_frame_duration_;
-
- if (track_type_ == TrackType::AUDIO) {
- // Audio uses min to avoid overtriggering splice trimming logic. See
- // http://crbug.com/396634
- duration = min_frame_duration_;
- } else {
- // Both kText and kVideo types safely use max because these formats don't
- // undergo trimming analagous to the audio splicing (no risk of over
- // trimming nor av sync loss).
- duration = max_frame_duration_;
- }
+ // Use max duration to minimize the risk of introducing gaps in the buffered
+ // range. For audio, this is still safe because overlap trimming is not
+ // applied to buffers where is_duration_estimated() = true.
+ duration = max_frame_duration_;
}
DCHECK(duration > base::TimeDelta());
diff --git a/chromium/media/formats/webm/webm_cluster_parser.h b/chromium/media/formats/webm/webm_cluster_parser.h
index 84d177ba793..ffc5ae251ae 100644
--- a/chromium/media/formats/webm/webm_cluster_parser.h
+++ b/chromium/media/formats/webm/webm_cluster_parser.h
@@ -108,9 +108,8 @@ class MEDIA_EXPORT WebMClusterParser : public WebMParserClient {
bool QueueBuffer(scoped_refptr<StreamParserBuffer> buffer);
// Helper that calculates the buffer duration to use in
- // ApplyDurationEstimateIfNeeded(). |constant_duration| will be set true
- // when track is so far comprised of all same-duration packets.
- base::TimeDelta GetDurationEstimate(bool* constant_duration);
+ // ApplyDurationEstimateIfNeeded().
+ base::TimeDelta GetDurationEstimate();
// Counts the number of estimated durations used in this track. Used to
// prevent log spam for MEDIA_LOG()s about estimated duration.
@@ -137,14 +136,10 @@ class MEDIA_EXPORT WebMClusterParser : public WebMParserClient {
// If kNoTimestamp, then |{min|max}_frame_duration_| will be used.
base::TimeDelta default_duration_;
- // Tracks the min/max durations seen for this track. Used to estimate block
- // durations at the end of clusters. Video uses maximum to minimize chance
- // of introudcing discontinuities. Audio uses minimum to minimize chance of
- // overtriggering splice logic, which may lead to AV sync loss. Keeping both
- // min and max allows us to detect cases where min==max, which gives high
- // enough confidence in estimate to potentially allow splicing. Research
- // is ongoing. See http://crbug.com/396634.
- base::TimeDelta min_frame_duration_;
+ // Tracks the max duration seen for this track. Used to estimate block
+ // durations at the end of clusters. Max minimizes the chance of introducing
+ // gaps in the buffered range and is safe for audio because we don't splice
+ // on estimated durations. See http://crbug.com/396634.
base::TimeDelta max_frame_duration_;
MediaLog* media_log_;
diff --git a/chromium/media/formats/webm/webm_cluster_parser_unittest.cc b/chromium/media/formats/webm/webm_cluster_parser_unittest.cc
index b5d0b8c61a4..b605117e358 100644
--- a/chromium/media/formats/webm/webm_cluster_parser_unittest.cc
+++ b/chromium/media/formats/webm/webm_cluster_parser_unittest.cc
@@ -830,11 +830,9 @@ TEST_F(WebMClusterParserTest, ParseWithoutAnyDurationsSimpleBlocks) {
// Absent DefaultDuration information, SimpleBlock durations are derived from
// inter-buffer track timestamp delta if within the cluster. Duration for the
// last block in a cluster is estimated independently for each track in the
- // cluster. For video tracks we use the maximum seen so far. For audio we use
- // the the minimum.
- // TODO(chcunningham): Move audio over to use the maximum.
+ // cluster using the maximum seen so far.
- const int kExpectedAudioEstimationInMs = 22;
+ const int kExpectedAudioEstimationInMs = 23;
const int kExpectedVideoEstimationInMs = 34;
const BlockInfo kBlockInfo1[] = {
{kAudioTrackNum, 0, 23, true, NULL, 0, false},
@@ -902,11 +900,9 @@ TEST_F(WebMClusterParserTest, ParseWithoutAnyDurationsBlockGroups) {
// Absent DefaultDuration and BlockDuration information, BlockGroup block
// durations are derived from inter-buffer track timestamp delta if within the
// cluster. Duration for the last block in a cluster is estimated
- // independently for each track in the cluster. For video tracks we use the
- // maximum seen so far. For audio we use the the minimum.
- // TODO(chcunningham): Move audio over to use the maximum.
+ // independently for each track in the cluster using the maximum seen so far.
- const int kExpectedAudioEstimationInMs = 22;
+ const int kExpectedAudioEstimationInMs = 23;
const int kExpectedVideoEstimationInMs = 34;
const BlockInfo kBlockInfo1[] = {
{kAudioTrackNum, 0, -23, false, NULL, 0, false},
diff --git a/chromium/media/formats/webm/webm_stream_parser.cc b/chromium/media/formats/webm/webm_stream_parser.cc
index e398713f44c..ee899fe8e91 100644
--- a/chromium/media/formats/webm/webm_stream_parser.cc
+++ b/chromium/media/formats/webm/webm_stream_parser.cc
@@ -30,7 +30,7 @@ WebMStreamParser::WebMStreamParser()
WebMStreamParser::~WebMStreamParser() = default;
void WebMStreamParser::Init(
- const InitCB& init_cb,
+ InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool ignore_text_tracks,
@@ -48,7 +48,7 @@ void WebMStreamParser::Init(
DCHECK(!end_of_segment_cb.is_null());
ChangeState(kParsingHeaders);
- init_cb_ = init_cb;
+ init_cb_ = std::move(init_cb);
config_cb_ = config_cb;
new_buffers_cb_ = new_buffers_cb;
ignore_text_tracks_ = ignore_text_tracks;
@@ -68,6 +68,10 @@ void WebMStreamParser::Flush() {
ChangeState(kParsingHeaders);
}
+bool WebMStreamParser::GetGenerateTimestampsFlag() const {
+ return false;
+}
+
bool WebMStreamParser::Parse(const uint8_t* buf, int size) {
DCHECK_NE(state_, kWaitingForInit);
@@ -249,7 +253,7 @@ int WebMStreamParser::ParseInfoAndTracks(const uint8_t* data, int size) {
tracks_parser.detected_video_track_count();
params.detected_text_track_count =
tracks_parser.detected_text_track_count();
- base::ResetAndReturn(&init_cb_).Run(params);
+ std::move(init_cb_).Run(params);
}
return bytes_parsed;
diff --git a/chromium/media/formats/webm/webm_stream_parser.h b/chromium/media/formats/webm/webm_stream_parser.h
index bc79c605326..dc0b9167a74 100644
--- a/chromium/media/formats/webm/webm_stream_parser.h
+++ b/chromium/media/formats/webm/webm_stream_parser.h
@@ -28,7 +28,7 @@ class MEDIA_EXPORT WebMStreamParser : public StreamParser {
~WebMStreamParser() override;
// StreamParser implementation.
- void Init(const InitCB& init_cb,
+ void Init(InitCB init_cb,
const NewConfigCB& config_cb,
const NewBuffersCB& new_buffers_cb,
bool ignore_text_tracks,
@@ -37,6 +37,7 @@ class MEDIA_EXPORT WebMStreamParser : public StreamParser {
const EndMediaSegmentCB& end_of_segment_cb,
MediaLog* media_log) override;
void Flush() override;
+ bool GetGenerateTimestampsFlag() const override;
bool Parse(const uint8_t* buf, int size) override;
private:
diff --git a/chromium/media/formats/webm/webm_stream_parser_unittest.cc b/chromium/media/formats/webm/webm_stream_parser_unittest.cc
index cfc3d6fc66c..c6180d6273c 100644
--- a/chromium/media/formats/webm/webm_stream_parser_unittest.cc
+++ b/chromium/media/formats/webm/webm_stream_parser_unittest.cc
@@ -41,18 +41,19 @@ class WebMStreamParserTest : public testing::Test {
EXPECT_CALL(*this, NewBuffersCB(_))
.Times(testing::AnyNumber())
.WillRepeatedly(testing::Return(true));
- parser_->Init(
- base::Bind(&WebMStreamParserTest::InitF, base::Unretained(this),
- expected_params),
- base::Bind(&WebMStreamParserTest::NewConfigCB, base::Unretained(this)),
- base::Bind(&WebMStreamParserTest::NewBuffersCB, base::Unretained(this)),
- false, // don't ignore_text_track
- encrypted_media_init_data_cb,
- base::Bind(&WebMStreamParserTest::NewMediaSegmentCB,
- base::Unretained(this)),
- base::Bind(&WebMStreamParserTest::EndMediaSegmentCB,
- base::Unretained(this)),
- &media_log_);
+ parser_->Init(base::BindOnce(&WebMStreamParserTest::InitF,
+ base::Unretained(this), expected_params),
+ base::BindRepeating(&WebMStreamParserTest::NewConfigCB,
+ base::Unretained(this)),
+ base::BindRepeating(&WebMStreamParserTest::NewBuffersCB,
+ base::Unretained(this)),
+ false, // don't ignore_text_track
+ encrypted_media_init_data_cb,
+ base::BindRepeating(&WebMStreamParserTest::NewMediaSegmentCB,
+ base::Unretained(this)),
+ base::BindRepeating(&WebMStreamParserTest::EndMediaSegmentCB,
+ base::Unretained(this)),
+ &media_log_);
bool result = parser_->Parse(buffer->data(), buffer->data_size());
EXPECT_TRUE(result);
}
diff --git a/chromium/media/formats/webm/webm_video_client.cc b/chromium/media/formats/webm/webm_video_client.cc
index 61bc28ed979..813a0863eef 100644
--- a/chromium/media/formats/webm/webm_video_client.cc
+++ b/chromium/media/formats/webm/webm_video_client.cc
@@ -66,11 +66,11 @@ bool WebMVideoClient::InitializeConfig(
profile = GetVP9CodecProfile(codec_private);
#if BUILDFLAG(ENABLE_AV1_DECODER)
} else if (codec_id == "V_AV1") {
- // TODO(dalecurtis): AV1 profiles are not finalized, this needs updating
- // to read the actual profile and configuration before enabling for
+ // TODO(dalecurtis): AV1 profiles in WebM are not finalized, this needs
+ // updating to read the actual profile and configuration before enabling for
// release. http://crbug.com/784993
video_codec = kCodecAV1;
- profile = AV1PROFILE_PROFILE0;
+ profile = AV1PROFILE_PROFILE_MAIN;
#endif
} else {
MEDIA_LOG(ERROR, media_log_) << "Unsupported video codec_id " << codec_id;
diff --git a/chromium/media/gpu/BUILD.gn b/chromium/media/gpu/BUILD.gn
index 28436882c29..1e56dc25830 100644
--- a/chromium/media/gpu/BUILD.gn
+++ b/chromium/media/gpu/BUILD.gn
@@ -105,6 +105,7 @@ component("gpu") {
"gpu_video_decode_accelerator_factory.h",
"gpu_video_encode_accelerator_factory.cc",
"gpu_video_encode_accelerator_factory.h",
+ "image_processor.h",
]
public_deps = [
@@ -148,9 +149,6 @@ component("gpu") {
if (is_android) {
sources += [
- "android/android_image_reader_abi.h",
- "android/android_image_reader_compat.cc",
- "android/android_image_reader_compat.h",
"android/android_video_decode_accelerator.cc",
"android/android_video_decode_accelerator.h",
"android/android_video_encode_accelerator.cc",
@@ -175,12 +173,10 @@ component("gpu") {
"android/codec_image_group.h",
"android/codec_wrapper.cc",
"android/codec_wrapper.h",
- "android/content_video_view_overlay.cc",
- "android/content_video_view_overlay.h",
- "android/content_video_view_overlay_allocator.cc",
- "android/content_video_view_overlay_allocator.h",
"android/device_info.cc",
"android/device_info.h",
+ "android/image_reader_gl_owner.cc",
+ "android/image_reader_gl_owner.h",
"android/media_codec_video_decoder.cc",
"android/media_codec_video_decoder.h",
"android/promotion_hint_aggregator.h",
@@ -194,17 +190,17 @@ component("gpu") {
"android/texture_owner.h",
"android/texture_pool.cc",
"android/texture_pool.h",
- "android/texture_wrapper.cc",
- "android/texture_wrapper.h",
"android/video_frame_factory.h",
"android/video_frame_factory_impl.cc",
"android/video_frame_factory_impl.h",
]
+ libs += [ "android" ]
deps += [
# TODO(crbug.com/789435): This can be removed once CdmManager is removed.
"//media/mojo:buildflags",
"//services/service_manager/public/cpp:cpp",
"//third_party/libyuv",
+ "//ui/gl:gl_jni_headers",
]
# TODO(crbug.com/789435): This is needed for AVDA to access the CDM
@@ -259,6 +255,7 @@ component("gpu") {
if (is_win) {
sources += [
+ "windows/d3d11_create_device_cb.h",
"windows/d3d11_h264_accelerator.cc",
"windows/d3d11_h264_accelerator.h",
"windows/d3d11_picture_buffer.cc",
@@ -326,8 +323,6 @@ source_set("common") {
"h264_decoder.h",
"h264_dpb.cc",
"h264_dpb.h",
- "shared_memory_region.cc",
- "shared_memory_region.h",
]
if (use_v4l2_codec || use_vaapi) {
sources += [
@@ -392,6 +387,7 @@ if (is_win || is_android || use_v4l2_codec || use_vaapi) {
"video_decode_accelerator_unittest.cc",
]
deps += [
+ "//mojo/core/embedder",
"//ui/display",
"//ui/display/types",
"//ui/platform_window",
@@ -431,16 +427,15 @@ source_set("android_video_decode_accelerator_unittests") {
if (is_android) {
testonly = true
sources = [
- "android/android_image_reader_compat_unittest.cc",
"android/android_video_decode_accelerator_unittest.cc",
"android/android_video_surface_chooser_impl_unittest.cc",
"android/avda_codec_allocator_unittest.cc",
"android/codec_image_group_unittest.cc",
"android/codec_image_unittest.cc",
"android/codec_wrapper_unittest.cc",
- "android/content_video_view_overlay_allocator_unittest.cc",
"android/fake_codec_allocator.cc",
"android/fake_codec_allocator.h",
+ "android/image_reader_gl_owner_unittest.cc",
"android/media_codec_video_decoder_unittest.cc",
"android/mock_android_video_surface_chooser.cc",
"android/mock_android_video_surface_chooser.h",
@@ -552,6 +547,12 @@ if (use_v4l2_codec || use_vaapi) {
"jpeg_decode_accelerator_unittest.cc",
"test/video_accelerator_unittest_helpers.h",
]
+ data = [
+ "//media/test/data/peach_pi-1280x720.jpg",
+ "//media/test/data/peach_pi-40x23.jpg",
+ "//media/test/data/peach_pi-41x22.jpg",
+ "//media/test/data/peach_pi-41x23.jpg",
+ ]
if (use_x11) {
deps += [ "//ui/gfx/x" ]
}
@@ -596,9 +597,13 @@ source_set("unit_tests") {
if (use_vaapi) {
deps += [ "//media/gpu/vaapi:unit_test" ]
}
+ if (use_v4l2_codec || use_vaapi) {
+ sources += [ "vp8_decoder_unittest.cc" ]
+ }
if (is_win && enable_library_cdms) {
sources += [
"windows/d3d11_cdm_proxy_unittest.cc",
+ "windows/d3d11_decryptor_unittest.cc",
"windows/d3d11_mocks.cc",
"windows/d3d11_mocks.h",
"windows/d3d11_video_decoder_unittest.cc",
diff --git a/chromium/media/gpu/DEPS b/chromium/media/gpu/DEPS
index 41a66a57415..0a84e8474b5 100644
--- a/chromium/media/gpu/DEPS
+++ b/chromium/media/gpu/DEPS
@@ -1,5 +1,6 @@
# Do NOT add net/ or ui/base without a great reason, they're huge!
include_rules = [
+ "+mojo/core/embedder",
"+services/service_manager/public",
"+third_party/angle",
"+third_party/libyuv",
diff --git a/chromium/media/gpu/OWNERS b/chromium/media/gpu/OWNERS
index 2581cf75d0e..0a32d675a73 100644
--- a/chromium/media/gpu/OWNERS
+++ b/chromium/media/gpu/OWNERS
@@ -9,3 +9,6 @@ wuchengli@chromium.org
# For Android media gpu files.
per-file *android*=liberato@chromium.org
per-file *avda*=liberato@chromium.org
+
+# For Mac encoder files.
+per-file *vt_video_encode*=emircan@chromium.org
diff --git a/chromium/media/gpu/accelerated_video_decoder.h b/chromium/media/gpu/accelerated_video_decoder.h
index 67800c501ab..ee9dc93ee26 100644
--- a/chromium/media/gpu/accelerated_video_decoder.h
+++ b/chromium/media/gpu/accelerated_video_decoder.h
@@ -55,8 +55,10 @@ class MEDIA_GPU_EXPORT AcceleratedVideoDecoder {
kRanOutOfSurfaces, // Waiting for the client to free up output surfaces.
kNeedContextUpdate, // Waiting for the client to update decoding context
// with data acquired from the accelerator.
- kNoKey, // The buffer is encrypted and could not be processed because the
- // key for decryption is missing.
+ kTryAgain, // The accelerator needs additional data (independently
+ // provided) in order to proceed. This may be a new key in order to decrypt
+ // encrypted data, or existing hardware resources freed so that they can be
+ // reused. Decoding can resume once the data has been provided.
};
// Try to decode more of the stream, returning decoded frames asynchronously.
@@ -70,6 +72,14 @@ class MEDIA_GPU_EXPORT AcceleratedVideoDecoder {
virtual gfx::Size GetPicSize() const = 0;
virtual size_t GetRequiredNumOfPictures() const = 0;
+ // About 3 secs for 30 fps video. When the new sized keyframe is missed, the
+ // decoder cannot decode the frame. The number of frames are skipped until
+ // getting new keyframe. If dropping more than the number of frames, the
+ // decoder reports decode error, which may take longer time to recover it.
+ // The number is the sweet spot which the decoder can tolerate to handle the
+ // missing keyframe by itself. In addition, this situation is exceptional.
+ static constexpr size_t kVPxMaxNumOfSizeChangeFailures = 75;
+
private:
DISALLOW_COPY_AND_ASSIGN(AcceleratedVideoDecoder);
};
diff --git a/chromium/media/gpu/android/android_image_reader_abi.h b/chromium/media/gpu/android/android_image_reader_abi.h
deleted file mode 100644
index 02e1d3871eb..00000000000
--- a/chromium/media/gpu/android/android_image_reader_abi.h
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_ABI_H_
-#define MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_ABI_H_
-
-// Minimal binary interface definitions for AImage,AImageReader
-// and ANativeWindow based on include/media/NdkImage.h,
-// include/media/NdkImageReader.h and include/android/native_window_jni.h
-// from the Android NDK for platform level 26+. This is only
-// intended for use from the AndroidImageReader wrapper for building
-// without NDK platform level support, it is not a general-use header
-// and is not complete. Only the functions/data types which
-// are currently needed by media/gpu/android/image_reader_gl_owner.h are
-// included in this ABI
-//
-// Please refer to the API documentation for details:
-// https://developer.android.com/ndk/reference/group/media (AIMage and
-// AImageReader)
-// https://developer.android.com/ndk/reference/group/native-activity
-// (ANativeWindow)
-
-#include <android/native_window.h>
-#include <media/NdkMediaError.h>
-
-#include <jni.h>
-#include <stdint.h>
-
-// Use "C" linkage to match the original header file. This isn't strictly
-// required since the file is not declaring global functions, but the types
-// should remain in the global namespace for compatibility, and it's a reminder
-// that forward declarations elsewhere should use "extern "C" to avoid
-// namespace issues.
-extern "C" {
-
-// For AImage
-typedef struct AHardwareBuffer AHardwareBuffer;
-
-typedef struct AImage AImage;
-
-enum AIMAGE_FORMATS {
- AIMAGE_FORMAT_YUV_420_888 = 0x23,
- IMAGE_FORMAT_PRIVATE = 0x22
-};
-
-using pAImage_delete = void (*)(AImage* image);
-
-using pAImage_getHardwareBuffer = media_status_t (*)(const AImage* image,
- AHardwareBuffer** buffer);
-
-using pAImage_getWidth = media_status_t (*)(const AImage* image,
- int32_t* width);
-
-using pAImage_getHeight = media_status_t (*)(const AImage* image,
- int32_t* height);
-
-// For AImageReader
-
-typedef struct AImageReader AImageReader;
-
-typedef void (*AImageReader_ImageCallback)(void* context, AImageReader* reader);
-
-typedef struct AImageReader_ImageListener {
- void* context;
- AImageReader_ImageCallback onImageAvailable;
-} AImageReader_ImageListener;
-
-using pAImageReader_new = media_status_t (*)(int32_t width,
- int32_t height,
- int32_t format,
- int32_t maxImages,
- AImageReader** reader);
-
-using pAImageReader_setImageListener =
- media_status_t (*)(AImageReader* reader,
- AImageReader_ImageListener* listener);
-
-using pAImageReader_delete = void (*)(AImageReader* reader);
-
-using pAImageReader_getWindow = media_status_t (*)(AImageReader* reader,
- ANativeWindow** window);
-
-using pAImageReader_acquireLatestImageAsync =
- media_status_t (*)(AImageReader* reader,
- AImage** image,
- int* acquireFenceFd);
-
-// For ANativeWindow
-using pANativeWindow_toSurface = jobject (*)(JNIEnv* env,
- ANativeWindow* window);
-
-} // extern "C"
-
-#endif // MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_ABI_H_
diff --git a/chromium/media/gpu/android/android_image_reader_compat.cc b/chromium/media/gpu/android/android_image_reader_compat.cc
deleted file mode 100644
index 19230ed84db..00000000000
--- a/chromium/media/gpu/android/android_image_reader_compat.cc
+++ /dev/null
@@ -1,138 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/android_image_reader_compat.h"
-
-#include <dlfcn.h>
-
-#include "base/android/build_info.h"
-#include "base/feature_list.h"
-#include "base/logging.h"
-#include "media/base/media_switches.h"
-
-#define LOAD_FUNCTION(lib, func) \
- do { \
- func##_ = reinterpret_cast<p##func>(dlsym(lib, #func)); \
- if (!func##_) { \
- DLOG(ERROR) << "Unable to load function " << #func; \
- return false; \
- } \
- } while (0)
-
-namespace media {
-
-AndroidImageReader& AndroidImageReader::GetInstance() {
- // C++11 static local variable initialization is
- // thread-safe.
- static base::NoDestructor<AndroidImageReader> instance;
- return *instance;
-}
-
-bool AndroidImageReader::IsSupported() {
- return is_supported_;
-}
-
-AndroidImageReader::AndroidImageReader() {
- is_supported_ =
- base::FeatureList::IsEnabled(media::kAImageReaderVideoOutput) &&
- LoadFunctions();
-}
-
-bool AndroidImageReader::LoadFunctions() {
- // If the Chromium build requires __ANDROID_API__ >= 26 at some
- // point in the future, we could directly use the global functions instead of
- // dynamic loading. However, since this would be incompatible with pre-Oreo
- // devices, this is unlikely to happen in the foreseeable future, so we use
- // dynamic loading.
-
- // Functions are not present for android version older than OREO
- if (base::android::BuildInfo::GetInstance()->sdk_int() <
- base::android::SDK_VERSION_OREO) {
- return false;
- }
-
- void* libmediandk = dlopen("libmediandk.so", RTLD_NOW);
- if (libmediandk == nullptr) {
- LOG(ERROR) << "Couldnt open libmediandk.so";
- return false;
- }
-
- LOAD_FUNCTION(libmediandk, AImage_delete);
- LOAD_FUNCTION(libmediandk, AImage_getHardwareBuffer);
- LOAD_FUNCTION(libmediandk, AImage_getWidth);
- LOAD_FUNCTION(libmediandk, AImage_getHeight);
- LOAD_FUNCTION(libmediandk, AImageReader_new);
- LOAD_FUNCTION(libmediandk, AImageReader_setImageListener);
- LOAD_FUNCTION(libmediandk, AImageReader_delete);
- LOAD_FUNCTION(libmediandk, AImageReader_getWindow);
- LOAD_FUNCTION(libmediandk, AImageReader_acquireLatestImageAsync);
-
- void* libandroid = dlopen("libandroid.so", RTLD_NOW);
- if (libandroid == nullptr) {
- LOG(ERROR) << "Couldnt open libandroid.so";
- return false;
- }
-
- LOAD_FUNCTION(libandroid, ANativeWindow_toSurface);
-
- return true;
-}
-
-void AndroidImageReader::AImage_delete(AImage* image) {
- AImage_delete_(image);
-}
-
-media_status_t AndroidImageReader::AImage_getHardwareBuffer(
- const AImage* image,
- AHardwareBuffer** buffer) {
- return AImage_getHardwareBuffer_(image, buffer);
-}
-
-media_status_t AndroidImageReader::AImage_getWidth(const AImage* image,
- int32_t* width) {
- return AImage_getWidth_(image, width);
-}
-
-media_status_t AndroidImageReader::AImage_getHeight(const AImage* image,
- int32_t* height) {
- return AImage_getHeight_(image, height);
-}
-
-media_status_t AndroidImageReader::AImageReader_new(int32_t width,
- int32_t height,
- int32_t format,
- int32_t maxImages,
- AImageReader** reader) {
- return AImageReader_new_(width, height, format, maxImages, reader);
-}
-
-media_status_t AndroidImageReader::AImageReader_setImageListener(
- AImageReader* reader,
- AImageReader_ImageListener* listener) {
- return AImageReader_setImageListener_(reader, listener);
-}
-
-void AndroidImageReader::AImageReader_delete(AImageReader* reader) {
- AImageReader_delete_(reader);
-}
-
-media_status_t AndroidImageReader::AImageReader_getWindow(
- AImageReader* reader,
- ANativeWindow** window) {
- return AImageReader_getWindow_(reader, window);
-}
-
-media_status_t AndroidImageReader::AImageReader_acquireLatestImageAsync(
- AImageReader* reader,
- AImage** image,
- int* acquireFenceFd) {
- return AImageReader_acquireLatestImageAsync_(reader, image, acquireFenceFd);
-}
-
-jobject AndroidImageReader::ANativeWindow_toSurface(JNIEnv* env,
- ANativeWindow* window) {
- return ANativeWindow_toSurface_(env, window);
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/android_image_reader_compat.h b/chromium/media/gpu/android/android_image_reader_compat.h
deleted file mode 100644
index 4dd2ccdaae2..00000000000
--- a/chromium/media/gpu/android/android_image_reader_compat.h
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_COMPAT_H_
-#define MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_COMPAT_H_
-
-#include "base/macros.h"
-#include "base/no_destructor.h"
-#include "media/gpu/android/android_image_reader_abi.h"
-#include "media/gpu/media_gpu_export.h"
-
-namespace media {
-
-// This class provides runtime support for working with AImage, AImageReader and
-// ANativeWindow objects on Android O systems without requiring building for the
-// Android O NDK level. Don't call GetInstance() unless IsSupported() returns
-// true.
-class MEDIA_GPU_EXPORT AndroidImageReader {
- public:
- // Thread safe GetInstance.
- static AndroidImageReader& GetInstance();
-
- // Check if the image reader usage is supported. This function returns TRUE
- // if android version is >=OREO, the media flag is enabled and all the
- // required functions are loaded.
- bool IsSupported();
-
- // Naming convention of all the below functions are chosen to exactly match
- // the function names in the NDK.
- void AImage_delete(AImage* image);
- media_status_t AImage_getHardwareBuffer(const AImage* image,
- AHardwareBuffer** buffer);
- media_status_t AImage_getWidth(const AImage* image, int32_t* width);
- media_status_t AImage_getHeight(const AImage* image, int32_t* height);
- media_status_t AImageReader_new(int32_t width,
- int32_t height,
- int32_t format,
- int32_t maxImages,
- AImageReader** reader);
- media_status_t AImageReader_setImageListener(
- AImageReader* reader,
- AImageReader_ImageListener* listener);
- void AImageReader_delete(AImageReader* reader);
- media_status_t AImageReader_getWindow(AImageReader* reader,
- ANativeWindow** window);
- media_status_t AImageReader_acquireLatestImageAsync(AImageReader* reader,
- AImage** image,
- int* acquireFenceFd);
- jobject ANativeWindow_toSurface(JNIEnv* env, ANativeWindow* window);
-
- private:
- friend class base::NoDestructor<AndroidImageReader>;
-
- AndroidImageReader();
- bool LoadFunctions();
-
- bool is_supported_;
- pAImage_delete AImage_delete_;
- pAImage_getHardwareBuffer AImage_getHardwareBuffer_;
- pAImage_getWidth AImage_getWidth_;
- pAImage_getHeight AImage_getHeight_;
- pAImageReader_new AImageReader_new_;
- pAImageReader_setImageListener AImageReader_setImageListener_;
- pAImageReader_delete AImageReader_delete_;
- pAImageReader_getWindow AImageReader_getWindow_;
- pAImageReader_acquireLatestImageAsync AImageReader_acquireLatestImageAsync_;
- pANativeWindow_toSurface ANativeWindow_toSurface_;
-
- DISALLOW_COPY_AND_ASSIGN(AndroidImageReader);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_COMPAT_H_
diff --git a/chromium/media/gpu/android/android_image_reader_compat_unittest.cc b/chromium/media/gpu/android/android_image_reader_compat_unittest.cc
deleted file mode 100644
index 622225519a9..00000000000
--- a/chromium/media/gpu/android/android_image_reader_compat_unittest.cc
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/android_image_reader_compat.h"
-
-#include <stdint.h>
-#include <memory>
-
-#include "base/android/build_info.h"
-#include "base/test/scoped_feature_list.h"
-#include "media/base/media_switches.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-namespace media {
-
-class AndroidImageReaderTest : public testing::Test {
- public:
- AndroidImageReaderTest() {
- scoped_feature_list_.InitAndEnableFeature(media::kAImageReaderVideoOutput);
- }
- ~AndroidImageReaderTest() override = default;
-
- private:
- base::test::ScopedFeatureList scoped_feature_list_;
-};
-
-// Getting instance of AndroidImageReader will invoke AndroidImageReader
-// constructor which will dlopen the mediandk and androidndk .so files and do
-// all the required symbol lookups.
-TEST_F(AndroidImageReaderTest, GetImageReaderInstance) {
- // It is expected that image reader support will be available from android
- // version OREO.
- EXPECT_EQ(AndroidImageReader::GetInstance().IsSupported(),
- base::android::BuildInfo::GetInstance()->sdk_int() >=
- base::android::SDK_VERSION_OREO);
-}
-
-// There should be only 1 instance of AndroidImageReader im memory. Hence 2
-// instances should have same memory address.
-TEST_F(AndroidImageReaderTest, CompareImageReaderInstance) {
- AndroidImageReader& a1 = AndroidImageReader::GetInstance();
- AndroidImageReader& a2 = AndroidImageReader::GetInstance();
- ASSERT_EQ(&a1, &a2);
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/android_video_decode_accelerator.cc b/chromium/media/gpu/android/android_video_decode_accelerator.cc
index b9624012ad6..f8d5a17b884 100644
--- a/chromium/media/gpu/android/android_video_decode_accelerator.cc
+++ b/chromium/media/gpu/android/android_video_decode_accelerator.cc
@@ -35,13 +35,12 @@
#include "media/base/media_switches.h"
#include "media/base/media_util.h"
#include "media/base/timestamp_constants.h"
+#include "media/base/unaligned_shared_memory.h"
#include "media/base/video_decoder_config.h"
#include "media/gpu/android/android_video_surface_chooser_impl.h"
#include "media/gpu/android/avda_picture_buffer_manager.h"
-#include "media/gpu/android/content_video_view_overlay.h"
#include "media/gpu/android/device_info.h"
#include "media/gpu/android/promotion_hint_aggregator_impl.h"
-#include "media/gpu/shared_memory_region.h"
#include "media/media_buildflags.h"
#include "media/mojo/buildflags.h"
#include "media/video/picture.h"
@@ -125,7 +124,7 @@ bool ShouldDeferSurfaceCreation(AVDACodecAllocator* codec_allocator,
DeviceInfo* device_info) {
// TODO(liberato): We might still want to defer if we've got a routing
// token. It depends on whether we want to use it right away or not.
- if (overlay_info.HasValidSurfaceId() || overlay_info.HasValidRoutingToken())
+ if (overlay_info.HasValidRoutingToken())
return false;
return codec == kCodecH264 && codec_allocator->IsAnyRegisteredAVDA() &&
@@ -247,8 +246,12 @@ static AVDAManager* GetManager() {
AndroidVideoDecodeAccelerator::BitstreamRecord::BitstreamRecord(
const BitstreamBuffer& bitstream_buffer)
: buffer(bitstream_buffer) {
- if (buffer.id() != -1)
- memory.reset(new SharedMemoryRegion(buffer, true));
+ if (buffer.id() != -1) {
+ memory.reset(new WritableUnalignedMapping(buffer.handle(), buffer.size(),
+ buffer.offset()));
+ // The handle is no longer needed and can be closed.
+ bitstream_buffer.handle().Close();
+ }
}
AndroidVideoDecodeAccelerator::BitstreamRecord::BitstreamRecord(
@@ -375,10 +378,6 @@ bool AndroidVideoDecodeAccelerator::Initialize(const Config& config,
return false;
}
- // SetSurface() can't be called before Initialize(), so we pick up our first
- // surface ID from the codec configuration.
- DCHECK(!pending_surface_id_);
-
// We signaled that we support deferred initialization, so see if the client
// does also.
deferred_initialization_pending_ = config.is_deferred_initialization_allowed;
@@ -390,7 +389,6 @@ bool AndroidVideoDecodeAccelerator::Initialize(const Config& config,
codec_config_->codec, device_info_)) {
// We should never be here if a SurfaceView is required.
// TODO(liberato): This really isn't true with AndroidOverlay.
- DCHECK(!config_.overlay_info.HasValidSurfaceId());
defer_surface_creation_ = true;
}
@@ -448,7 +446,6 @@ void AndroidVideoDecodeAccelerator::StartSurfaceChooser() {
// surface creation for other reasons, in which case the sync path with just
// signal success optimistically.
if (during_initialize_ && !deferred_initialization_pending_) {
- DCHECK(!config_.overlay_info.HasValidSurfaceId());
DCHECK(!config_.overlay_info.HasValidRoutingToken());
// Note that we might still send feedback to |surface_chooser_|, which might
// call us back. However, it will only ever tell us to use TextureOwner,
@@ -461,11 +458,7 @@ void AndroidVideoDecodeAccelerator::StartSurfaceChooser() {
// told not to use an overlay (kNoSurfaceID or a null routing token), then we
// leave the factory blank.
AndroidOverlayFactoryCB factory;
- if (config_.overlay_info.HasValidSurfaceId()) {
- factory = base::BindRepeating(&ContentVideoViewOverlay::Create,
- config_.overlay_info.surface_id);
- } else if (config_.overlay_info.HasValidRoutingToken() &&
- overlay_factory_cb_) {
+ if (config_.overlay_info.HasValidRoutingToken() && overlay_factory_cb_) {
factory = base::BindRepeating(overlay_factory_cb_,
*config_.overlay_info.routing_token);
}
@@ -655,7 +648,7 @@ bool AndroidVideoDecodeAccelerator::QueueInput() {
return true;
}
- std::unique_ptr<SharedMemoryRegion> shm;
+ std::unique_ptr<WritableUnalignedMapping> shm;
if (pending_input_buf_index_ == -1) {
// When |pending_input_buf_index_| is not -1, the buffer is already dequeued
@@ -663,8 +656,8 @@ bool AndroidVideoDecodeAccelerator::QueueInput() {
// closed.
shm = std::move(pending_bitstream_records_.front().memory);
- if (!shm->Map()) {
- NOTIFY_ERROR(UNREADABLE_INPUT, "SharedMemoryRegion::Map() failed");
+ if (!shm->IsValid()) {
+ NOTIFY_ERROR(UNREADABLE_INPUT, "UnalignedSharedMemory::Map() failed");
return false;
}
}
@@ -1336,20 +1329,15 @@ void AndroidVideoDecodeAccelerator::SetOverlayInfo(
// Note that these might be kNoSurfaceID / empty. In that case, we will
// revoke the factory.
- int32_t surface_id = overlay_info.surface_id;
OverlayInfo::RoutingToken routing_token = overlay_info.routing_token;
// We don't want to change the factory unless this info has actually changed.
// We'll get the same info many times if some other part of the config is now
// different, such as fullscreen state.
base::Optional<AndroidOverlayFactoryCB> new_factory;
- if (surface_id != previous_info.surface_id ||
- routing_token != previous_info.routing_token) {
+ if (routing_token != previous_info.routing_token) {
if (routing_token && overlay_factory_cb_)
new_factory = base::BindRepeating(overlay_factory_cb_, *routing_token);
- else if (surface_id != SurfaceManager::kNoSurfaceID)
- new_factory =
- base::BindRepeating(&ContentVideoViewOverlay::Create, surface_id);
}
surface_chooser_helper_.UpdateChooserState(new_factory);
diff --git a/chromium/media/gpu/android/android_video_decode_accelerator.h b/chromium/media/gpu/android/android_video_decode_accelerator.h
index a58c8cc4aef..b2caf4159e1 100644
--- a/chromium/media/gpu/android/android_video_decode_accelerator.h
+++ b/chromium/media/gpu/android/android_video_decode_accelerator.h
@@ -17,7 +17,7 @@
#include "base/threading/thread_checker.h"
#include "base/timer/timer.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/android/media_codec_bridge_impl.h"
#include "media/base/android/media_crypto_context.h"
#include "media/base/android_overlay_mojo_factory.h"
@@ -35,7 +35,6 @@
namespace media {
class AndroidVideoSurfaceChooser;
-class SharedMemoryRegion;
class PromotionHintAggregator;
// A VideoDecodeAccelerator implementation for Android. This class decodes the
@@ -305,8 +304,8 @@ class MEDIA_GPU_EXPORT AndroidVideoDecodeAccelerator
BitstreamBuffer buffer;
- // |memory| is not mapped, and may be null if buffer has no data.
- std::unique_ptr<SharedMemoryRegion> memory;
+ // |memory| may be null if buffer has no data.
+ std::unique_ptr<WritableUnalignedMapping> memory;
};
// Encoded bitstream buffers to be passed to media codec, queued until an
@@ -370,11 +369,6 @@ class MEDIA_GPU_EXPORT AndroidVideoDecodeAccelerator
// been defered until the first Decode() call.
bool defer_surface_creation_;
- // Has a value if a SetSurface() call has occurred and a new surface should be
- // switched to when possible. Cleared during OnSurfaceDestroyed() and if all
- // pictures have been rendered in DequeueOutput().
- base::Optional<int32_t> pending_surface_id_;
-
// Copy of the VDA::Config we were given.
Config config_;
diff --git a/chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc b/chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc
index a947cc548ea..713ab977b0c 100644
--- a/chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc
@@ -128,6 +128,15 @@ class AndroidVideoDecodeAcceleratorTest
gl::init::ShutdownGL(false);
}
+ std::unique_ptr<AndroidOverlay> OverlayFactory(const base::UnguessableToken&,
+ AndroidOverlayConfig config) {
+ // This shouldn't be called by AVDA. Our mock surface chooser won't use it
+ // either, though it'd be nice to check to token. Note that this isn't the
+ // same as an emtpy factory callback; that means "no factory". This one
+ // looks like a working factory, as long as nobody calls it.
+ return nullptr;
+ }
+
// Create and initialize AVDA with |config_|, and return the result.
bool InitializeAVDA(bool force_defer_surface_creation = false) {
// Because VDA has a custom deleter, we must assign it to |vda_| carefully.
@@ -135,7 +144,9 @@ class AndroidVideoDecodeAcceleratorTest
codec_allocator_.get(), std::move(chooser_that_is_usually_null_),
base::BindRepeating(&MakeContextCurrent),
base::BindRepeating(&GetContextGroup, context_group_),
- AndroidOverlayMojoFactoryCB(), device_info_.get());
+ base::BindRepeating(&AndroidVideoDecodeAcceleratorTest::OverlayFactory,
+ base::Unretained(this)),
+ device_info_.get());
vda_.reset(avda);
avda->force_defer_surface_creation_for_testing_ =
force_defer_surface_creation;
@@ -149,7 +160,7 @@ class AndroidVideoDecodeAcceleratorTest
// Initialize |vda_|, providing a new surface for it. You may get the surface
// by asking |codec_allocator_|.
void InitializeAVDAWithOverlay() {
- config_.overlay_info.surface_id = 123;
+ config_.overlay_info.routing_token = base::UnguessableToken::Create();
ASSERT_TRUE(InitializeAVDA());
base::RunLoop().RunUntilIdle();
ASSERT_TRUE(chooser_->factory_);
@@ -301,8 +312,6 @@ TEST_P(AndroidVideoDecodeAcceleratorTest,
// surface, though.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
- config_.overlay_info.surface_id = SurfaceManager::kNoSurfaceID;
-
EXPECT_CALL(*chooser_, MockUpdateState()).Times(0);
EXPECT_CALL(client_, NotifyInitializationComplete(true));
@@ -538,7 +547,7 @@ TEST_P(AndroidVideoDecodeAcceleratorTest,
EXPECT_CALL(*chooser_, MockUpdateState()).Times(1);
OverlayInfo overlay_info = config_.overlay_info;
- overlay_info.surface_id++;
+ overlay_info.routing_token = base::UnguessableToken::Create();
avda()->SetOverlayInfo(overlay_info);
}
diff --git a/chromium/media/gpu/android/android_video_encode_accelerator.cc b/chromium/media/gpu/android/android_video_encode_accelerator.cc
index 684ffea6d2d..770d0bf2b15 100644
--- a/chromium/media/gpu/android/android_video_encode_accelerator.cc
+++ b/chromium/media/gpu/android/android_video_encode_accelerator.cc
@@ -19,7 +19,7 @@
#include "media/base/android/media_codec_util.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/limits.h"
-#include "media/gpu/shared_memory_region.h"
+#include "media/base/unaligned_shared_memory.h"
#include "media/video/picture.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "ui/gl/android/scoped_java_surface.h"
@@ -429,12 +429,16 @@ void AndroidVideoEncodeAccelerator::DequeueOutput() {
BitstreamBuffer bitstream_buffer = available_bitstream_buffers_.back();
available_bitstream_buffers_.pop_back();
- std::unique_ptr<SharedMemoryRegion> shm(
- new SharedMemoryRegion(bitstream_buffer, false));
- RETURN_ON_FAILURE(shm->Map(), "Failed to map SHM", kPlatformFailureError);
- RETURN_ON_FAILURE(size <= shm->size(),
- "Encoded buffer too large: " << size << ">" << shm->size(),
- kPlatformFailureError);
+ auto shm = std::make_unique<WritableUnalignedMapping>(
+ bitstream_buffer.handle(), bitstream_buffer.size(),
+ bitstream_buffer.offset());
+ // The handle is no longer needed and should be closed.
+ bitstream_buffer.handle().Close();
+ RETURN_ON_FAILURE(shm->IsValid(), "Failed to map SHM", kPlatformFailureError);
+ RETURN_ON_FAILURE(
+ size <= bitstream_buffer.size(),
+ "Encoded buffer too large: " << size << ">" << bitstream_buffer.size(),
+ kPlatformFailureError);
status = media_codec_->CopyFromOutputBuffer(buf_index, offset, shm->memory(),
size);
@@ -446,8 +450,8 @@ void AndroidVideoEncodeAccelerator::DequeueOutput() {
base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
- client_ptr_factory_->GetWeakPtr(), bitstream_buffer.id(), size,
- key_frame, frame_timestamp));
+ client_ptr_factory_->GetWeakPtr(), bitstream_buffer.id(),
+ BitstreamBufferMetadata(size, key_frame, frame_timestamp)));
}
} // namespace media
diff --git a/chromium/media/gpu/android/avda_codec_allocator.h b/chromium/media/gpu/android/avda_codec_allocator.h
index a5faf4e2001..bd65661bb3b 100644
--- a/chromium/media/gpu/android/avda_codec_allocator.h
+++ b/chromium/media/gpu/android/avda_codec_allocator.h
@@ -25,7 +25,6 @@
#include "media/base/android/media_codec_bridge_impl.h"
#include "media/base/android/media_crypto_context.h"
#include "media/base/media.h"
-#include "media/base/surface_manager.h"
#include "media/base/video_codecs.h"
#include "media/gpu/android/avda_surface_bundle.h"
#include "media/gpu/media_gpu_export.h"
diff --git a/chromium/media/gpu/android/avda_codec_image.cc b/chromium/media/gpu/android/avda_codec_image.cc
index 7b94cc8c218..9d9509aecb2 100644
--- a/chromium/media/gpu/android/avda_codec_image.cc
+++ b/chromium/media/gpu/android/avda_codec_image.cc
@@ -76,13 +76,14 @@ bool AVDACodecImage::CopyTexSubImage(unsigned target,
return false;
}
-bool AVDACodecImage::ScheduleOverlayPlane(gfx::AcceleratedWidget widget,
- int z_order,
- gfx::OverlayTransform transform,
- const gfx::Rect& bounds_rect,
- const gfx::RectF& crop_rect,
- bool enable_blend,
- gfx::GpuFence* gpu_fence) {
+bool AVDACodecImage::ScheduleOverlayPlane(
+ gfx::AcceleratedWidget widget,
+ int z_order,
+ gfx::OverlayTransform transform,
+ const gfx::Rect& bounds_rect,
+ const gfx::RectF& crop_rect,
+ bool enable_blend,
+ std::unique_ptr<gfx::GpuFence> gpu_fence) {
// This should only be called when we're rendering to a SurfaceView.
if (has_texture_owner_) {
DVLOG(1) << "Invalid call to ScheduleOverlayPlane; this image is "
diff --git a/chromium/media/gpu/android/avda_codec_image.h b/chromium/media/gpu/android/avda_codec_image.h
index 2e210b70de6..e271cc9be8f 100644
--- a/chromium/media/gpu/android/avda_codec_image.h
+++ b/chromium/media/gpu/android/avda_codec_image.h
@@ -43,7 +43,7 @@ class AVDACodecImage : public gpu::gles2::GLStreamTextureImage {
const gfx::Rect& bounds_rect,
const gfx::RectF& crop_rect,
bool enable_blend,
- gfx::GpuFence* gpu_fence) override;
+ std::unique_ptr<gfx::GpuFence> gpu_fence) override;
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
void Flush() override {}
void OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
diff --git a/chromium/media/gpu/android/avda_picture_buffer_manager.cc b/chromium/media/gpu/android/avda_picture_buffer_manager.cc
index ee4c4ee3067..d760da02e96 100644
--- a/chromium/media/gpu/android/avda_picture_buffer_manager.cc
+++ b/chromium/media/gpu/android/avda_picture_buffer_manager.cc
@@ -53,7 +53,7 @@ bool AVDAPictureBufferManager::Initialize(
if (!surface_bundle->overlay) {
// Create the texture owner.
- texture_owner_ = SurfaceTextureGLOwner::Create();
+ texture_owner_ = TextureOwner::Create();
if (!texture_owner_)
return false;
diff --git a/chromium/media/gpu/android/avda_surface_bundle.h b/chromium/media/gpu/android/avda_surface_bundle.h
index c24eebe2569..9ea45fa1583 100644
--- a/chromium/media/gpu/android/avda_surface_bundle.h
+++ b/chromium/media/gpu/android/avda_surface_bundle.h
@@ -7,7 +7,6 @@
#include "base/memory/ref_counted_delete_on_sequence.h"
#include "media/base/android/android_overlay.h"
-#include "media/base/surface_manager.h"
#include "media/gpu/android/surface_texture_gl_owner.h"
#include "media/gpu/media_gpu_export.h"
#include "ui/gl/android/scoped_java_surface.h"
diff --git a/chromium/media/gpu/android/codec_image.cc b/chromium/media/gpu/android/codec_image.cc
index 658edfc574a..21efc385558 100644
--- a/chromium/media/gpu/android/codec_image.cc
+++ b/chromium/media/gpu/android/codec_image.cc
@@ -61,7 +61,10 @@ unsigned CodecImage::GetInternalFormat() {
}
bool CodecImage::BindTexImage(unsigned target) {
- return false;
+ // If we're using an overlay, then pretend it's bound. That way, we'll get
+ // calls to ScheduleOverlayPlane. Otherwise, fail so that we will be asked
+ // to CopyTexImage. Note that we could just CopyTexImage here.
+ return !texture_owner_;
}
void CodecImage::ReleaseTexImage(unsigned target) {}
@@ -86,13 +89,14 @@ bool CodecImage::CopyTexSubImage(unsigned target,
return false;
}
-bool CodecImage::ScheduleOverlayPlane(gfx::AcceleratedWidget widget,
- int z_order,
- gfx::OverlayTransform transform,
- const gfx::Rect& bounds_rect,
- const gfx::RectF& crop_rect,
- bool enable_blend,
- gfx::GpuFence* gpu_fence) {
+bool CodecImage::ScheduleOverlayPlane(
+ gfx::AcceleratedWidget widget,
+ int z_order,
+ gfx::OverlayTransform transform,
+ const gfx::Rect& bounds_rect,
+ const gfx::RectF& crop_rect,
+ bool enable_blend,
+ std::unique_ptr<gfx::GpuFence> gpu_fence) {
if (texture_owner_) {
DVLOG(1) << "Invalid call to ScheduleOverlayPlane; this image is "
"TextureOwner backed.";
@@ -226,7 +230,7 @@ bool CodecImage::RenderToOverlay() {
return true;
}
-void CodecImage::SurfaceDestroyed() {
+void CodecImage::ReleaseCodecBuffer() {
output_buffer_ = nullptr;
phase_ = Phase::kInvalidated;
}
diff --git a/chromium/media/gpu/android/codec_image.h b/chromium/media/gpu/android/codec_image.h
index d1b6b5ad735..55bf51220ff 100644
--- a/chromium/media/gpu/android/codec_image.h
+++ b/chromium/media/gpu/android/codec_image.h
@@ -48,7 +48,7 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
const gfx::Rect& bounds_rect,
const gfx::RectF& crop_rect,
bool enable_blend,
- gfx::GpuFence* gpu_fence) override;
+ std::unique_ptr<gfx::GpuFence> gpu_fence) override;
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
void Flush() override {}
void OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
@@ -83,9 +83,8 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
// buffer. Returns false if the buffer was invalidated.
bool RenderToTextureOwnerBackBuffer();
- // Called when we're no longer renderable because our surface is gone. We'll
- // discard any codec buffer, and generally do nothing.
- virtual void SurfaceDestroyed();
+ // Release any codec buffer without rendering, if we have one.
+ virtual void ReleaseCodecBuffer();
protected:
~CodecImage() override;
diff --git a/chromium/media/gpu/android/codec_image_group.cc b/chromium/media/gpu/android/codec_image_group.cc
index c88aab0e3ab..643e38309ec 100644
--- a/chromium/media/gpu/android/codec_image_group.cc
+++ b/chromium/media/gpu/android/codec_image_group.cc
@@ -46,7 +46,7 @@ void CodecImageGroup::AddCodecImage(CodecImage* image) {
// If somebody adds an image after the surface has been destroyed, fail the
// image immediately. This can happen due to thread hopping.
if (!surface_bundle_) {
- image->SurfaceDestroyed();
+ image->ReleaseCodecBuffer();
return;
}
@@ -66,8 +66,10 @@ void CodecImageGroup::OnCodecImageDestroyed(CodecImage* image) {
}
void CodecImageGroup::OnSurfaceDestroyed(AndroidOverlay* overlay) {
+ // Release any codec buffer, so that the image doesn't try to render to the
+ // overlay. If it already did, that's fine.
for (CodecImage* image : images_)
- image->SurfaceDestroyed();
+ image->ReleaseCodecBuffer();
// While this might cause |surface_bundle_| to be deleted, it's okay because
// it's a RefCountedDeleteOnSequence.
diff --git a/chromium/media/gpu/android/codec_image_group_unittest.cc b/chromium/media/gpu/android/codec_image_group_unittest.cc
index 90638553630..d2b16eb6390 100644
--- a/chromium/media/gpu/android/codec_image_group_unittest.cc
+++ b/chromium/media/gpu/android/codec_image_group_unittest.cc
@@ -40,7 +40,7 @@ class CodecImageGroupWithDestructionHook : public CodecImageGroup {
base::OnceClosure destruction_cb_;
};
-// CodecImage with a mocked SurfaceDestroyed.
+// CodecImage with a mocked ReleaseCodecBuffer.
class MockCodecImage : public CodecImage {
public:
MockCodecImage()
@@ -48,7 +48,7 @@ class MockCodecImage : public CodecImage {
nullptr,
PromotionHintAggregator::NotifyPromotionHintCB()) {}
- MOCK_METHOD0(SurfaceDestroyed, void());
+ MOCK_METHOD0(ReleaseCodecBuffer, void());
protected:
~MockCodecImage() override {}
@@ -123,7 +123,7 @@ TEST_F(CodecImageGroupTest, SurfaceBundleWithoutOverlayDoesntCrash) {
scoped_refptr<CodecImageGroup> image_group =
base::MakeRefCounted<CodecImageGroup>(gpu_task_runner_, surface_bundle);
// TODO(liberato): we should also make sure that adding an image doesn't call
- // SurfaceDestroyed when it's added.
+ // ReleaseCodecBuffer when it's added.
}
TEST_F(CodecImageGroupTest, ImagesRetainRefToGroup) {
@@ -179,8 +179,8 @@ TEST_F(CodecImageGroupTest, ImageGroupDropsForwardsSurfaceDestruction) {
// Destroy the surface. All destruction messages should be posted to the
// gpu thread.
- EXPECT_CALL(*image_1.get(), SurfaceDestroyed()).Times(0);
- EXPECT_CALL(*image_2.get(), SurfaceDestroyed()).Times(0);
+ EXPECT_CALL(*image_1.get(), ReleaseCodecBuffer()).Times(0);
+ EXPECT_CALL(*image_2.get(), ReleaseCodecBuffer()).Times(0);
// Note that we're calling this on the wrong thread, but that's okay.
rec.overlay()->OnSurfaceDestroyed();
env_.RunUntilIdle();
@@ -189,8 +189,8 @@ TEST_F(CodecImageGroupTest, ImageGroupDropsForwardsSurfaceDestruction) {
testing::Mock::VerifyAndClearExpectations(this);
// Now run |gpu_task_runner_| and verify that the callbacks run.
- EXPECT_CALL(*image_1.get(), SurfaceDestroyed()).Times(1);
- EXPECT_CALL(*image_2.get(), SurfaceDestroyed()).Times(1);
+ EXPECT_CALL(*image_1.get(), ReleaseCodecBuffer()).Times(1);
+ EXPECT_CALL(*image_2.get(), ReleaseCodecBuffer()).Times(1);
gpu_task_runner_->RunUntilIdle();
testing::Mock::VerifyAndClearExpectations(this);
diff --git a/chromium/media/gpu/android/codec_wrapper.cc b/chromium/media/gpu/android/codec_wrapper.cc
index 98f9225ad01..7d851d1a369 100644
--- a/chromium/media/gpu/android/codec_wrapper.cc
+++ b/chromium/media/gpu/android/codec_wrapper.cc
@@ -91,6 +91,10 @@ class CodecWrapperImpl : public base::RefCountedThreadSafe<CodecWrapperImpl> {
// codec.
base::Closure output_buffer_release_cb_;
+ // Do we owe the client an EOS in DequeueOutput, due to an eos that we elided
+ // while we're already flushed?
+ bool elided_eos_pending_ = false;
+
DISALLOW_COPY_AND_ASSIGN(CodecWrapperImpl);
};
@@ -182,6 +186,7 @@ bool CodecWrapperImpl::Flush() {
return false;
}
state_ = State::kFlushed;
+ elided_eos_pending_ = false;
return true;
}
@@ -217,9 +222,14 @@ CodecWrapperImpl::QueueStatus CodecWrapperImpl::QueueInputBuffer(
// Queue EOS if it's an EOS buffer.
if (buffer.end_of_stream()) {
// Some MediaCodecs consider it an error to get an EOS as the first buffer
- // (http://crbug.com/672268).
- DCHECK_NE(state_, State::kFlushed);
- codec_->QueueEOS(input_buffer);
+ // (http://crbug.com/672268). Just elide it. We also elide kDrained, since
+ // kFlushed => elided eos => kDrained, and it would still be the first
+ // buffer from MediaCodec's perspective. While kDrained does not imply that
+ // it's the first buffer in all cases, it's still safe to elide.
+ if (state_ == State::kFlushed || state_ == State::kDrained)
+ elided_eos_pending_ = true;
+ else
+ codec_->QueueEOS(input_buffer);
state_ = State::kDraining;
return QueueStatus::kOk;
}
@@ -267,6 +277,15 @@ CodecWrapperImpl::DequeueStatus CodecWrapperImpl::DequeueOutputBuffer(
// destructor calls ReleaseCodecOutputBuffer().
DCHECK(!*codec_buffer);
+ if (elided_eos_pending_) {
+ // An eos was sent while we were already flushed -- pretend it's ready.
+ elided_eos_pending_ = false;
+ state_ = State::kDrained;
+ if (end_of_stream)
+ *end_of_stream = true;
+ return DequeueStatus::kOk;
+ }
+
// Dequeue in a loop so we can avoid propagating the uninteresting
// OUTPUT_FORMAT_CHANGED and OUTPUT_BUFFERS_CHANGED statuses to our caller.
for (int attempt = 0; attempt < 3; ++attempt) {
diff --git a/chromium/media/gpu/android/codec_wrapper_unittest.cc b/chromium/media/gpu/android/codec_wrapper_unittest.cc
index d649baacac7..b01bb38919a 100644
--- a/chromium/media/gpu/android/codec_wrapper_unittest.cc
+++ b/chromium/media/gpu/android/codec_wrapper_unittest.cc
@@ -286,4 +286,26 @@ TEST_F(CodecWrapperTest, SurfaceBundleIsTaken) {
ASSERT_EQ(wrapper_->SurfaceBundle(), nullptr);
}
+TEST_F(CodecWrapperTest, EOSWhileFlushedOrDrainedIsElided) {
+ // Nothing should call QueueEOS.
+ EXPECT_CALL(*codec_, QueueEOS(_)).Times(0);
+
+ // Codec starts in the flushed state.
+ auto eos = DecoderBuffer::CreateEOSBuffer();
+ wrapper_->QueueInputBuffer(*eos, EncryptionScheme());
+ std::unique_ptr<CodecOutputBuffer> codec_buffer;
+ bool is_eos = false;
+ wrapper_->DequeueOutputBuffer(nullptr, &is_eos, &codec_buffer);
+ ASSERT_TRUE(is_eos);
+
+ // Since we also just got the codec into the drained state, make sure that
+ // it is elided here too.
+ ASSERT_TRUE(wrapper_->IsDrained());
+ eos = DecoderBuffer::CreateEOSBuffer();
+ wrapper_->QueueInputBuffer(*eos, EncryptionScheme());
+ is_eos = false;
+ wrapper_->DequeueOutputBuffer(nullptr, &is_eos, &codec_buffer);
+ ASSERT_TRUE(is_eos);
+}
+
} // namespace media
diff --git a/chromium/media/gpu/android/content_video_view_overlay.cc b/chromium/media/gpu/android/content_video_view_overlay.cc
deleted file mode 100644
index 626075f5a55..00000000000
--- a/chromium/media/gpu/android/content_video_view_overlay.cc
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/content_video_view_overlay.h"
-
-#include <memory>
-
-#include "base/bind.h"
-#include "base/logging.h"
-#include "base/threading/thread_task_runner_handle.h"
-#include "gpu/ipc/common/gpu_surface_lookup.h"
-
-namespace media {
-
-// static
-std::unique_ptr<AndroidOverlay> ContentVideoViewOverlay::Create(
- int surface_id,
- AndroidOverlayConfig config) {
- return std::make_unique<ContentVideoViewOverlay>(surface_id,
- std::move(config));
-}
-
-ContentVideoViewOverlay::ContentVideoViewOverlay(int surface_id,
- AndroidOverlayConfig config)
- : surface_id_(surface_id), config_(std::move(config)), weak_factory_(this) {
- if (ContentVideoViewOverlayAllocator::GetInstance()->AllocateSurface(this)) {
- // We have the surface -- post a callback to our OnSurfaceAvailable.
- base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::Bind(&ContentVideoViewOverlay::OnSurfaceAvailable,
- weak_factory_.GetWeakPtr(), true));
- }
-}
-
-ContentVideoViewOverlay::~ContentVideoViewOverlay() {
- // Deallocate the surface. It's okay if we don't own it.
- // Note that this only happens once any codec is done with us.
- ContentVideoViewOverlayAllocator::GetInstance()->DeallocateSurface(this);
-}
-
-void ContentVideoViewOverlay::ScheduleLayout(const gfx::Rect& rect) {}
-
-const base::android::JavaRef<jobject>& ContentVideoViewOverlay::GetJavaSurface()
- const {
- return surface_.j_surface();
-}
-
-void ContentVideoViewOverlay::OnSurfaceAvailable(bool success) {
- if (!success) {
- // Notify that the surface won't be available.
- config_.is_failed(this);
- // |this| may be deleted.
- return;
- }
-
- // Get the surface and notify our client.
- surface_ =
- gpu::GpuSurfaceLookup::GetInstance()->AcquireJavaSurface(surface_id_);
-
- // If no surface was returned, then fail instead.
- if (surface_.IsEmpty()) {
- config_.is_failed(this);
- // |this| may be deleted.
- return;
- }
-
- config_.is_ready(this);
-}
-
-void ContentVideoViewOverlay::OnSurfaceDestroyed() {
- RunSurfaceDestroyedCallbacks();
- // |this| may be deleted, or deletion might be posted elsewhere.
-}
-
-int32_t ContentVideoViewOverlay::GetSurfaceId() {
- return surface_id_;
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/content_video_view_overlay.h b/chromium/media/gpu/android/content_video_view_overlay.h
deleted file mode 100644
index 07f40328744..00000000000
--- a/chromium/media/gpu/android/content_video_view_overlay.h
+++ /dev/null
@@ -1,51 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_CONTENT_VIDEO_VIEW_OVERLAY_H_
-#define MEDIA_GPU_ANDROID_CONTENT_VIDEO_VIEW_OVERLAY_H_
-
-#include <memory>
-
-#include "base/memory/weak_ptr.h"
-#include "media/base/android/android_overlay.h"
-#include "media/gpu/android/content_video_view_overlay_allocator.h"
-#include "ui/gl/android/scoped_java_surface.h"
-
-namespace media {
-
-class ContentVideoViewOverlay
- : public ContentVideoViewOverlayAllocator::Client {
- public:
- // This exists so we can bind construction into a callback returning
- // std::unique_ptr<AndroidOverlay>.
- static std::unique_ptr<AndroidOverlay> Create(int surface_id,
- AndroidOverlayConfig config);
-
- // |config| is ignored except for callbacks. Callbacks will not be called
- // before this returns.
- ContentVideoViewOverlay(int surface_id, AndroidOverlayConfig config);
- ~ContentVideoViewOverlay() override;
-
- // AndroidOverlay (via ContentVideoViewOverlayAllocator::Client)
- // ContentVideoView ignores this, unfortunately.
- void ScheduleLayout(const gfx::Rect& rect) override;
- const base::android::JavaRef<jobject>& GetJavaSurface() const override;
-
- // ContentVideoViewOverlayAllocator::Client
- void OnSurfaceAvailable(bool success) override;
- void OnSurfaceDestroyed() override;
- int32_t GetSurfaceId() override;
-
- private:
- int surface_id_;
- AndroidOverlayConfig config_;
- gl::ScopedJavaSurface surface_;
-
- base::WeakPtrFactory<ContentVideoViewOverlay> weak_factory_;
- DISALLOW_COPY_AND_ASSIGN(ContentVideoViewOverlay);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_CONTENT_VIDEO_VIEW_OVERLAY_H_
diff --git a/chromium/media/gpu/android/content_video_view_overlay_allocator.cc b/chromium/media/gpu/android/content_video_view_overlay_allocator.cc
deleted file mode 100644
index a205ae783d3..00000000000
--- a/chromium/media/gpu/android/content_video_view_overlay_allocator.cc
+++ /dev/null
@@ -1,152 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/content_video_view_overlay_allocator.h"
-#include "base/threading/thread_task_runner_handle.h"
-
-#include "media/gpu/android/avda_codec_allocator.h"
-
-namespace media {
-
-// static
-ContentVideoViewOverlayAllocator*
-ContentVideoViewOverlayAllocator::GetInstance() {
- static ContentVideoViewOverlayAllocator* allocator =
- new ContentVideoViewOverlayAllocator(
- AVDACodecAllocator::GetInstance(base::ThreadTaskRunnerHandle::Get()));
- return allocator;
-}
-
-ContentVideoViewOverlayAllocator::ContentVideoViewOverlayAllocator(
- AVDACodecAllocator* allocator)
- : allocator_(allocator) {}
-
-ContentVideoViewOverlayAllocator::~ContentVideoViewOverlayAllocator() {}
-
-bool ContentVideoViewOverlayAllocator::AllocateSurface(Client* client) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- const int32_t surface_id = client->GetSurfaceId();
- DVLOG(1) << __func__ << ": " << surface_id;
- DCHECK_NE(surface_id, SurfaceManager::kNoSurfaceID);
-
- // If it's not owned or being released, |client| now owns it.
- // Note: it's owned until it's released, since AVDACodecAllocator does that.
- // It keeps the bundle around (and also the overlay that's the current owner)
- // until the codec is done with it. That's required to use AndroidOverlay.
- // So, we don't need to check for 'being released'; the owner is good enough.
- auto it = surface_owners_.find(surface_id);
- if (it == surface_owners_.end()) {
- OwnerRecord record;
- record.owner = client;
- surface_owners_.insert(OwnerMap::value_type(surface_id, record));
- return true;
- }
-
- // Otherwise |client| replaces the previous waiter (if any).
- OwnerRecord& record = it->second;
- if (record.waiter)
- record.waiter->OnSurfaceAvailable(false);
- record.waiter = client;
- return false;
-}
-
-void ContentVideoViewOverlayAllocator::DeallocateSurface(Client* client) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- const int32_t surface_id = client->GetSurfaceId();
- DCHECK_NE(surface_id, SurfaceManager::kNoSurfaceID);
-
- // If we time out waiting for the surface to be destroyed, then we might have
- // already removed |surface_id|. If it's now trying to deallocate, then
- // maybe we just weren't patient enough, or mediaserver restarted.
- auto it = surface_owners_.find(surface_id);
- if (it == surface_owners_.end())
- return;
-
- OwnerRecord& record = it->second;
- if (record.owner == client)
- record.owner = nullptr;
- else if (record.waiter == client)
- record.waiter = nullptr;
-
- // Promote the waiter if possible.
- if (record.waiter && !record.owner) {
- record.owner = record.waiter;
- record.waiter = nullptr;
- record.owner->OnSurfaceAvailable(true);
- return;
- }
-
- // Remove the record if it's now unused.
- if (!record.owner && !record.waiter)
- surface_owners_.erase(it);
-}
-
-// During surface teardown we have to handle the following cases.
-// 1) No AVDA has acquired the surface, or the surface has already been
-// completely released.
-// This case is easy -- there's no owner or waiter, and we can return.
-//
-// 2) A MediaCodec is currently being configured with the surface on another
-// thread. Whether an AVDA owns the surface or has already deallocated it,
-// the MediaCodec should be dropped when configuration completes.
-// In this case, we'll find an owner. We'll notify it about the destruction.
-// Note that AVDA doesn't handle this case correctly right now, since it
-// doesn't know the state of codec creation on the codec thread. This is
-// only a problem because CVV has the 'wait on main thread' semantics.
-//
-// 3) An AVDA owns the surface and it responds to OnSurfaceDestroyed() by:
-// a) Replacing the destroyed surface by calling MediaCodec#setSurface().
-// b) Releasing the MediaCodec it's attached to.
-// In case a, the surface will be destroyed during OnSurfaceDestroyed.
-// In case b, we'll have to wait for the release to complete.
-//
-// 4) No AVDA owns the surface, but the MediaCodec it's attached to is currently
-// being destroyed on another thread.
-// This is the same as 3b.
-void ContentVideoViewOverlayAllocator::OnSurfaceDestroyed(int32_t surface_id) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DVLOG(1) << __func__ << ": " << surface_id;
-
- // If it isn't currently owned, then we're done. Rememeber that the overlay
- // must outlive any user of it (MediaCodec!), and currently AVDACodecAllocator
- // is responsible for making sure that happens for AVDA.
- auto it = surface_owners_.find(surface_id);
- if (it == surface_owners_.end())
- return;
-
- // Notify the owner and waiter (if any).
- OwnerRecord& record = it->second;
- if (record.waiter) {
- record.waiter->OnSurfaceAvailable(false);
- record.waiter = nullptr;
- }
-
- DCHECK(record.owner);
-
- // |record| could be removed by the callback, if it deallocates the surface.
- record.owner->OnSurfaceDestroyed();
-
- // If owner deallocated the surface, then we don't need to wait. Note that
- // the owner might have been deleted in that case. Since CVVOverlay only
- // deallocates the surface during destruction, it's a safe bet.
- it = surface_owners_.find(surface_id);
- if (it == surface_owners_.end())
- return;
-
- // The surface is still in use, but should have been posted to another thread
- // for destruction. Note that this isn't technically required for overlays
- // in general, but CVV requires it. All of the pending release stuff should
- // be moved here, or to custom deleters of CVVOverlay. However, in the
- // interest of not changing too much at once, we let AVDACodecAllocator
- // handle it. Since we're deprecating CVVOverlay anyway, all of this can be
- // removed eventually.
- // If the wait fails, then clean up |surface_owners_| anyway, since the codec
- // release is probably hung up.
- if (!allocator_->WaitForPendingRelease(record.owner))
- surface_owners_.erase(it);
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/content_video_view_overlay_allocator.h b/chromium/media/gpu/android/content_video_view_overlay_allocator.h
deleted file mode 100644
index 4e6d4db3390..00000000000
--- a/chromium/media/gpu/android/content_video_view_overlay_allocator.h
+++ /dev/null
@@ -1,86 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_CONTENT_VIDEO_VIEW_OVERLAY_ALLOCATOR_H_
-#define MEDIA_GPU_ANDROID_CONTENT_VIDEO_VIEW_OVERLAY_ALLOCATOR_H_
-
-#include <stddef.h>
-
-#include "base/containers/flat_map.h"
-#include "base/macros.h"
-#include "base/threading/thread_checker.h"
-#include "media/base/android/android_overlay.h"
-#include "media/gpu/media_gpu_export.h"
-
-namespace media {
-
-class AVDACodecAllocator;
-class ContentVideoViewOverlayAllocatorTest;
-
-// ContentVideoViewOverlayAllocator lets different instances of CVVOverlay that
-// share the same surface ID to be synchronized with respect to each other.
-// It also manages synchronous surface destruction.
-class MEDIA_GPU_EXPORT ContentVideoViewOverlayAllocator {
- public:
- class Client : public AndroidOverlay {
- public:
- // Called when the requested SurfaceView becomes available after a call to
- // AllocateSurface()
- virtual void OnSurfaceAvailable(bool success) = 0;
-
- // Called when the allocated surface is being destroyed. This must either
- // replace the surface with MediaCodec#setSurface, or release the MediaCodec
- // it's attached to. The client no longer owns the surface and doesn't
- // need to call DeallocateSurface();
- virtual void OnSurfaceDestroyed() = 0;
-
- // Return the surface id of the client's ContentVideoView.
- virtual int32_t GetSurfaceId() = 0;
-
- protected:
- ~Client() override {}
- };
-
- static ContentVideoViewOverlayAllocator* GetInstance();
-
- // Called synchronously when the given surface is being destroyed on the
- // browser UI thread.
- void OnSurfaceDestroyed(int32_t surface_id);
-
- // Returns true if the caller now owns the surface, or false if someone else
- // owns the surface. |client| will be notified when the surface is available
- // via OnSurfaceAvailable().
- bool AllocateSurface(Client* client);
-
- // Relinquish ownership of the surface or stop waiting for it to be available.
- // The caller must guarantee that when calling this the surface is either no
- // longer attached to a MediaCodec, or the MediaCodec it was attached to is
- // was released with ReleaseMediaCodec().
- void DeallocateSurface(Client* client);
-
- private:
- friend class ContentVideoViewOverlayAllocatorTest;
-
- ContentVideoViewOverlayAllocator(AVDACodecAllocator* allocator);
- ~ContentVideoViewOverlayAllocator();
-
- struct OwnerRecord {
- Client* owner = nullptr;
- Client* waiter = nullptr;
- };
-
- // Indexed by surface id.
- using OwnerMap = base::flat_map<int32_t, OwnerRecord>;
- OwnerMap surface_owners_;
-
- AVDACodecAllocator* allocator_;
-
- base::ThreadChecker thread_checker_;
-
- DISALLOW_COPY_AND_ASSIGN(ContentVideoViewOverlayAllocator);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_CONTENT_VIDEO_VIEW_OVERLAY_ALLOCATOR_H_
diff --git a/chromium/media/gpu/android/content_video_view_overlay_allocator_unittest.cc b/chromium/media/gpu/android/content_video_view_overlay_allocator_unittest.cc
deleted file mode 100644
index 977810125aa..00000000000
--- a/chromium/media/gpu/android/content_video_view_overlay_allocator_unittest.cc
+++ /dev/null
@@ -1,155 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/content_video_view_overlay_allocator.h"
-
-#include <stdint.h>
-
-#include <memory>
-
-#include "base/bind.h"
-#include "base/logging.h"
-#include "base/test/scoped_task_environment.h"
-#include "base/test/simple_test_tick_clock.h"
-#include "base/threading/sequenced_task_runner_handle.h"
-#include "base/time/tick_clock.h"
-#include "media/base/surface_manager.h"
-#include "media/gpu/android/fake_codec_allocator.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-using testing::AnyNumber;
-using testing::Invoke;
-using testing::Return;
-using testing::StrictMock;
-using testing::_;
-
-namespace media {
-class ContentVideoViewOverlayAllocatorTest : public testing::Test {
- public:
- class MockClient
- : public StrictMock<ContentVideoViewOverlayAllocator::Client> {
- public:
- MOCK_METHOD1(ScheduleLayout, void(const gfx::Rect&));
- MOCK_CONST_METHOD0(GetJavaSurface,
- const base::android::JavaRef<jobject>&());
-
- MOCK_METHOD1(OnSurfaceAvailable, void(bool success));
- MOCK_METHOD0(OnSurfaceDestroyed, void());
- MOCK_METHOD0(GetSurfaceId, int32_t());
- };
-
- ContentVideoViewOverlayAllocatorTest() {}
-
- ~ContentVideoViewOverlayAllocatorTest() override {}
-
- protected:
- void SetUp() override {
- codec_allocator_ =
- new FakeCodecAllocator(base::SequencedTaskRunnerHandle::Get());
- allocator_ = new ContentVideoViewOverlayAllocator(codec_allocator_);
-
- avda1_ = new MockClient();
- avda2_ = new MockClient();
- avda3_ = new MockClient();
- // Default all |avda*| instances to surface ID 1.
- SetSurfaceId(avda1_, 1);
- SetSurfaceId(avda2_, 1);
- SetSurfaceId(avda3_, 1);
- }
-
- void TearDown() override {
- delete avda3_;
- delete avda2_;
- delete avda1_;
- delete allocator_;
- delete codec_allocator_;
- }
-
- void SetSurfaceId(MockClient* client, int32_t surface_id) {
- ON_CALL(*client, GetSurfaceId()).WillByDefault(Return(surface_id));
- EXPECT_CALL(*client, GetSurfaceId()).Times(AnyNumber());
- }
-
- protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
- ContentVideoViewOverlayAllocator* allocator_;
- FakeCodecAllocator* codec_allocator_;
-
- MockClient* avda1_;
- MockClient* avda2_;
- MockClient* avda3_;
-};
-
-TEST_F(ContentVideoViewOverlayAllocatorTest, AllocatingAnOwnedSurfaceFails) {
- ASSERT_TRUE(allocator_->AllocateSurface(avda1_));
- ASSERT_FALSE(allocator_->AllocateSurface(avda2_));
-}
-
-TEST_F(ContentVideoViewOverlayAllocatorTest,
- LaterWaitersReplaceEarlierWaiters) {
- allocator_->AllocateSurface(avda1_);
- allocator_->AllocateSurface(avda2_);
- EXPECT_CALL(*avda2_, OnSurfaceAvailable(false));
- allocator_->AllocateSurface(avda3_);
-}
-
-TEST_F(ContentVideoViewOverlayAllocatorTest,
- WaitersBecomeOwnersWhenSurfacesAreReleased) {
- allocator_->AllocateSurface(avda1_);
- allocator_->AllocateSurface(avda2_);
- EXPECT_CALL(*avda2_, OnSurfaceAvailable(true));
- allocator_->DeallocateSurface(avda1_);
- // The surface should still be owned.
- ASSERT_FALSE(allocator_->AllocateSurface(avda1_));
-}
-
-TEST_F(ContentVideoViewOverlayAllocatorTest,
- DeallocatingUnownedSurfacesIsSafe) {
- allocator_->DeallocateSurface(avda1_);
-}
-
-TEST_F(ContentVideoViewOverlayAllocatorTest,
- WaitersAreRemovedIfTheyDeallocate) {
- allocator_->AllocateSurface(avda1_);
- allocator_->AllocateSurface(avda2_);
- allocator_->DeallocateSurface(avda2_);
- // |avda2_| should should not receive a notification.
- EXPECT_CALL(*avda2_, OnSurfaceAvailable(_)).Times(0);
- allocator_->DeallocateSurface(avda1_);
-}
-
-TEST_F(ContentVideoViewOverlayAllocatorTest, OwnersAreNotifiedOnDestruction) {
- allocator_->AllocateSurface(avda1_);
- // Owner is notified for a surface it owns.
- EXPECT_CALL(*avda1_, OnSurfaceDestroyed());
- allocator_->OnSurfaceDestroyed(1);
-}
-
-TEST_F(ContentVideoViewOverlayAllocatorTest,
- NonOwnersAreNotNotifiedOnDestruction) {
- allocator_->AllocateSurface(avda1_);
- // Not notified for a surface it doesn't own.
- EXPECT_CALL(*avda1_, OnSurfaceDestroyed()).Times(0);
- allocator_->OnSurfaceDestroyed(123);
-}
-
-TEST_F(ContentVideoViewOverlayAllocatorTest, WaitersAreNotifiedOnDestruction) {
- allocator_->AllocateSurface(avda1_);
- allocator_->AllocateSurface(avda2_);
- EXPECT_CALL(*avda1_, OnSurfaceDestroyed());
- EXPECT_CALL(*avda2_, OnSurfaceAvailable(false));
- allocator_->OnSurfaceDestroyed(1);
-}
-
-TEST_F(ContentVideoViewOverlayAllocatorTest,
- DeallocatingIsSafeDuringSurfaceDestroyed) {
- allocator_->AllocateSurface(avda1_);
- EXPECT_CALL(*avda1_, OnSurfaceDestroyed()).WillOnce(Invoke([=]() {
- allocator_->DeallocateSurface(avda1_);
- }));
- allocator_->OnSurfaceDestroyed(1);
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/image_reader_gl_owner.cc b/chromium/media/gpu/android/image_reader_gl_owner.cc
new file mode 100644
index 00000000000..38c607bd4a7
--- /dev/null
+++ b/chromium/media/gpu/android/image_reader_gl_owner.cc
@@ -0,0 +1,347 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/android/image_reader_gl_owner.h"
+
+#include <android/native_window_jni.h>
+#include <jni.h>
+#include <stdint.h>
+
+#include "base/android/jni_android.h"
+#include "base/logging.h"
+#include "base/memory/ptr_util.h"
+#include "base/metrics/histogram_functions.h"
+#include "base/metrics/histogram_macros.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "ui/gl/gl_fence_android_native_fence_sync.h"
+#include "ui/gl/scoped_binders.h"
+#include "ui/gl/scoped_make_current.h"
+
+namespace media {
+
+// FrameAvailableEvent_ImageReader is a RefCounted wrapper for a WaitableEvent
+// (it's not possible to put one in RefCountedData). This lets us safely signal
+// an event on any thread.
+struct FrameAvailableEvent_ImageReader
+ : public base::RefCountedThreadSafe<FrameAvailableEvent_ImageReader> {
+ FrameAvailableEvent_ImageReader()
+ : event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
+ base::WaitableEvent::InitialState::NOT_SIGNALED) {}
+ void Signal() { event.Signal(); }
+ base::WaitableEvent event;
+
+ // This callback function will be called when there is a new image available
+ // for in the image reader's queue.
+ static void CallbackSignal(void* context, AImageReader* reader) {
+ (reinterpret_cast<FrameAvailableEvent_ImageReader*>(context))->Signal();
+ }
+
+ private:
+ friend class RefCountedThreadSafe<FrameAvailableEvent_ImageReader>;
+
+ ~FrameAvailableEvent_ImageReader() = default;
+};
+
+ImageReaderGLOwner::ImageReaderGLOwner(GLuint texture_id)
+ : current_image_(nullptr),
+ texture_id_(texture_id),
+ loader_(base::android::AndroidImageReader::GetInstance()),
+ context_(gl::GLContext::GetCurrent()),
+ surface_(gl::GLSurface::GetCurrent()),
+ frame_available_event_(new FrameAvailableEvent_ImageReader()) {
+ DCHECK(context_);
+ DCHECK(surface_);
+
+ // Set the width, height and format to some default value. This parameters
+ // are/maybe overriden by the producer sending buffers to this imageReader's
+ // Surface.
+ int32_t width = 1, height = 1, maxImages = 3;
+ AIMAGE_FORMATS format = AIMAGE_FORMAT_YUV_420_888;
+ AImageReader* reader = nullptr;
+
+ // Create a new reader for images of the desired size and format.
+ media_status_t return_code =
+ loader_.AImageReader_new(width, height, format, maxImages, &reader);
+ if (return_code != AMEDIA_OK) {
+ LOG(ERROR) << " Image reader creation failed.";
+ if (return_code == AMEDIA_ERROR_INVALID_PARAMETER)
+ LOG(ERROR) << "Either reader is NULL, or one or more of width, height, "
+ "format, maxImages arguments is not supported";
+ else
+ LOG(ERROR) << "unknown error";
+ return;
+ }
+ DCHECK(reader);
+ image_reader_ = reader;
+
+ // Create a new Image Listner.
+ listener_ = std::make_unique<AImageReader_ImageListener>();
+ listener_->context = reinterpret_cast<void*>(frame_available_event_.get());
+ listener_->onImageAvailable =
+ &FrameAvailableEvent_ImageReader::CallbackSignal;
+
+ // Set the onImageAvailable listener of this image reader.
+ if (loader_.AImageReader_setImageListener(image_reader_, listener_.get()) !=
+ AMEDIA_OK) {
+ LOG(ERROR) << " Failed to register AImageReader listener";
+ return;
+ }
+}
+
+ImageReaderGLOwner::~ImageReaderGLOwner() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(image_reader_);
+
+ // Now we can stop listening to new images.
+ loader_.AImageReader_setImageListener(image_reader_, NULL);
+
+ // Delete the image before closing the associated image reader.
+ if (current_image_)
+ loader_.AImage_delete(current_image_);
+
+ // Delete the image reader.
+ loader_.AImageReader_delete(image_reader_);
+
+ // Delete texture
+ ui::ScopedMakeCurrent scoped_make_current(context_.get(), surface_.get());
+ if (context_->IsCurrent(surface_.get())) {
+ glDeleteTextures(1, &texture_id_);
+ DCHECK_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
+ }
+}
+
+GLuint ImageReaderGLOwner::GetTextureId() const {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ return texture_id_;
+}
+
+gl::ScopedJavaSurface ImageReaderGLOwner::CreateJavaSurface() const {
+ // Get the android native window from the image reader.
+ ANativeWindow* window = nullptr;
+ if (loader_.AImageReader_getWindow(image_reader_, &window) != AMEDIA_OK) {
+ LOG(ERROR) << "unable to get a window from image reader.";
+ return gl::ScopedJavaSurface::AcquireExternalSurface(nullptr);
+ }
+
+ // Get the java surface object from the Android native window.
+ JNIEnv* env = base::android::AttachCurrentThread();
+ jobject j_surface = loader_.ANativeWindow_toSurface(env, window);
+ DCHECK(j_surface);
+
+ // Get the scoped java surface that is owned externally.
+ return gl::ScopedJavaSurface::AcquireExternalSurface(j_surface);
+}
+
+void ImageReaderGLOwner::UpdateTexImage() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(image_reader_);
+
+ // Acquire the latest image asynchronously
+ AImage* image = nullptr;
+ int acquireFenceFd = 0;
+ media_status_t return_code = AMEDIA_OK;
+ return_code = loader_.AImageReader_acquireLatestImageAsync(
+ image_reader_, &image, &acquireFenceFd);
+
+ // TODO(http://crbug.com/846050).
+ // Need to add some better error handling if below error occurs. Currently we
+ // just return if error occurs.
+ switch (return_code) {
+ case AMEDIA_ERROR_INVALID_PARAMETER:
+ LOG(ERROR) << " Image is NULL";
+ base::UmaHistogramSparse("Media.AImageReaderGLOwner.AcquireImageResult",
+ return_code);
+ return;
+ case AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED:
+ LOG(ERROR)
+ << "number of concurrently acquired images has reached the limit";
+ base::UmaHistogramSparse("Media.AImageReaderGLOwner.AcquireImageResult",
+ return_code);
+ return;
+ case AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE:
+ LOG(ERROR) << "no buffers currently available in the reader queue";
+ base::UmaHistogramSparse("Media.AImageReaderGLOwner.AcquireImageResult",
+ return_code);
+ return;
+ case AMEDIA_ERROR_UNKNOWN:
+ LOG(ERROR) << "method fails for some other reasons";
+ base::UmaHistogramSparse("Media.AImageReaderGLOwner.AcquireImageResult",
+ return_code);
+ return;
+ case AMEDIA_OK:
+ // Method call succeeded.
+ break;
+ default:
+ // No other error code should be returned.
+ NOTREACHED();
+ return;
+ }
+
+ // If there is no new image simply return. At this point previous image will
+ // still be bound to the texture.
+ if (!image) {
+ return;
+ }
+
+ // If we have a new Image, delete the previously acquired image (if any).
+ if (current_image_) {
+ // Delete the image synchronously. Create and insert a fence signal.
+ std::unique_ptr<gl::GLFenceAndroidNativeFenceSync> android_native_fence =
+ gl::GLFenceAndroidNativeFenceSync::CreateForGpuFence();
+ if (!android_native_fence) {
+ LOG(ERROR) << "Failed to create android native fence sync object.";
+ return;
+ }
+ std::unique_ptr<gfx::GpuFence> gpu_fence =
+ android_native_fence->GetGpuFence();
+ if (!gpu_fence) {
+ LOG(ERROR) << "Unable to get a gpu fence object.";
+ return;
+ }
+ gfx::GpuFenceHandle fence_handle =
+ gfx::CloneHandleForIPC(gpu_fence->GetGpuFenceHandle());
+ if (fence_handle.is_null()) {
+ LOG(ERROR) << "Gpu fence handle is null";
+ return;
+ }
+ loader_.AImage_deleteAsync(current_image_, fence_handle.native_fd.fd);
+ current_image_ = nullptr;
+ }
+
+ // Make the newly acuired image as current image.
+ current_image_ = image;
+
+ // If acquireFenceFd is -1, we do not need synchronization fence and image is
+ // ready to be used immediately. Else we need to create a sync fence which is
+ // used to signal when the buffer/image is ready to be consumed.
+ if (acquireFenceFd != -1) {
+ // Create a new egl sync object using the acquireFenceFd.
+ EGLint attribs[] = {EGL_SYNC_NATIVE_FENCE_FD_ANDROID, acquireFenceFd,
+ EGL_NONE};
+ std::unique_ptr<gl::GLFenceEGL> egl_fence(
+ gl::GLFenceEGL::Create(EGL_SYNC_NATIVE_FENCE_ANDROID, attribs));
+
+ // Insert the fence sync gl command using the helper class in
+ // gl_fence_egl.h.
+ if (egl_fence == nullptr) {
+ LOG(ERROR) << " Failed to created egl fence object ";
+ return;
+ }
+ DCHECK(egl_fence);
+
+ // Make the server wait and not the client.
+ egl_fence->ServerWait();
+ }
+
+ // Get the hardware buffer from the image.
+ AHardwareBuffer* buffer = nullptr;
+ DCHECK(current_image_);
+ if (loader_.AImage_getHardwareBuffer(current_image_, &buffer) != AMEDIA_OK) {
+ LOG(ERROR) << "hardware buffer is null";
+ return;
+ }
+
+ // Create a egl image from the hardware buffer. Get the image size to create
+ // egl image.
+ int32_t image_height = 0, image_width = 0;
+ if (loader_.AImage_getWidth(current_image_, &image_width) != AMEDIA_OK) {
+ LOG(ERROR) << "image width is null OR image has been deleted";
+ return;
+ }
+ if (loader_.AImage_getHeight(current_image_, &image_height) != AMEDIA_OK) {
+ LOG(ERROR) << "image height is null OR image has been deleted";
+ return;
+ }
+ gfx::Size image_size(image_width, image_height);
+ scoped_refptr<gl::GLImageAHardwareBuffer> egl_image(
+ new gl::GLImageAHardwareBuffer(image_size));
+ if (!egl_image->Initialize(buffer, false)) {
+ LOG(ERROR) << "Failed to create EGL image ";
+ egl_image = nullptr;
+ return;
+ }
+
+ // Now bind this egl image to the texture target GL_TEXTURE_EXTERNAL_OES. Note
+ // that once the egl image is bound, it can be destroyed safely without
+ // affecting the rendering using this texture image.
+ glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id_);
+ egl_image->BindTexImage(GL_TEXTURE_EXTERNAL_OES);
+}
+
+void ImageReaderGLOwner::GetTransformMatrix(float mtx[]) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ // Assign a Y inverted Identity matrix. Both MCVD and AVDA path performs a Y
+ // inversion of this matrix later. Hence if we assign a Y inverted matrix
+ // here, it simply becomes an identity matrix later and will have no effect
+ // on the image data.
+ static constexpr float kYInvertedIdentity[16]{1, 0, 0, 0, 0, -1, 0, 0,
+ 0, 0, 1, 0, 0, 1, 0, 1};
+ memcpy(mtx, kYInvertedIdentity, sizeof(kYInvertedIdentity));
+}
+
+void ImageReaderGLOwner::ReleaseBackBuffers() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ // ReleaseBackBuffers() call is not required with image reader.
+}
+
+gl::GLContext* ImageReaderGLOwner::GetContext() const {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ return context_.get();
+}
+
+gl::GLSurface* ImageReaderGLOwner::GetSurface() const {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ return surface_.get();
+}
+
+void ImageReaderGLOwner::SetReleaseTimeToNow() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ release_time_ = base::TimeTicks::Now();
+}
+
+void ImageReaderGLOwner::IgnorePendingRelease() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ release_time_ = base::TimeTicks();
+}
+
+bool ImageReaderGLOwner::IsExpectingFrameAvailable() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ return !release_time_.is_null();
+}
+
+void ImageReaderGLOwner::WaitForFrameAvailable() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(!release_time_.is_null());
+
+ // 5msec covers >99.9% of cases, so just wait for up to that much before
+ // giving up. If an error occurs, we might not ever get a notification.
+ const base::TimeDelta max_wait = base::TimeDelta::FromMilliseconds(5);
+ const base::TimeTicks call_time = base::TimeTicks::Now();
+ const base::TimeDelta elapsed = call_time - release_time_;
+ const base::TimeDelta remaining = max_wait - elapsed;
+ release_time_ = base::TimeTicks();
+
+ if (remaining <= base::TimeDelta()) {
+ if (!frame_available_event_->event.IsSignaled()) {
+ DVLOG(1) << "Deferred WaitForFrameAvailable() timed out, elapsed: "
+ << elapsed.InMillisecondsF() << "ms";
+ }
+ return;
+ }
+
+ DCHECK_LE(remaining, max_wait);
+ SCOPED_UMA_HISTOGRAM_TIMER(
+ "Media.CodecImage.ImageReaderGLOwner.WaitTimeForFrame");
+ if (!frame_available_event_->event.TimedWait(remaining)) {
+ DVLOG(1) << "WaitForFrameAvailable() timed out, elapsed: "
+ << elapsed.InMillisecondsF()
+ << "ms, additionally waited: " << remaining.InMillisecondsF()
+ << "ms, total: " << (elapsed + remaining).InMillisecondsF()
+ << "ms";
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/android/image_reader_gl_owner.h b/chromium/media/gpu/android/image_reader_gl_owner.h
new file mode 100644
index 00000000000..a359ad397db
--- /dev/null
+++ b/chromium/media/gpu/android/image_reader_gl_owner.h
@@ -0,0 +1,74 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_ANDROID_IMAGE_READER_GL_OWNER_H_
+#define MEDIA_GPU_ANDROID_IMAGE_READER_GL_OWNER_H_
+
+#include <memory>
+
+#include "base/android/android_image_reader_compat.h"
+#include "media/gpu/android/texture_owner.h"
+#include "ui/gl/gl_fence_egl.h"
+#include "ui/gl/gl_image_ahardwarebuffer.h"
+
+namespace media {
+
+struct FrameAvailableEvent_ImageReader;
+
+// This class wraps the AImageReader usage and is used to create a GL texture
+// using the current platform GL context and returns a new ImageReaderGLOwner
+// attached to it. The surface handle of the AImageReader is attached to
+// decoded media frames. Media frames can update the attached surface handle
+// with image data and this class helps to create an eglImage using that image
+// data present in the surface.
+class MEDIA_GPU_EXPORT ImageReaderGLOwner : public TextureOwner {
+ public:
+ GLuint GetTextureId() const override;
+ gl::GLContext* GetContext() const override;
+ gl::GLSurface* GetSurface() const override;
+ gl::ScopedJavaSurface CreateJavaSurface() const override;
+ void UpdateTexImage() override;
+ void GetTransformMatrix(float mtx[16]) override;
+ void ReleaseBackBuffers() override;
+ void SetReleaseTimeToNow() override;
+ void IgnorePendingRelease() override;
+ bool IsExpectingFrameAvailable() override;
+ void WaitForFrameAvailable() override;
+
+ private:
+ friend class TextureOwner;
+
+ ImageReaderGLOwner(GLuint texture_id);
+ ~ImageReaderGLOwner() override;
+
+ // AImageReader instance
+ AImageReader* image_reader_;
+
+ // Most recently acquired image using image reader. This works like a cached
+ // image until next new image is acquired which overwrites this.
+ AImage* current_image_;
+ GLuint texture_id_;
+ std::unique_ptr<AImageReader_ImageListener> listener_;
+
+ // reference to the class instance which is used to dynamically
+ // load the functions in android libraries at runtime.
+ base::android::AndroidImageReader& loader_;
+
+ // The context and surface that were used to create |texture_id_|.
+ scoped_refptr<gl::GLContext> context_;
+ scoped_refptr<gl::GLSurface> surface_;
+
+ // When SetReleaseTimeToNow() was last called. i.e., when the last
+ // codec buffer was released to this surface. Or null if
+ // IgnorePendingRelease() or WaitForFrameAvailable() have been called since.
+ base::TimeTicks release_time_;
+ scoped_refptr<FrameAvailableEvent_ImageReader> frame_available_event_;
+
+ THREAD_CHECKER(thread_checker_);
+ DISALLOW_COPY_AND_ASSIGN(ImageReaderGLOwner);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_ANDROID_IMAGE_READER_GL_OWNER_H_
diff --git a/chromium/media/gpu/android/image_reader_gl_owner_unittest.cc b/chromium/media/gpu/android/image_reader_gl_owner_unittest.cc
new file mode 100644
index 00000000000..b03177d8d95
--- /dev/null
+++ b/chromium/media/gpu/android/image_reader_gl_owner_unittest.cc
@@ -0,0 +1,106 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/android/texture_owner.h"
+
+#include <stdint.h>
+#include <memory>
+
+#include "base/message_loop/message_loop.h"
+#include "base/test/scoped_feature_list.h"
+#include "media/base/media_switches.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gl/gl_bindings.h"
+#include "ui/gl/gl_context_egl.h"
+#include "ui/gl/gl_surface_egl.h"
+#include "ui/gl/init/gl_factory.h"
+
+namespace media {
+
+class ImageReaderGLOwnerTest : public testing::Test {
+ public:
+ ImageReaderGLOwnerTest() {}
+ ~ImageReaderGLOwnerTest() override {}
+
+ protected:
+ void SetUp() override {
+ scoped_feature_list_.InitAndEnableFeature(media::kAImageReaderVideoOutput);
+ gl::init::InitializeGLOneOffImplementation(gl::kGLImplementationEGLGLES2,
+ false, false, false, true);
+ surface_ = new gl::PbufferGLSurfaceEGL(gfx::Size(320, 240));
+ surface_->Initialize();
+
+ share_group_ = new gl::GLShareGroup();
+ context_ = new gl::GLContextEGL(share_group_.get());
+ context_->Initialize(surface_.get(), gl::GLContextAttribs());
+ ASSERT_TRUE(context_->MakeCurrent(surface_.get()));
+
+ image_reader_ = TextureOwner::Create();
+ }
+
+ void TearDown() override {
+ image_reader_ = nullptr;
+ context_ = nullptr;
+ share_group_ = nullptr;
+ surface_ = nullptr;
+ gl::init::ShutdownGL(false);
+ }
+
+ base::test::ScopedFeatureList scoped_feature_list_;
+ scoped_refptr<TextureOwner> image_reader_;
+ GLuint texture_id_ = 0;
+
+ scoped_refptr<gl::GLContext> context_;
+ scoped_refptr<gl::GLShareGroup> share_group_;
+ scoped_refptr<gl::GLSurface> surface_;
+ base::MessageLoop message_loop_;
+};
+
+TEST_F(ImageReaderGLOwnerTest, ImageReaderObjectCreation) {
+ ASSERT_TRUE(image_reader_);
+}
+
+TEST_F(ImageReaderGLOwnerTest, ScopedJavaSurfaceCreation) {
+ gl::ScopedJavaSurface temp = image_reader_->CreateJavaSurface();
+ ASSERT_TRUE(temp.IsValid());
+}
+
+// Verify that ImageReaderGLOwner creates a bindable GL texture, and deletes
+// it during destruction.
+TEST_F(ImageReaderGLOwnerTest, GLTextureIsCreatedAndDestroyed) {
+ // |texture_id| should not work anymore after we delete image_reader_.
+ image_reader_ = nullptr;
+ ASSERT_FALSE(glIsTexture(texture_id_));
+}
+
+// Make sure that image_reader_ remembers the correct context and surface.
+TEST_F(ImageReaderGLOwnerTest, ContextAndSurfaceAreCaptured) {
+ ASSERT_EQ(context_, image_reader_->GetContext());
+ ASSERT_EQ(surface_, image_reader_->GetSurface());
+}
+
+// Verify that destruction works even if some other context is current.
+TEST_F(ImageReaderGLOwnerTest, DestructionWorksWithWrongContext) {
+ scoped_refptr<gl::GLSurface> new_surface(
+ new gl::PbufferGLSurfaceEGL(gfx::Size(320, 240)));
+ new_surface->Initialize();
+
+ scoped_refptr<gl::GLShareGroup> new_share_group(new gl::GLShareGroup());
+ scoped_refptr<gl::GLContext> new_context(
+ new gl::GLContextEGL(new_share_group.get()));
+ new_context->Initialize(new_surface.get(), gl::GLContextAttribs());
+ ASSERT_TRUE(new_context->MakeCurrent(new_surface.get()));
+
+ image_reader_ = nullptr;
+ ASSERT_FALSE(glIsTexture(texture_id_));
+
+ // |new_context| should still be current.
+ ASSERT_TRUE(new_context->IsCurrent(new_surface.get()));
+
+ new_context = nullptr;
+ new_share_group = nullptr;
+ new_surface = nullptr;
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.cc b/chromium/media/gpu/android/media_codec_video_decoder.cc
index f7e45e6ef8c..04de2ca939e 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder.cc
@@ -481,6 +481,10 @@ void MediaCodecVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
void MediaCodecVideoDecoder::FlushCodec() {
DVLOG(2) << __func__;
+
+ // If a deferred flush was pending, then it isn't anymore.
+ deferred_flush_pending_ = false;
+
if (!codec_ || codec_->IsFlushed())
return;
@@ -559,7 +563,10 @@ bool MediaCodecVideoDecoder::QueueInput() {
// If the codec is drained, flush it when there is a pending decode and no
// unreleased output buffers. This lets us avoid both unbacking frames when we
// flush, and flushing unnecessarily, like at EOS.
- if (codec_->IsDrained()) {
+ //
+ // Often, we'll elide the eos to drain the codec, but we want to pretend that
+ // we did. In this case, we should also flush.
+ if (codec_->IsDrained() || deferred_flush_pending_) {
if (!codec_->HasUnreleasedOutputBuffers() && !pending_decodes_.empty()) {
FlushCodec();
return true;
@@ -659,8 +666,9 @@ bool MediaCodecVideoDecoder::DequeueOutput() {
}
// If we're draining for reset or destroy we can discard |output_buffer|
- // without rendering it.
- if (drain_type_)
+ // without rendering it. This is also true if we elided the drain itself,
+ // and deferred a flush that would have happened when the drain completed.
+ if (drain_type_ || deferred_flush_pending_)
return true;
// Record the frame type that we're sending and some information about why.
@@ -721,6 +729,16 @@ void MediaCodecVideoDecoder::StartDrainingCodec(DrainType drain_type) {
// the codec isn't already draining.
drain_type_ = drain_type;
+ // We can safely invalidate outstanding buffers for both types of drain, and
+ // doing so can only make the drain complete quicker. Note that we do this
+ // even if we're eliding the drain, since we're either going to flush the
+ // codec or destroy it. While we're not required to do this, it might affect
+ // stability if we don't (https://crbug.com/869365). AVDA, in particular,
+ // dropped all pending codec output buffers when starting a reset (seek) or
+ // a destroy.
+ if (codec_)
+ codec_->DiscardOutputBuffers();
+
// Skip the drain if possible. Only VP8 codecs need draining because
// they can hang in release() or flush() otherwise
// (http://crbug.com/598963).
@@ -728,6 +746,11 @@ void MediaCodecVideoDecoder::StartDrainingCodec(DrainType drain_type) {
// instead. Draining is responsible for a lot of complexity.
if (decoder_config_.codec() != kCodecVP8 || !codec_ || codec_->IsFlushed() ||
codec_->IsDrained()) {
+ // If the codec isn't already drained or flushed, then we have to remember
+ // that we owe it a flush. We also have to remember not to deliver any
+ // output buffers that might still be in progress in the codec.
+ deferred_flush_pending_ =
+ codec_ && !codec_->IsDrained() && !codec_->IsFlushed();
OnCodecDrained();
return;
}
@@ -736,9 +759,6 @@ void MediaCodecVideoDecoder::StartDrainingCodec(DrainType drain_type) {
if (!codec_->IsDraining())
pending_decodes_.push_back(PendingDecode::CreateEos());
- // We can safely invalidate outstanding buffers for both types of drain, and
- // doing so can only make the drain complete quicker.
- codec_->DiscardOutputBuffers();
PumpCodec(true);
}
@@ -754,7 +774,14 @@ void MediaCodecVideoDecoder::OnCodecDrained() {
}
std::move(reset_cb_).Run();
- FlushCodec();
+
+ // Flush the codec unless (a) it's already flushed, (b) it's drained and the
+ // flush will be handled automatically on the next decode, or (c) we've
+ // elided the eos and want to defer the flush.
+ if (codec_ && !codec_->IsFlushed() && !codec_->IsDrained() &&
+ !deferred_flush_pending_) {
+ FlushCodec();
+ }
}
void MediaCodecVideoDecoder::EnterTerminalState(State state) {
@@ -797,7 +824,6 @@ void MediaCodecVideoDecoder::ReleaseCodec() {
}
AndroidOverlayFactoryCB MediaCodecVideoDecoder::CreateOverlayFactoryCb() {
- DCHECK(!overlay_info_.HasValidSurfaceId());
if (!overlay_factory_cb_ || !overlay_info_.HasValidRoutingToken())
return AndroidOverlayFactoryCB();
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.h b/chromium/media/gpu/android/media_codec_video_decoder.h
index 1bdaab20417..4ca9eebd488 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.h
+++ b/chromium/media/gpu/android/media_codec_video_decoder.h
@@ -9,7 +9,7 @@
#include "base/optional.h"
#include "base/threading/thread_checker.h"
#include "base/timer/elapsed_timer.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/android_overlay_mojo_factory.h"
#include "media/base/overlay_info.h"
#include "media/base/video_decoder.h"
@@ -281,6 +281,12 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder
// Do we need a hw-secure codec?
bool requires_secure_codec_ = false;
+ // Should we flush the codec on the next decode, and pretend that it is
+ // drained currently? Note that we'll automatically flush if the codec is
+ // drained; this flag indicates that we also elided the drain, so the codec is
+ // in some random state, possibly with output buffers pending.
+ bool deferred_flush_pending_ = false;
+
// Optional crypto object from the Cdm.
base::android::ScopedJavaGlobalRef<jobject> media_crypto_;
diff --git a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
index a19ca8163f7..d2ce2509e0d 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
@@ -11,7 +11,7 @@
#include "base/test/mock_callback.h"
#include "base/test/scoped_task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/android/media_codec_util.h"
#include "media/base/android/mock_android_overlay.h"
#include "media/base/android/mock_media_crypto_context.h"
diff --git a/chromium/media/gpu/android/surface_texture_gl_owner.cc b/chromium/media/gpu/android/surface_texture_gl_owner.cc
index ef1de92ed72..483934c3af5 100644
--- a/chromium/media/gpu/android/surface_texture_gl_owner.cc
+++ b/chromium/media/gpu/android/surface_texture_gl_owner.cc
@@ -30,24 +30,6 @@ struct FrameAvailableEvent
~FrameAvailableEvent() = default;
};
-scoped_refptr<TextureOwner> SurfaceTextureGLOwner::Create() {
- GLuint texture_id;
- glGenTextures(1, &texture_id);
- if (!texture_id)
- return nullptr;
-
- // Set the parameters on the texture.
- gl::ScopedActiveTexture active_texture(GL_TEXTURE0);
- gl::ScopedTextureBinder texture_binder(GL_TEXTURE_EXTERNAL_OES, texture_id);
- glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- DCHECK_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
-
- return new SurfaceTextureGLOwner(texture_id);
-}
-
SurfaceTextureGLOwner::SurfaceTextureGLOwner(GLuint texture_id)
: surface_texture_(gl::SurfaceTexture::Create(texture_id)),
texture_id_(texture_id),
@@ -74,7 +56,7 @@ SurfaceTextureGLOwner::~SurfaceTextureGLOwner() {
if (!context_->IsCurrent(nullptr)) {
scoped_make_current =
std::make_unique<ui::ScopedMakeCurrent>(context_.get(), surface_.get());
- if (!scoped_make_current->Succeeded())
+ if (!context_->IsCurrent(surface_.get()))
return;
}
@@ -152,7 +134,8 @@ void SurfaceTextureGLOwner::WaitForFrameAvailable() {
}
DCHECK_LE(remaining, max_wait);
- SCOPED_UMA_HISTOGRAM_TIMER("Media.AvdaCodecImage.WaitTimeForFrame");
+ SCOPED_UMA_HISTOGRAM_TIMER(
+ "Media.CodecImage.SurfaceTextureGLOwner.WaitTimeForFrame");
if (!frame_available_event_->event.TimedWait(remaining)) {
DVLOG(1) << "WaitForFrameAvailable() timed out, elapsed: "
<< elapsed.InMillisecondsF()
diff --git a/chromium/media/gpu/android/surface_texture_gl_owner.h b/chromium/media/gpu/android/surface_texture_gl_owner.h
index 5277ef90461..1cc7d2c7ca6 100644
--- a/chromium/media/gpu/android/surface_texture_gl_owner.h
+++ b/chromium/media/gpu/android/surface_texture_gl_owner.h
@@ -16,12 +16,14 @@ namespace media {
struct FrameAvailableEvent;
+// This class wraps the Surface Texture usage. It is used to create a surface
+// texture attached to a new texture of the current platform GL context. The
+// surface handle of the SurfaceTexture is attached to the decoded media
+// frames. Media frames can update the attached surface handle with image data.
+// This class helps to update the attached texture using that image data
+// present in the surface.
class MEDIA_GPU_EXPORT SurfaceTextureGLOwner : public TextureOwner {
public:
- // Creates a GL texture using the current platform GL context and returns a
- // new SurfaceTextureGLOwner attached to it. Returns null on failure.
- static scoped_refptr<TextureOwner> Create();
-
GLuint GetTextureId() const override;
gl::GLContext* GetContext() const override;
gl::GLSurface* GetSurface() const override;
@@ -35,16 +37,16 @@ class MEDIA_GPU_EXPORT SurfaceTextureGLOwner : public TextureOwner {
void WaitForFrameAvailable() override;
private:
+ friend class TextureOwner;
+
SurfaceTextureGLOwner(GLuint texture_id);
~SurfaceTextureGLOwner() override;
scoped_refptr<gl::SurfaceTexture> surface_texture_;
GLuint texture_id_;
-
// The context and surface that were used to create |texture_id_|.
scoped_refptr<gl::GLContext> context_;
scoped_refptr<gl::GLSurface> surface_;
-
// When SetReleaseTimeToNow() was last called. i.e., when the last
// codec buffer was released to this surface. Or null if
// IgnorePendingRelease() or WaitForFrameAvailable() have been called since.
@@ -52,7 +54,6 @@ class MEDIA_GPU_EXPORT SurfaceTextureGLOwner : public TextureOwner {
scoped_refptr<FrameAvailableEvent> frame_available_event_;
THREAD_CHECKER(thread_checker_);
-
DISALLOW_COPY_AND_ASSIGN(SurfaceTextureGLOwner);
};
diff --git a/chromium/media/gpu/android/texture_owner.cc b/chromium/media/gpu/android/texture_owner.cc
index 8b3801f1838..28d0b2c737f 100644
--- a/chromium/media/gpu/android/texture_owner.cc
+++ b/chromium/media/gpu/android/texture_owner.cc
@@ -4,7 +4,13 @@
#include "media/gpu/android/texture_owner.h"
+#include "base/android/android_image_reader_compat.h"
+#include "base/feature_list.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/media_switches.h"
+#include "media/gpu/android/image_reader_gl_owner.h"
+#include "media/gpu/android/surface_texture_gl_owner.h"
+#include "ui/gl/scoped_binders.h"
namespace media {
@@ -15,4 +21,29 @@ TextureOwner::TextureOwner()
TextureOwner::~TextureOwner() = default;
+scoped_refptr<TextureOwner> TextureOwner::Create() {
+ GLuint texture_id;
+ glGenTextures(1, &texture_id);
+ if (!texture_id)
+ return nullptr;
+
+ // Set the parameters on the texture.
+ gl::ScopedActiveTexture active_texture(GL_TEXTURE0);
+ gl::ScopedTextureBinder texture_binder(GL_TEXTURE_EXTERNAL_OES, texture_id);
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ DCHECK_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
+
+ // If AImageReader is supported and is enabled by media flag, use it.
+ if (base::FeatureList::IsEnabled(media::kAImageReaderVideoOutput) &&
+ base::android::AndroidImageReader::GetInstance().IsSupported()) {
+ return new ImageReaderGLOwner(texture_id);
+ }
+
+ // If not, fall back to legacy path.
+ return new SurfaceTextureGLOwner(texture_id);
+}
+
} // namespace media
diff --git a/chromium/media/gpu/android/texture_owner.h b/chromium/media/gpu/android/texture_owner.h
index 8660d783752..fe1584cd682 100644
--- a/chromium/media/gpu/android/texture_owner.h
+++ b/chromium/media/gpu/android/texture_owner.h
@@ -25,6 +25,10 @@ namespace media {
class MEDIA_GPU_EXPORT TextureOwner
: public base::RefCountedDeleteOnSequence<TextureOwner> {
public:
+ // Creates a GL texture using the current platform GL context and returns a
+ // new TextureOwner attached to it. Returns null on failure.
+ static scoped_refptr<TextureOwner> Create();
+
TextureOwner();
scoped_refptr<base::SingleThreadTaskRunner> task_runner() {
@@ -41,6 +45,7 @@ class MEDIA_GPU_EXPORT TextureOwner
// Update the texture image using the latest available image data.
virtual void UpdateTexImage() = 0;
+
// Transformation matrix if any associated with the texture image.
virtual void GetTransformMatrix(float mtx[16]) = 0;
virtual void ReleaseBackBuffers() = 0;
diff --git a/chromium/media/gpu/android/texture_pool.cc b/chromium/media/gpu/android/texture_pool.cc
index efdb4552824..da257c93ee4 100644
--- a/chromium/media/gpu/android/texture_pool.cc
+++ b/chromium/media/gpu/android/texture_pool.cc
@@ -4,12 +4,14 @@
#include "media/gpu/android/texture_pool.h"
+#include "gpu/command_buffer/service/abstract_texture.h"
#include "gpu/command_buffer/service/texture_manager.h"
-#include "media/gpu/android/texture_wrapper.h"
#include "media/gpu/command_buffer_helper.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/scoped_make_current.h"
+using gpu::gles2::AbstractTexture;
+
namespace media {
TexturePool::TexturePool(scoped_refptr<CommandBufferHelper> helper)
@@ -21,36 +23,21 @@ TexturePool::TexturePool(scoped_refptr<CommandBufferHelper> helper)
}
TexturePool::~TexturePool() {
- // Note that the size of |pool_| doesn't, in general, tell us if there are any
- // textures. If the stub has been destroyed, then we will drop the
- // TextureRefs but leave null entries in the map. So, we check |stub_| too.
- if (pool_.size() && helper_) {
- // TODO(liberato): consider using ScopedMakeCurrent here, though if we are
- // ever called as part of decoder teardown, then using ScopedMakeCurrent
- // isn't safe. For now, we preserve the old behavior (MakeCurrent).
- //
- // We check IsContextCurrent, even though that only checks for the
- // underlying shared context if |context| is a virtual context. Assuming
- // that all TextureRef does is to delete a texture, this is enough. Of
- // course, we shouldn't assume that this is all it does.
- bool have_context =
- helper_->IsContextCurrent() || helper_->MakeContextCurrent();
- DestroyAllPlatformTextures(have_context);
- }
+ // We'll drop all textures from the pool, if any. It's okay if we don't have
+ // a current context, since AbstractTexture handles it.
}
-void TexturePool::AddTexture(std::unique_ptr<TextureWrapper> texture) {
+void TexturePool::AddTexture(std::unique_ptr<AbstractTexture> texture) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(pool_.find(texture.get()) == pool_.end());
// Don't permit additions after we've lost the stub.
// TODO(liberato): consider making this fail gracefully. However, nobody
// should be doing this, so for now it's a DCHECK.
DCHECK(helper_);
- TextureWrapper* texture_raw = texture.get();
- pool_[texture_raw] = std::move(texture);
+ pool_.insert(std::move(texture));
}
-void TexturePool::ReleaseTexture(TextureWrapper* texture,
+void TexturePool::ReleaseTexture(AbstractTexture* texture,
const gpu::SyncToken& sync_token) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
@@ -70,49 +57,28 @@ void TexturePool::ReleaseTexture(TextureWrapper* texture,
scoped_refptr<TexturePool>(this), texture));
}
-void TexturePool::OnSyncTokenReleased(TextureWrapper* texture) {
+void TexturePool::OnSyncTokenReleased(AbstractTexture* texture) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
auto iter = pool_.find(texture);
DCHECK(iter != pool_.end());
- // If we can't make the context current, then notify the texture. Note that
- // the wrapper might already have been destroyed, which is fine. We elide
- // the MakeContextCurrent if our underlying physical context is current, which
- // only works if we don't do much besides delete the texture.
- bool have_context =
- helper_ && (helper_->IsContextCurrent() || helper_->MakeContextCurrent());
- if (iter->second && !have_context)
- texture->ForceContextLost();
+ // Drop the texture. This is safe without the context being current. It's
+ // also safe if the stub has been destroyed.
pool_.erase(iter);
}
-void TexturePool::DestroyAllPlatformTextures(bool have_context) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-
- // Destroy the wrapper, but keep the entry around in the map. We do this so
- // that ReleaseTexture can still check that at least the texture was, at some
- // point, in the map. Hopefully, since nobody should be adding textures to
- // the pool after we've lost the stub, there's no issue with aliasing if the
- // ptr is re-used; it won't be given to us, so it's okay.
- for (auto& it : pool_) {
- std::unique_ptr<TextureWrapper> texture = std::move(it.second);
- if (!texture)
- continue;
-
- // If we can't make the context current, then notify all the textures that
- // they can't delete the underlying platform textures.
- if (!have_context)
- texture->ForceContextLost();
-
- // |texture| will be destroyed.
- }
-}
-
void TexturePool::OnWillDestroyStub(bool have_context) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(helper_);
- DestroyAllPlatformTextures(have_context);
+ // TODO(liberato): Should we drop all unrendered codec buffers here? It seems
+ // like a good idea, just to release the resources. However, they won't block
+ // decoding, since decoding requires the stub too. More generally, it might
+ // be worthwhile to have a callback on AbstractTexture that's called when it
+ // transitions to not owning a texture.
+
+ // Note that we don't have to do anything with |pool_|, since AbstractTextures
+ // can outlive the stub that created them. They just don't have a texture.
helper_ = nullptr;
}
diff --git a/chromium/media/gpu/android/texture_pool.h b/chromium/media/gpu/android/texture_pool.h
index 8db87c3e159..0b9651a4400 100644
--- a/chromium/media/gpu/android/texture_pool.h
+++ b/chromium/media/gpu/android/texture_pool.h
@@ -5,8 +5,8 @@
#ifndef MEDIA_GPU_ANDROID_TEXTURE_POOL_H_
#define MEDIA_GPU_ANDROID_TEXTURE_POOL_H_
-#include <map>
-
+#include "base/containers/flat_set.h"
+#include "base/containers/unique_ptr_adapters.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
@@ -14,10 +14,15 @@
#include "gpu/ipc/service/command_buffer_stub.h"
#include "media/gpu/media_gpu_export.h"
+namespace gpu {
+namespace gles2 {
+class AbstractTexture;
+} // namespace gles2
+} // namespace gpu
+
namespace media {
class CommandBufferHelper;
-class TextureWrapper;
// Owns Textures that are used to hold decoded video frames. Allows them to
// outlive the decoder that created them, since decoders are torn down when the
@@ -35,39 +40,34 @@ class MEDIA_GPU_EXPORT TexturePool : public base::RefCounted<TexturePool> {
// Note that if we were really a pool this would mean "add |texture| into the
// pool of available textures". There would be some other call to allocate
// a texture from the pool.
- void AddTexture(std::unique_ptr<TextureWrapper> texture);
+ void AddTexture(std::unique_ptr<gpu::gles2::AbstractTexture> texture);
// Release a texture back into the pool. |texture| must have been added to
// the pool previously, and not released. Otherwise, this is undefined.
// Note: since we don't actually pool things, this just forgets |texture|.
// It's okay if this is called after we've lost |stub_|. If |sync_token| is
// not null, then we'll wait for that token before taking any action.
- void ReleaseTexture(TextureWrapper* texture,
+ void ReleaseTexture(gpu::gles2::AbstractTexture* texture,
const gpu::SyncToken& sync_token);
protected:
virtual ~TexturePool();
// Called after a sync token has been released, to free |texture|.
- void OnSyncTokenReleased(TextureWrapper* texture);
+ void OnSyncTokenReleased(gpu::gles2::AbstractTexture* texture);
// Called when |stub_| notifies us that the underlying stub will be destroyed.
void OnWillDestroyStub(bool have_context);
- // When called, we will destroy any platform textures if we have a context,
- // or mark them as "lost context" if we don't. This will not actually remove
- // entries in |pool_|, but will instead clear the unique_ptr to delete the
- // texture. Assuming that nobody adds textures after our stub is destroyed,
- // this is still alias-free.
- void DestroyAllPlatformTextures(bool have_context);
-
private:
friend class base::RefCounted<TexturePool>;
THREAD_CHECKER(thread_checker_);
scoped_refptr<CommandBufferHelper> helper_;
- std::map<TextureWrapper*, std::unique_ptr<TextureWrapper>> pool_;
+ base::flat_set<std::unique_ptr<gpu::gles2::AbstractTexture>,
+ base::UniquePtrComparator>
+ pool_;
base::WeakPtrFactory<TexturePool> weak_factory_;
};
diff --git a/chromium/media/gpu/android/texture_pool_unittest.cc b/chromium/media/gpu/android/texture_pool_unittest.cc
index d0d20d35e72..762b6e99c0b 100644
--- a/chromium/media/gpu/android/texture_pool_unittest.cc
+++ b/chromium/media/gpu/android/texture_pool_unittest.cc
@@ -12,27 +12,38 @@
#include "base/threading/thread_task_runner_handle.h"
#include "gpu/command_buffer/common/command_buffer_id.h"
#include "gpu/command_buffer/common/constants.h"
+#include "gpu/command_buffer/service/abstract_texture.h"
#include "gpu/command_buffer/service/sequence_id.h"
#include "gpu/ipc/common/gpu_messages.h"
-#include "media/gpu/android/texture_wrapper.h"
#include "media/gpu/fake_command_buffer_helper.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
+using gpu::gles2::AbstractTexture;
using testing::_;
using testing::NiceMock;
using testing::Return;
// SupportsWeakPtr so it's easy to tell when it has been destroyed.
-class MockTextureWrapper : public NiceMock<TextureWrapper>,
- public base::SupportsWeakPtr<MockTextureWrapper> {
+class MockAbstractTexture : public NiceMock<AbstractTexture>,
+ public base::SupportsWeakPtr<MockAbstractTexture> {
public:
- MockTextureWrapper() {}
- ~MockTextureWrapper() override {}
+ MockAbstractTexture() {}
+ ~MockAbstractTexture() override {}
MOCK_METHOD0(ForceContextLost, void());
+ MOCK_CONST_METHOD0(GetTextureBase, gpu::TextureBase*());
+ MOCK_METHOD2(SetParameteri, void(GLenum pname, GLint param));
+ MOCK_METHOD2(BindStreamTextureImage,
+ void(gpu::gles2::GLStreamTextureImage* image,
+ GLuint service_id));
+ MOCK_METHOD2(BindImage, void(gl::GLImage* image, bool client_managed));
+ MOCK_METHOD0(ReleaseImage, void());
+ MOCK_CONST_METHOD0(GetImage, gl::GLImage*());
+ MOCK_METHOD0(SetCleared, void());
+ MOCK_METHOD1(SetCleanupCallback, void(CleanupCallback));
};
class TexturePoolTest : public testing::Test {
@@ -52,11 +63,11 @@ class TexturePoolTest : public testing::Test {
base::RunLoop().RunUntilIdle();
}
- using WeakTexture = base::WeakPtr<MockTextureWrapper>;
+ using WeakTexture = base::WeakPtr<MockAbstractTexture>;
WeakTexture CreateAndAddTexture() {
- std::unique_ptr<MockTextureWrapper> texture =
- std::make_unique<MockTextureWrapper>();
+ std::unique_ptr<MockAbstractTexture> texture =
+ std::make_unique<MockAbstractTexture>();
WeakTexture texture_weak = texture->AsWeakPtr();
texture_pool_->AddTexture(std::move(texture));
@@ -77,8 +88,6 @@ class TexturePoolTest : public testing::Test {
TEST_F(TexturePoolTest, AddAndReleaseTexturesWithContext) {
// Test that adding then deleting a texture destroys it.
WeakTexture texture = CreateAndAddTexture();
- // The texture should not be notified that the context was lost.
- EXPECT_CALL(*texture.get(), ForceContextLost()).Times(0);
texture_pool_->ReleaseTexture(texture.get(), sync_token_);
// The texture should still exist until the sync token is cleared.
@@ -92,11 +101,10 @@ TEST_F(TexturePoolTest, AddAndReleaseTexturesWithContext) {
}
TEST_F(TexturePoolTest, AddAndReleaseTexturesWithoutContext) {
- // Test that adding then deleting a texture destroys it, and marks that the
- // context is lost, if the context can't be made current.
+ // Test that adding then deleting a texture destroys it, even if the context
+ // was lost.
WeakTexture texture = CreateAndAddTexture();
helper_->ContextLost();
- EXPECT_CALL(*texture, ForceContextLost()).Times(1);
texture_pool_->ReleaseTexture(texture.get(), sync_token_);
ASSERT_TRUE(texture);
@@ -105,53 +113,6 @@ TEST_F(TexturePoolTest, AddAndReleaseTexturesWithoutContext) {
ASSERT_FALSE(texture);
}
-TEST_F(TexturePoolTest, TexturesAreReleasedOnStubDestructionWithContext) {
- // Add multiple textures, and test that they're all destroyed when the stub
- // says that it's destroyed.
- std::vector<TextureWrapper*> raw_textures;
- std::vector<WeakTexture> textures;
-
- for (int i = 0; i < 3; i++) {
- textures.push_back(CreateAndAddTexture());
- raw_textures.push_back(textures.back().get());
- // The context should not be lost.
- EXPECT_CALL(*textures.back(), ForceContextLost()).Times(0);
- }
-
- helper_->StubLost();
-
- // TextureWrappers should be destroyed.
- for (auto& texture : textures)
- ASSERT_FALSE(texture);
-
- // It should be okay to release the textures after they're destroyed, and
- // nothing should crash.
- for (auto* raw_texture : raw_textures)
- texture_pool_->ReleaseTexture(raw_texture, sync_token_);
-}
-
-TEST_F(TexturePoolTest, TexturesAreReleasedOnStubDestructionWithoutContext) {
- std::vector<TextureWrapper*> raw_textures;
- std::vector<WeakTexture> textures;
-
- for (int i = 0; i < 3; i++) {
- textures.push_back(CreateAndAddTexture());
- raw_textures.push_back(textures.back().get());
- EXPECT_CALL(*textures.back(), ForceContextLost()).Times(1);
- }
-
- helper_->ContextLost();
- helper_->StubLost();
-
- for (auto& texture : textures)
- ASSERT_FALSE(texture);
-
- // It should be okay to release the textures after they're destroyed, and
- // nothing should crash.
- for (auto* raw_texture : raw_textures)
- texture_pool_->ReleaseTexture(raw_texture, sync_token_);
-}
-
TEST_F(TexturePoolTest, NonEmptyPoolAfterStubDestructionDoesntCrash) {
// Make sure that we can delete the stub, and verify that pool teardown still
// works (doesn't crash) even though the pool is not empty.
diff --git a/chromium/media/gpu/android/texture_wrapper.cc b/chromium/media/gpu/android/texture_wrapper.cc
deleted file mode 100644
index cb7ce99fcf3..00000000000
--- a/chromium/media/gpu/android/texture_wrapper.cc
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/texture_wrapper.h"
-
-#include "gpu/command_buffer/service/texture_manager.h"
-
-namespace media {
-
-TextureWrapperImpl::TextureWrapperImpl(
- scoped_refptr<gpu::gles2::TextureRef> texture_ref)
- : texture_ref_(std::move(texture_ref)) {}
-
-TextureWrapperImpl::~TextureWrapperImpl() {}
-
-void TextureWrapperImpl::ForceContextLost() {
- if (texture_ref_)
- texture_ref_->ForceContextLost();
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/texture_wrapper.h b/chromium/media/gpu/android/texture_wrapper.h
deleted file mode 100644
index eeb77f0578b..00000000000
--- a/chromium/media/gpu/android/texture_wrapper.h
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_TEXTURE_WRAPPER_H_
-#define MEDIA_GPU_ANDROID_TEXTURE_WRAPPER_H_
-
-#include "base/memory/scoped_refptr.h"
-
-namespace gpu {
-namespace gles2 {
-class TextureRef;
-} // namespace gles2
-} // namespace gpu
-
-namespace media {
-
-// Temporary class to allow mocking a TextureRef, which has no virtual methods.
-// It is expected that this will be replaced by gpu::gles2::AbstractTexture in
-// the near future, will will support mocks directly.
-class TextureWrapper {
- public:
- virtual ~TextureWrapper() = default;
- virtual void ForceContextLost() = 0;
-};
-
-// Since these are temporary classes, the impl might as well go in the same
-// file for easier cleanup later.
-class TextureWrapperImpl : public TextureWrapper {
- public:
- TextureWrapperImpl(scoped_refptr<gpu::gles2::TextureRef> texture_ref);
- ~TextureWrapperImpl() override;
-
- // TextureWrapper
- void ForceContextLost() override;
-
- private:
- scoped_refptr<gpu::gles2::TextureRef> texture_ref_;
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_TEXTURE_WRAPPER_H_
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.cc b/chromium/media/gpu/android/video_frame_factory_impl.cc
index e83f0d65ebd..698d74307d5 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl.cc
@@ -22,7 +22,6 @@
#include "media/gpu/android/codec_image_group.h"
#include "media/gpu/android/codec_wrapper.h"
#include "media/gpu/android/texture_pool.h"
-#include "media/gpu/android/texture_wrapper.h"
#include "media/gpu/command_buffer_helper.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
#include "ui/gl/android/surface_texture.h"
@@ -37,6 +36,8 @@ bool MakeContextCurrent(gpu::CommandBufferStub* stub) {
} // namespace
+using gpu::gles2::AbstractTexture;
+
VideoFrameFactoryImpl::VideoFrameFactoryImpl(
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
GetStubCb get_stub_cb)
@@ -139,7 +140,7 @@ scoped_refptr<TextureOwner> GpuVideoFrameFactory::Initialize(
texture_pool_ = new TexturePool(CommandBufferHelper::Create(stub_));
decoder_helper_ = GLES2DecoderHelper::Create(stub_->decoder_context());
- return SurfaceTextureGLOwner::Create();
+ return TextureOwner::Create();
}
void GpuVideoFrameFactory::CreateVideoFrame(
@@ -152,28 +153,44 @@ void GpuVideoFrameFactory::CreateVideoFrame(
scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
scoped_refptr<VideoFrame> frame;
- scoped_refptr<gpu::gles2::TextureRef> texture_ref;
+ std::unique_ptr<AbstractTexture> texture;
+ CodecImage* codec_image = nullptr;
CreateVideoFrameInternal(std::move(output_buffer), std::move(texture_owner_),
timestamp, natural_size,
- std::move(promotion_hint_cb), &frame, &texture_ref);
- if (!frame || !texture_ref)
+ std::move(promotion_hint_cb), &frame, &texture,
+ &codec_image);
+ if (!frame || !texture)
return;
// Try to render this frame if possible.
internal::MaybeRenderEarly(&images_);
- std::unique_ptr<TextureWrapper> texture_wrapper =
- std::make_unique<TextureWrapperImpl>(std::move(texture_ref));
+ // Callback to notify us when |texture| is going to drop its ref to the
+ // underlying texture. This happens when we (a) are notified that |frame|
+ // has been released by the renderer and the sync token has cleared, or (b)
+ // when the stub is destroyed. In the former case, we want to release any
+ // codec resources as quickly as possible so that we can re-use them. In
+ // the latter case, decoding has stopped and we want to release any buffers
+ // so that the MediaCodec instance can clean up. Note that the texture will
+ // remain renderable, but it won't necessarily refer to the frame it was
+ // supposed to; it'll be the most recently rendered frame.
+ auto cleanup_cb = base::BindOnce([](AbstractTexture* texture) {
+ gl::GLImage* image = texture->GetImage();
+ if (image)
+ static_cast<CodecImage*>(image)->ReleaseCodecBuffer();
+ });
+ texture->SetCleanupCallback(std::move(cleanup_cb));
+
// Note that this keeps the pool around while any texture is.
auto drop_texture_ref = base::BindOnce(
- [](scoped_refptr<TexturePool> texture_pool,
- TextureWrapper* texture_wrapper, const gpu::SyncToken& sync_token) {
- texture_pool->ReleaseTexture(texture_wrapper, sync_token);
+ [](scoped_refptr<TexturePool> texture_pool, AbstractTexture* texture,
+ const gpu::SyncToken& sync_token) {
+ texture_pool->ReleaseTexture(texture, sync_token);
},
- texture_pool_, base::Unretained(texture_wrapper.get()));
- texture_pool_->AddTexture(std::move(texture_wrapper));
+ texture_pool_, base::Unretained(texture.get()));
+ texture_pool_->AddTexture(std::move(texture));
- // Guarantee that the TextureRef is released even if the VideoFrame is
+ // Guarantee that the AbstractTexture is released even if the VideoFrame is
// dropped. Otherwise we could keep TextureRefs we don't need alive.
auto release_cb = mojo::WrapCallbackWithDefaultInvokeIfNotRun(
BindToCurrentLoop(std::move(drop_texture_ref)), gpu::SyncToken());
@@ -188,7 +205,8 @@ void GpuVideoFrameFactory::CreateVideoFrameInternal(
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
scoped_refptr<VideoFrame>* video_frame_out,
- scoped_refptr<gpu::gles2::TextureRef>* texture_ref_out) {
+ std::unique_ptr<AbstractTexture>* texture_out,
+ CodecImage** codec_image_out) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
if (!MakeContextCurrent(stub_))
return;
@@ -214,36 +232,34 @@ void GpuVideoFrameFactory::CreateVideoFrameInternal(
}
// Create a Texture and a CodecImage to back it.
- scoped_refptr<gpu::gles2::TextureRef> texture_ref =
- decoder_helper_->CreateTexture(GL_TEXTURE_EXTERNAL_OES, GL_RGBA,
- size.width(), size.height(), GL_RGBA,
- GL_UNSIGNED_BYTE);
+ std::unique_ptr<AbstractTexture> texture = decoder_helper_->CreateTexture(
+ GL_TEXTURE_EXTERNAL_OES, GL_RGBA, size.width(), size.height(), GL_RGBA,
+ GL_UNSIGNED_BYTE);
auto image = base::MakeRefCounted<CodecImage>(
std::move(output_buffer), texture_owner_, std::move(promotion_hint_cb));
images_.push_back(image.get());
+ *codec_image_out = image.get();
- // Add |image| to our current image group. This makes suer that any overlay
+ // Add |image| to our current image group. This makes sure that any overlay
// lasts as long as the images. For TextureOwner, it doesn't do much.
image_group_->AddCodecImage(image.get());
// Attach the image to the texture.
- // If we're attaching a TextureOwner backed image, we set the state to
- // UNBOUND. This ensures that the implementation will call CopyTexImage()
- // which lets us update the texture owner at the right time.
- // For overlays we set the state to BOUND because it's required for
- // ScheduleOverlayPlane() to be called. If something tries to sample from an
- // overlay texture it won't work, but there's no way to make that work.
- auto image_state = texture_owner_ ? gpu::gles2::Texture::UNBOUND
- : gpu::gles2::Texture::BOUND;
+ // Either way, we expect this to be UNBOUND (i.e., decoder-managed). For
+ // overlays, BindTexImage will return true, causing it to transition to the
+ // BOUND state, and thus receive ScheduleOverlayPlane calls. For TextureOwner
+ // backed images, BindTexImage will return false, and CopyTexImage will be
+ // tried next.
+ // TODO(liberato): consider not binding this as a StreamTextureImage if we're
+ // using an overlay. There's no advantage. We'd likely want to create (and
+ // initialize to a 1x1 texture) a 2D texture above in that case, in case
+ // somebody tries to sample from it. Be sure that promotion hints still
+ // work properly, though -- they might require a stream texture image.
GLuint texture_owner_service_id =
texture_owner_ ? texture_owner_->GetTextureId() : 0;
- texture_manager->SetLevelStreamTextureImage(
- texture_ref.get(), GL_TEXTURE_EXTERNAL_OES, 0, image.get(), image_state,
- texture_owner_service_id);
- texture_manager->SetLevelCleared(texture_ref.get(), GL_TEXTURE_EXTERNAL_OES,
- 0, true);
+ texture->BindStreamTextureImage(image.get(), texture_owner_service_id);
- gpu::Mailbox mailbox = decoder_helper_->CreateMailbox(texture_ref.get());
+ gpu::Mailbox mailbox = decoder_helper_->CreateMailbox(texture.get());
gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes];
mailbox_holders[0] =
gpu::MailboxHolder(mailbox, gpu::SyncToken(), GL_TEXTURE_EXTERNAL_OES);
@@ -270,7 +286,7 @@ void GpuVideoFrameFactory::CreateVideoFrameInternal(
!!texture_owner_);
*video_frame_out = std::move(frame);
- *texture_ref_out = std::move(texture_ref);
+ *texture_out = std::move(texture);
}
void GpuVideoFrameFactory::OnWillDestroyStub(bool have_context) {
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.h b/chromium/media/gpu/android/video_frame_factory_impl.h
index db6f659784c..b1ba011db89 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.h
+++ b/chromium/media/gpu/android/video_frame_factory_impl.h
@@ -7,6 +7,7 @@
#include "base/optional.h"
#include "base/single_thread_task_runner.h"
+#include "gpu/command_buffer/service/abstract_texture.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/command_buffer/service/texture_manager.h"
#include "gpu/ipc/service/command_buffer_stub.h"
@@ -88,7 +89,7 @@ class GpuVideoFrameFactory
void SetImageGroup(scoped_refptr<CodecImageGroup> image_group);
private:
- // Creates a TextureRef and VideoFrame.
+ // Creates an AbstractTexture and VideoFrame.
void CreateVideoFrameInternal(
std::unique_ptr<CodecOutputBuffer> output_buffer,
scoped_refptr<TextureOwner> texture_owner,
@@ -96,7 +97,8 @@ class GpuVideoFrameFactory
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
scoped_refptr<VideoFrame>* video_frame_out,
- scoped_refptr<gpu::gles2::TextureRef>* texture_ref_out);
+ std::unique_ptr<gpu::gles2::AbstractTexture>* texture_out,
+ CodecImage** codec_image_out);
void OnWillDestroyStub(bool have_context) override;
diff --git a/chromium/media/gpu/codec_picture.h b/chromium/media/gpu/codec_picture.h
index 15020236433..89eba81f512 100644
--- a/chromium/media/gpu/codec_picture.h
+++ b/chromium/media/gpu/codec_picture.h
@@ -10,6 +10,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "media/base/decrypt_config.h"
+#include "media/base/video_color_space.h"
#include "media/gpu/media_gpu_export.h"
#include "ui/gfx/geometry/rect.h"
@@ -40,6 +41,10 @@ class MEDIA_GPU_EXPORT CodecPicture
decrypt_config_ = std::move(config);
}
+ // Populate with an unspecified colorspace by default.
+ VideoColorSpace get_colorspace() const { return colorspace_; }
+ void set_colorspace(VideoColorSpace colorspace) { colorspace_ = colorspace; }
+
protected:
friend class base::RefCountedThreadSafe<CodecPicture>;
virtual ~CodecPicture();
@@ -48,6 +53,7 @@ class MEDIA_GPU_EXPORT CodecPicture
int32_t bitstream_id_ = -1;
gfx::Rect visible_rect_;
std::unique_ptr<DecryptConfig> decrypt_config_;
+ VideoColorSpace colorspace_;
DISALLOW_COPY_AND_ASSIGN(CodecPicture);
};
diff --git a/chromium/media/gpu/command_buffer_helper.cc b/chromium/media/gpu/command_buffer_helper.cc
index 106cbab8a79..e39d291fe0d 100644
--- a/chromium/media/gpu/command_buffer_helper.cc
+++ b/chromium/media/gpu/command_buffer_helper.cc
@@ -79,11 +79,11 @@ class CommandBufferHelperImpl
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(stub_->decoder_context()->GetGLContext()->IsCurrent(nullptr));
- scoped_refptr<gpu::gles2::TextureRef> texture_ref =
+ std::unique_ptr<gpu::gles2::AbstractTexture> texture =
decoder_helper_->CreateTexture(target, internal_format, width, height,
format, type);
- GLuint service_id = texture_ref->service_id();
- texture_refs_[service_id] = std::move(texture_ref);
+ GLuint service_id = texture->service_id();
+ textures_[service_id] = std::move(texture);
return service_id;
}
@@ -91,34 +91,27 @@ class CommandBufferHelperImpl
DVLOG(2) << __func__ << "(" << service_id << ")";
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(stub_->decoder_context()->GetGLContext()->IsCurrent(nullptr));
- DCHECK(texture_refs_.count(service_id));
+ DCHECK(textures_.count(service_id));
- texture_refs_.erase(service_id);
+ textures_.erase(service_id);
}
void SetCleared(GLuint service_id) override {
DVLOG(2) << __func__ << "(" << service_id << ")";
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- if (!decoder_helper_)
- return;
-
- DCHECK(texture_refs_.count(service_id));
- decoder_helper_->SetCleared(texture_refs_[service_id].get());
+ DCHECK(textures_.count(service_id));
+ textures_[service_id]->SetCleared();
}
bool BindImage(GLuint service_id,
gl::GLImage* image,
- bool can_bind_to_sampler) override {
+ bool client_managed) override {
DVLOG(2) << __func__ << "(" << service_id << ")";
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- if (!decoder_helper_)
- return false;
-
- DCHECK(texture_refs_.count(service_id));
- decoder_helper_->BindImage(texture_refs_[service_id].get(), image,
- can_bind_to_sampler);
+ DCHECK(textures_.count(service_id));
+ textures_[service_id]->BindImage(image, client_managed);
return true;
}
@@ -129,8 +122,8 @@ class CommandBufferHelperImpl
if (!decoder_helper_)
return gpu::Mailbox();
- DCHECK(texture_refs_.count(service_id));
- return decoder_helper_->CreateMailbox(texture_refs_[service_id].get());
+ DCHECK(textures_.count(service_id));
+ return decoder_helper_->CreateMailbox(textures_[service_id].get());
}
void WaitForSyncToken(gpu::SyncToken sync_token,
@@ -159,33 +152,14 @@ class CommandBufferHelperImpl
DVLOG(1) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- if (!stub_)
- return;
-
- // Try to drop TextureRefs with the context current, so that the platform
- // textures can be deleted.
- //
- // Note: Since we don't know what stack we are on, it might not be safe to
- // change the context. In practice we can be reasonably sure that our last
- // owner isn't doing work in a different context.
- //
- // TODO(sandersd): We should restore the previous context.
- if (!texture_refs_.empty() && MakeContextCurrent())
- texture_refs_.clear();
-
- DestroyStub();
+ if (stub_)
+ DestroyStub();
}
void OnWillDestroyStub(bool have_context) override {
DVLOG(1) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- // If we don't have a context, then tell the textures.
- if (!have_context) {
- for (auto iter : texture_refs_)
- iter.second->ForceContextLost();
- }
-
// In case |will_destroy_stub_cb_| drops the last reference to |this|, make
// sure that we're around a bit longer.
scoped_refptr<CommandBufferHelper> thiz(this);
@@ -193,10 +167,6 @@ class CommandBufferHelperImpl
if (will_destroy_stub_cb_)
std::move(will_destroy_stub_cb_).Run(have_context);
- // OnWillDestroyStub() is called with the context current if possible. Drop
- // the TextureRefs now while the platform textures can still be deleted.
- texture_refs_.clear();
-
DestroyStub();
}
@@ -204,6 +174,10 @@ class CommandBufferHelperImpl
DVLOG(3) << __func__;
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ // Drop all textures. Note that it's okay if the context isn't current,
+ // since AbstractTexture handles that case.
+ textures_.clear();
+
decoder_helper_ = nullptr;
// If the last reference to |this| is in a |done_cb|, destroying the wait
@@ -222,7 +196,7 @@ class CommandBufferHelperImpl
gpu::SequenceId wait_sequence_id_;
// TODO(sandersd): Merge GLES2DecoderHelper implementation into this class.
std::unique_ptr<GLES2DecoderHelper> decoder_helper_;
- std::map<GLuint, scoped_refptr<gpu::gles2::TextureRef>> texture_refs_;
+ std::map<GLuint, std::unique_ptr<gpu::gles2::AbstractTexture>> textures_;
WillDestroyStubCB will_destroy_stub_cb_;
diff --git a/chromium/media/gpu/command_buffer_helper.h b/chromium/media/gpu/command_buffer_helper.h
index 8adfee188b5..8bd44e32ec6 100644
--- a/chromium/media/gpu/command_buffer_helper.h
+++ b/chromium/media/gpu/command_buffer_helper.h
@@ -63,6 +63,9 @@ class MEDIA_GPU_EXPORT CommandBufferHelper
// edge. If |target| is GL_TEXTURE_2D, storage will be allocated but not
// initialized.
//
+ // It is up to the caller to initialize the texture before providing it to the
+ // renderer, else the results are undefined.
+ //
// The context must be current.
//
// TODO(sandersd): Is really necessary to allocate storage? GpuVideoDecoder
@@ -84,15 +87,12 @@ class MEDIA_GPU_EXPORT CommandBufferHelper
// Binds level 0 of the texture to an image.
//
- // If the sampler binding already exists, set |can_bind_to_sampler| to true.
+ // If the sampler binding already exists, set |client_managed| to true.
// Otherwise set it to false, and BindTexImage()/CopyTexImage() will be called
// when the texture is used.
- //
- // TODO(sandersd): Should we expose ImageState directly, rather than
- // |can_bind_to_sampler|?
virtual bool BindImage(GLuint service_id,
gl::GLImage* image,
- bool can_bind_to_sampler) = 0;
+ bool client_managed) = 0;
// Creates a mailbox for a texture.
//
diff --git a/chromium/media/gpu/fake_command_buffer_helper.cc b/chromium/media/gpu/fake_command_buffer_helper.cc
index 1851d810c59..cb54b700136 100644
--- a/chromium/media/gpu/fake_command_buffer_helper.cc
+++ b/chromium/media/gpu/fake_command_buffer_helper.cc
@@ -106,7 +106,7 @@ void FakeCommandBufferHelper::SetCleared(GLuint service_id) {
bool FakeCommandBufferHelper::BindImage(GLuint service_id,
gl::GLImage* image,
- bool can_bind_to_sampler) {
+ bool client_managed) {
DVLOG(2) << __func__ << "(" << service_id << ")";
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(service_ids_.count(service_id));
diff --git a/chromium/media/gpu/fake_command_buffer_helper.h b/chromium/media/gpu/fake_command_buffer_helper.h
index d3c0169d003..da97e4c17b0 100644
--- a/chromium/media/gpu/fake_command_buffer_helper.h
+++ b/chromium/media/gpu/fake_command_buffer_helper.h
@@ -51,7 +51,7 @@ class FakeCommandBufferHelper : public CommandBufferHelper {
void SetCleared(GLuint service_id) override;
bool BindImage(GLuint service_id,
gl::GLImage* image,
- bool can_bind_to_sampler) override;
+ bool client_managed) override;
gpu::Mailbox CreateMailbox(GLuint service_id) override;
void WaitForSyncToken(gpu::SyncToken sync_token,
base::OnceClosure done_cb) override;
diff --git a/chromium/media/gpu/fake_jpeg_decode_accelerator.cc b/chromium/media/gpu/fake_jpeg_decode_accelerator.cc
index cfaba2b7bb8..f8777a90945 100644
--- a/chromium/media/gpu/fake_jpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/fake_jpeg_decode_accelerator.cc
@@ -7,7 +7,7 @@
#include "base/bind.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
-#include "media/gpu/shared_memory_region.h"
+#include "media/base/unaligned_shared_memory.h"
namespace media {
@@ -41,10 +41,13 @@ void FakeJpegDecodeAccelerator::Decode(
const scoped_refptr<VideoFrame>& video_frame) {
DCHECK(io_task_runner_->BelongsToCurrentThread());
- // SharedMemoryRegion will take over the |bitstream_buffer.handle()|.
- std::unique_ptr<SharedMemoryRegion> src_shm(
- new SharedMemoryRegion(bitstream_buffer, true));
- if (!src_shm->Map()) {
+ std::unique_ptr<WritableUnalignedMapping> src_shm(
+ new WritableUnalignedMapping(bitstream_buffer.handle(),
+ bitstream_buffer.size(),
+ bitstream_buffer.offset()));
+ // The handle is no longer needed.
+ bitstream_buffer.handle().Close();
+ if (!src_shm->IsValid()) {
DLOG(ERROR) << "Unable to map shared memory in FakeJpegDecodeAccelerator";
NotifyError(bitstream_buffer.id(), JpegDecodeAccelerator::UNREADABLE_INPUT);
return;
@@ -60,7 +63,7 @@ void FakeJpegDecodeAccelerator::Decode(
void FakeJpegDecodeAccelerator::DecodeOnDecoderThread(
const BitstreamBuffer& bitstream_buffer,
const scoped_refptr<VideoFrame>& video_frame,
- std::unique_ptr<SharedMemoryRegion> src_shm) {
+ std::unique_ptr<WritableUnalignedMapping> src_shm) {
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
// Do not actually decode the Jpeg data.
diff --git a/chromium/media/gpu/fake_jpeg_decode_accelerator.h b/chromium/media/gpu/fake_jpeg_decode_accelerator.h
index 0a25dbb729c..25273bd0558 100644
--- a/chromium/media/gpu/fake_jpeg_decode_accelerator.h
+++ b/chromium/media/gpu/fake_jpeg_decode_accelerator.h
@@ -22,8 +22,6 @@ class SingleThreadTaskRunner;
namespace media {
-class SharedMemoryRegion;
-
// Uses software-based decoding. The purpose of this class is to enable testing
// of communication to the JpegDecodeAccelerator without requiring an actual
// hardware decoder.
@@ -43,7 +41,7 @@ class MEDIA_GPU_EXPORT FakeJpegDecodeAccelerator
private:
void DecodeOnDecoderThread(const BitstreamBuffer& bitstream_buffer,
const scoped_refptr<VideoFrame>& video_frame,
- std::unique_ptr<SharedMemoryRegion> src_shm);
+ std::unique_ptr<WritableUnalignedMapping> src_shm);
void NotifyError(int32_t bitstream_buffer_id, Error error);
void NotifyErrorOnClientThread(int32_t bitstream_buffer_id, Error error);
void OnDecodeDoneOnClientThread(int32_t input_buffer_id);
diff --git a/chromium/media/gpu/gles2_decoder_helper.cc b/chromium/media/gpu/gles2_decoder_helper.cc
index 551d335f181..a789e5e1ee7 100644
--- a/chromium/media/gpu/gles2_decoder_helper.cc
+++ b/chromium/media/gpu/gles2_decoder_helper.cc
@@ -17,6 +17,8 @@
#include "gpu/command_buffer/service/texture_manager.h"
#include "ui/gl/gl_context.h"
+using gpu::gles2::AbstractTexture;
+
namespace media {
class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
@@ -37,60 +39,29 @@ class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
return decoder_->MakeCurrent();
}
- scoped_refptr<gpu::gles2::TextureRef> CreateTexture(GLenum target,
- GLenum internal_format,
- GLsizei width,
- GLsizei height,
- GLenum format,
- GLenum type) override {
+ std::unique_ptr<AbstractTexture> CreateTexture(GLenum target,
+ GLenum internal_format,
+ GLsizei width,
+ GLsizei height,
+ GLenum format,
+ GLenum type) override {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(decoder_->GetGLContext()->IsCurrent(nullptr));
- // We can't use texture_manager->CreateTexture(), since it requires a unique
- // |client_id|. Instead we create the texture directly, and create our own
- // TextureRef for it.
- GLuint texture_id;
- glGenTextures(1, &texture_id);
- glBindTexture(target, texture_id);
-
- // Mark external textures as clear, since nobody is going to take any action
- // that would "clear" them.
- // TODO(liberato): should we make the client do this when it binds an image?
- gfx::Rect cleared_rect = (target == GL_TEXTURE_EXTERNAL_OES)
- ? gfx::Rect(width, height)
- : gfx::Rect();
-
- scoped_refptr<gpu::gles2::TextureRef> texture_ref =
- gpu::gles2::TextureRef::Create(texture_manager_, 0, texture_id);
- texture_manager_->SetTarget(texture_ref.get(), target);
- texture_manager_->SetLevelInfo(texture_ref.get(), // ref
- target, // target
- 0, // level
- internal_format, // internal_format
- width, // width
- height, // height
- 1, // depth
- 0, // border
- format, // format
- type, // type
- cleared_rect); // cleared_rect
-
- texture_manager_->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_MAG_FILTER,
- GL_LINEAR);
- texture_manager_->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_MIN_FILTER,
- GL_LINEAR);
-
- texture_manager_->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_WRAP_S,
- GL_CLAMP_TO_EDGE);
- texture_manager_->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_WRAP_T,
- GL_CLAMP_TO_EDGE);
+ std::unique_ptr<AbstractTexture> texture =
+ decoder_->CreateAbstractTexture(target, internal_format, width, height,
+ 1, // depth
+ 0, // border
+ format, type);
+
+ texture->SetParameteri(GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ texture->SetParameteri(GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ texture->SetParameteri(GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ texture->SetParameteri(GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// TODO(sandersd): Do we always want to allocate for GL_TEXTURE_2D?
if (target == GL_TEXTURE_2D) {
+ glBindTexture(target, texture->service_id());
glTexImage2D(target, // target
0, // level
internal_format, // internal_format
@@ -100,28 +71,10 @@ class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
format, // format
type, // type
nullptr); // data
+ decoder_->RestoreActiveTextureUnitBinding(target);
}
- decoder_->RestoreActiveTextureUnitBinding(target);
- return texture_ref;
- }
-
- void SetCleared(gpu::gles2::TextureRef* texture_ref) override {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- texture_manager_->SetLevelCleared(
- texture_ref, texture_ref->texture()->target(), 0, true);
- }
-
- void BindImage(gpu::gles2::TextureRef* texture_ref,
- gl::GLImage* image,
- bool can_bind_to_sampler) override {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- GLenum target = gpu::gles2::GLES2Util::GLFaceTargetToTextureTarget(
- texture_ref->texture()->target());
- gpu::gles2::Texture::ImageState state = can_bind_to_sampler
- ? gpu::gles2::Texture::BOUND
- : gpu::gles2::Texture::UNBOUND;
- texture_manager_->SetLevelImage(texture_ref, target, 0, image, state);
+ return texture;
}
gl::GLContext* GetGLContext() override {
@@ -129,10 +82,10 @@ class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
return decoder_->GetGLContext();
}
- gpu::Mailbox CreateMailbox(gpu::gles2::TextureRef* texture_ref) override {
+ gpu::Mailbox CreateMailbox(AbstractTexture* texture) override {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
gpu::Mailbox mailbox = gpu::Mailbox::Generate();
- mailbox_manager_->ProduceTexture(mailbox, texture_ref->texture());
+ mailbox_manager_->ProduceTexture(mailbox, texture->GetTextureBase());
return mailbox;
}
diff --git a/chromium/media/gpu/gles2_decoder_helper.h b/chromium/media/gpu/gles2_decoder_helper.h
index ee816217673..6c712c332d1 100644
--- a/chromium/media/gpu/gles2_decoder_helper.h
+++ b/chromium/media/gpu/gles2_decoder_helper.h
@@ -18,13 +18,12 @@ namespace gpu {
class DecoderContext;
struct Mailbox;
namespace gles2 {
-class TextureRef;
+class AbstractTexture;
} // namespace gles2
} // namespace gpu
namespace gl {
class GLContext;
-class GLImage;
} // namespace gl
namespace media {
@@ -42,12 +41,14 @@ class MEDIA_GPU_EXPORT GLES2DecoderHelper {
virtual bool MakeContextCurrent() = 0;
// Creates a texture and configures it as a video frame (linear filtering,
- // clamp to edge). The context must be current.
+ // clamp to edge). The context must be current. It is up to the caller to
+ // ensure that the entire texture is initialized before providing it to the
+ // renderer. For th
//
// See glTexImage2D() for parameter definitions.
//
// Returns nullptr on failure, but there are currently no failure paths.
- virtual scoped_refptr<gpu::gles2::TextureRef> CreateTexture(
+ virtual std::unique_ptr<gpu::gles2::AbstractTexture> CreateTexture(
GLenum target,
GLenum internal_format,
GLsizei width,
@@ -55,19 +56,12 @@ class MEDIA_GPU_EXPORT GLES2DecoderHelper {
GLenum format,
GLenum type) = 0;
- // Sets the cleared flag on level 0 of the texture.
- virtual void SetCleared(gpu::gles2::TextureRef* texture_ref) = 0;
-
- // Binds level 0 of the texture to an image.
- virtual void BindImage(gpu::gles2::TextureRef* texture_ref,
- gl::GLImage* image,
- bool can_bind_to_sampler) = 0;
-
// Gets the associated GLContext.
virtual gl::GLContext* GetGLContext() = 0;
// Creates a mailbox for a texture.
- virtual gpu::Mailbox CreateMailbox(gpu::gles2::TextureRef* texture_ref) = 0;
+ virtual gpu::Mailbox CreateMailbox(
+ gpu::gles2::AbstractTexture* texture_ref) = 0;
};
} // namespace media
diff --git a/chromium/media/gpu/gpu_video_accelerator_util.cc b/chromium/media/gpu/gpu_video_accelerator_util.cc
index edd8bb15634..0ebcbbfbc4d 100644
--- a/chromium/media/gpu/gpu_video_accelerator_util.cc
+++ b/chromium/media/gpu/gpu_video_accelerator_util.cc
@@ -33,7 +33,9 @@ STATIC_ASSERT_ENUM_MATCH(VP9PROFILE_PROFILE3);
STATIC_ASSERT_ENUM_MATCH(HEVCPROFILE_MAIN);
STATIC_ASSERT_ENUM_MATCH(HEVCPROFILE_MAIN10);
STATIC_ASSERT_ENUM_MATCH(HEVCPROFILE_MAIN_STILL_PICTURE);
-STATIC_ASSERT_ENUM_MATCH(AV1PROFILE_PROFILE0);
+STATIC_ASSERT_ENUM_MATCH(AV1PROFILE_PROFILE_MAIN);
+STATIC_ASSERT_ENUM_MATCH(AV1PROFILE_PROFILE_HIGH);
+STATIC_ASSERT_ENUM_MATCH(AV1PROFILE_PROFILE_PRO);
STATIC_ASSERT_ENUM_MATCH(VIDEO_CODEC_PROFILE_MAX);
// static
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
index 4c8f67c20b2..85d2edb507e 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
@@ -9,7 +9,7 @@
#include "base/memory/ptr_util.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/media_switches.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/gpu_video_accelerator_util.h"
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_factory.h b/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
index 70eac2b126f..3c94bdcdc45 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
@@ -9,9 +9,9 @@
#include "base/callback.h"
#include "base/threading/thread_checker.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_info.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/android_overlay_mojo_factory.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/media_gpu_export.h"
diff --git a/chromium/media/gpu/gpu_video_encode_accelerator_factory.h b/chromium/media/gpu/gpu_video_encode_accelerator_factory.h
index 761370dee5e..9984f3e51f2 100644
--- a/chromium/media/gpu/gpu_video_encode_accelerator_factory.h
+++ b/chromium/media/gpu/gpu_video_encode_accelerator_factory.h
@@ -7,7 +7,7 @@
#include <memory>
-#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/gpu/media_gpu_export.h"
#include "media/video/video_encode_accelerator.h"
diff --git a/chromium/media/gpu/h264_decoder.cc b/chromium/media/gpu/h264_decoder.cc
index 05e0608362e..260849b3e15 100644
--- a/chromium/media/gpu/h264_decoder.cc
+++ b/chromium/media/gpu/h264_decoder.cc
@@ -20,8 +20,10 @@ H264Decoder::H264Accelerator::H264Accelerator() = default;
H264Decoder::H264Accelerator::~H264Accelerator() = default;
-H264Decoder::H264Decoder(std::unique_ptr<H264Accelerator> accelerator)
+H264Decoder::H264Decoder(std::unique_ptr<H264Accelerator> accelerator,
+ const VideoColorSpace& container_color_space)
: state_(kNeedStreamMetadata),
+ container_color_space_(container_color_space),
max_frame_num_(0),
max_pic_num_(0),
max_long_term_frame_idx_(0),
@@ -89,10 +91,9 @@ bool H264Decoder::ModifyReferencePicLists(const H264SliceHeader* slice_hdr,
return true;
}
-bool H264Decoder::DecodePicture() {
+H264Decoder::H264Accelerator::Status H264Decoder::DecodePicture() {
DCHECK(curr_pic_.get());
- DVLOG(4) << "Decoding POC " << curr_pic_->pic_order_cnt;
return accelerator_->SubmitDecode(curr_pic_);
}
@@ -621,6 +622,12 @@ void H264Decoder::OutputPic(scoped_refptr<H264Picture> pic) {
DCHECK(!pic->outputted);
pic->outputted = true;
+ VideoColorSpace colorspace_for_frame = container_color_space_;
+ const H264SPS* sps = parser_.GetSPS(curr_sps_id_);
+ if (sps && sps->GetColorSpace().IsSpecified())
+ colorspace_for_frame = sps->GetColorSpace();
+ pic->set_colorspace(colorspace_for_frame);
+
if (pic->nonexisting) {
DVLOG(4) << "Skipping output, non-existing frame_num: " << pic->frame_num;
return;
@@ -666,7 +673,8 @@ bool H264Decoder::Flush() {
return true;
}
-bool H264Decoder::StartNewFrame(const H264SliceHeader* slice_hdr) {
+H264Decoder::H264Accelerator::Status H264Decoder::StartNewFrame(
+ const H264SliceHeader* slice_hdr) {
// TODO posciak: add handling of max_num_ref_frames per spec.
CHECK(curr_pic_.get());
DCHECK(slice_hdr);
@@ -674,12 +682,12 @@ bool H264Decoder::StartNewFrame(const H264SliceHeader* slice_hdr) {
curr_pps_id_ = slice_hdr->pic_parameter_set_id;
const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
if (!pps)
- return false;
+ return H264Accelerator::Status::kFail;
curr_sps_id_ = pps->seq_parameter_set_id;
const H264SPS* sps = parser_.GetSPS(curr_sps_id_);
if (!sps)
- return false;
+ return H264Accelerator::Status::kFail;
max_frame_num_ = 1 << (sps->log2_max_frame_num_minus4 + 4);
int frame_num = slice_hdr->frame_num;
@@ -690,21 +698,18 @@ bool H264Decoder::StartNewFrame(const H264SliceHeader* slice_hdr) {
if (frame_num != prev_ref_frame_num_ &&
frame_num != (prev_ref_frame_num_ + 1) % max_frame_num_) {
if (!HandleFrameNumGap(frame_num))
- return false;
+ return H264Accelerator::Status::kFail;
}
if (!InitCurrPicture(slice_hdr))
- return false;
+ return H264Accelerator::Status::kFail;
UpdatePicNums(frame_num);
PrepareRefPicLists(slice_hdr);
- if (!accelerator_->SubmitFrameMetadata(sps, pps, dpb_, ref_pic_list_p0_,
- ref_pic_list_b0_, ref_pic_list_b1_,
- curr_pic_.get()))
- return false;
-
- return true;
+ return accelerator_->SubmitFrameMetadata(sps, pps, dpb_, ref_pic_list_p0_,
+ ref_pic_list_b0_, ref_pic_list_b1_,
+ curr_pic_.get());
}
bool H264Decoder::HandleMemoryManagementOps(scoped_refptr<H264Picture> pic) {
@@ -1110,18 +1115,20 @@ bool H264Decoder::ProcessSPS(int sps_id, bool* need_new_buffers) {
return true;
}
-bool H264Decoder::FinishPrevFrameIfPresent() {
+H264Decoder::H264Accelerator::Status H264Decoder::FinishPrevFrameIfPresent() {
// If we already have a frame waiting to be decoded, decode it and finish.
if (curr_pic_) {
- if (!DecodePicture())
- return false;
+ H264Accelerator::Status result = DecodePicture();
+ if (result != H264Accelerator::Status::kOk)
+ return result;
scoped_refptr<H264Picture> pic = curr_pic_;
curr_pic_ = nullptr;
- return FinishPicture(pic);
+ if (!FinishPicture(pic))
+ return H264Accelerator::Status::kFail;
}
- return true;
+ return H264Accelerator::Status::kOk;
}
bool H264Decoder::HandleFrameNumGap(int frame_num) {
@@ -1156,22 +1163,23 @@ bool H264Decoder::HandleFrameNumGap(int frame_num) {
return true;
}
-bool H264Decoder::PreprocessCurrentSlice() {
+H264Decoder::H264Accelerator::Status H264Decoder::PreprocessCurrentSlice() {
const H264SliceHeader* slice_hdr = curr_slice_hdr_.get();
DCHECK(slice_hdr);
if (IsNewPrimaryCodedPicture(curr_pic_.get(), curr_pps_id_,
parser_.GetSPS(curr_sps_id_), *slice_hdr)) {
// New picture, so first finish the previous one before processing it.
- if (!FinishPrevFrameIfPresent())
- return false;
+ H264Accelerator::Status result = FinishPrevFrameIfPresent();
+ if (result != H264Accelerator::Status::kOk)
+ return result;
DCHECK(!curr_pic_);
if (slice_hdr->first_mb_in_slice != 0) {
DVLOG(1) << "ASO/invalid stream, first_mb_in_slice: "
<< slice_hdr->first_mb_in_slice;
- return false;
+ return H264Accelerator::Status::kFail;
}
// If the new picture is an IDR, flush DPB.
@@ -1180,17 +1188,17 @@ bool H264Decoder::PreprocessCurrentSlice() {
// not to do so.
if (!slice_hdr->no_output_of_prior_pics_flag) {
if (!Flush())
- return false;
+ return H264Accelerator::Status::kFail;
}
dpb_.Clear();
last_output_poc_ = std::numeric_limits<int>::min();
}
}
- return true;
+ return H264Accelerator::Status::kOk;
}
-bool H264Decoder::ProcessCurrentSlice() {
+H264Decoder::H264Accelerator::Status H264Decoder::ProcessCurrentSlice() {
DCHECK(curr_pic_);
const H264SliceHeader* slice_hdr = curr_slice_hdr_.get();
@@ -1203,18 +1211,16 @@ bool H264Decoder::ProcessCurrentSlice() {
H264Picture::Vector ref_pic_list0, ref_pic_list1;
if (!ModifyReferencePicLists(slice_hdr, &ref_pic_list0, &ref_pic_list1))
- return false;
+ return H264Accelerator::Status::kFail;
const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
if (!pps)
- return false;
-
- if (!accelerator_->SubmitSlice(pps, slice_hdr, ref_pic_list0, ref_pic_list1,
- curr_pic_.get(), slice_hdr->nalu_data,
- slice_hdr->nalu_size))
- return false;
+ return H264Accelerator::Status::kFail;
- return true;
+ return accelerator_->SubmitSlice(pps, slice_hdr, ref_pic_list0, ref_pic_list1,
+ curr_pic_.get(), slice_hdr->nalu_data,
+ slice_hdr->nalu_size,
+ parser_.GetCurrentSubsamples());
}
#define SET_ERROR_AND_RETURN() \
@@ -1224,22 +1230,37 @@ bool H264Decoder::ProcessCurrentSlice() {
return H264Decoder::kDecodeError; \
} while (0)
+#define CHECK_ACCELERATOR_RESULT(func) \
+ do { \
+ H264Accelerator::Status result = (func); \
+ switch (result) { \
+ case H264Accelerator::Status::kOk: \
+ break; \
+ case H264Accelerator::Status::kTryAgain: \
+ DVLOG(1) << #func " needs to try again"; \
+ return H264Decoder::kTryAgain; \
+ case H264Accelerator::Status::kFail: \
+ SET_ERROR_AND_RETURN(); \
+ } \
+ } while (0)
+
void H264Decoder::SetStream(int32_t id,
const uint8_t* ptr,
size_t size,
const DecryptConfig* decrypt_config) {
DCHECK(ptr);
DCHECK(size);
- if (decrypt_config) {
- NOTIMPLEMENTED();
- state_ = kError;
- return;
- }
DVLOG(4) << "New input stream id: " << id << " at: " << (void*)ptr
<< " size: " << size;
stream_id_ = id;
- parser_.SetStream(ptr, size);
+ if (decrypt_config) {
+ parser_.SetEncryptedStream(ptr, size, decrypt_config->subsamples());
+ current_decrypt_config_ = decrypt_config->Clone();
+ } else {
+ parser_.SetStream(ptr, size);
+ current_decrypt_config_ = nullptr;
+ }
}
H264Decoder::DecodeResult H264Decoder::Decode() {
@@ -1265,7 +1286,7 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
switch (curr_nalu_->nal_unit_type) {
case H264NALU::kNonIDRSlice:
// We can't resume from a non-IDR slice.
- if (state_ != kDecoding)
+ if (state_ == kError || state_ == kAfterReset)
break;
FALLTHROUGH;
@@ -1278,8 +1299,13 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
break;
}
- // If after reset, we should be able to recover from an IDR.
- state_ = kDecoding;
+ // If after reset or waiting for a key, we should be able to recover
+ // from an IDR. |state_|, |curr_slice_hdr_|, and |curr_pic_| are used
+ // to keep track of what has previously been attempted, so that after
+ // a retryable result is returned, subsequent calls to Decode() retry
+ // the call that failed previously. If it succeeds (it may not if no
+ // additional key has been provided, for example), then the remaining
+ // steps will be executed.
if (!curr_slice_hdr_) {
curr_slice_hdr_.reset(new H264SliceHeader());
@@ -1288,34 +1314,47 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
if (par_res != H264Parser::kOk)
SET_ERROR_AND_RETURN();
- if (!PreprocessCurrentSlice())
- SET_ERROR_AND_RETURN();
+ state_ = kTryPreprocessCurrentSlice;
}
- if (!curr_pic_) {
- // New picture/finished previous one, try to start a new one
- // or tell the client we need more surfaces.
- curr_pic_ = accelerator_->CreateH264Picture();
- if (!curr_pic_)
- return kRanOutOfSurfaces;
+ if (state_ == kTryPreprocessCurrentSlice) {
+ CHECK_ACCELERATOR_RESULT(PreprocessCurrentSlice());
+ state_ = kEnsurePicture;
+ }
- if (!StartNewFrame(curr_slice_hdr_.get()))
- SET_ERROR_AND_RETURN();
+ if (state_ == kEnsurePicture) {
+ if (curr_pic_) {
+ // |curr_pic_| already exists, so skip to ProcessCurrentSlice().
+ state_ = kTryCurrentSlice;
+ } else {
+ // New picture/finished previous one, try to start a new one
+ // or tell the client we need more surfaces.
+ curr_pic_ = accelerator_->CreateH264Picture();
+ if (!curr_pic_)
+ return kRanOutOfSurfaces;
+ if (current_decrypt_config_)
+ curr_pic_->set_decrypt_config(current_decrypt_config_->Clone());
+
+ state_ = kTryNewFrame;
+ }
}
- if (!ProcessCurrentSlice())
- SET_ERROR_AND_RETURN();
+ if (state_ == kTryNewFrame) {
+ CHECK_ACCELERATOR_RESULT(StartNewFrame(curr_slice_hdr_.get()));
+ state_ = kTryCurrentSlice;
+ }
+ DCHECK_EQ(state_, kTryCurrentSlice);
+ CHECK_ACCELERATOR_RESULT(ProcessCurrentSlice());
curr_slice_hdr_.reset();
+ state_ = kDecoding;
break;
}
case H264NALU::kSPS: {
int sps_id;
- if (!FinishPrevFrameIfPresent())
- SET_ERROR_AND_RETURN();
-
+ CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
par_res = parser_.ParseSPS(&sps_id);
if (par_res != H264Parser::kOk)
SET_ERROR_AND_RETURN();
@@ -1342,9 +1381,7 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
case H264NALU::kPPS: {
int pps_id;
- if (!FinishPrevFrameIfPresent())
- SET_ERROR_AND_RETURN();
-
+ CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
par_res = parser_.ParsePPS(&pps_id);
if (par_res != H264Parser::kOk)
SET_ERROR_AND_RETURN();
@@ -1358,9 +1395,7 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
if (state_ != kDecoding)
break;
- if (!FinishPrevFrameIfPresent())
- SET_ERROR_AND_RETURN();
-
+ CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
break;
default:
diff --git a/chromium/media/gpu/h264_decoder.h b/chromium/media/gpu/h264_decoder.h
index dff6d936e60..f29dfd3b952 100644
--- a/chromium/media/gpu/h264_decoder.h
+++ b/chromium/media/gpu/h264_decoder.h
@@ -14,6 +14,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "media/base/limits.h"
+#include "media/base/subsample_entry.h"
#include "media/gpu/accelerated_video_decoder.h"
#include "media/gpu/h264_dpb.h"
#include "media/gpu/media_gpu_export.h"
@@ -33,6 +34,22 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
public:
class MEDIA_GPU_EXPORT H264Accelerator {
public:
+ // Methods may return kTryAgain if they need additional data (provided
+ // independently) in order to proceed. Examples are things like not having
+ // an appropriate key to decode encrypted content, or needing to wait
+ // until hardware buffers are available. This is not considered an
+ // unrecoverable error, but rather a pause to allow an application to
+ // independently provide the required data. When H264Decoder::Decode()
+ // is called again, it will attempt to resume processing of the stream
+ // by calling the same method again.
+ enum class Status {
+ kOk, // Operation completed successfully.
+ kFail, // Operation failed.
+ kTryAgain, // Operation failed because some external data is missing.
+ // Retry the same operation later, once the data has been
+ // provided.
+ };
+
H264Accelerator();
virtual ~H264Accelerator();
@@ -53,37 +70,44 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// Note that this does not run decode in the accelerator and the decoder
// is expected to follow this call with one or more SubmitSlice() calls
// before calling SubmitDecode().
- // Return true if successful.
- virtual bool SubmitFrameMetadata(const H264SPS* sps,
- const H264PPS* pps,
- const H264DPB& dpb,
- const H264Picture::Vector& ref_pic_listp0,
- const H264Picture::Vector& ref_pic_listb0,
- const H264Picture::Vector& ref_pic_listb1,
- const scoped_refptr<H264Picture>& pic) = 0;
+ // Returns kOk if successful, kFail if there are errors, or kTryAgain if
+ // the accelerator needs additional data before being able to proceed.
+ virtual Status SubmitFrameMetadata(
+ const H264SPS* sps,
+ const H264PPS* pps,
+ const H264DPB& dpb,
+ const H264Picture::Vector& ref_pic_listp0,
+ const H264Picture::Vector& ref_pic_listb0,
+ const H264Picture::Vector& ref_pic_listb1,
+ const scoped_refptr<H264Picture>& pic) = 0;
// Submit one slice for the current frame, passing the current |pps| and
// |pic| (same as in SubmitFrameMetadata()), the parsed header for the
// current slice in |slice_hdr|, and the reordered |ref_pic_listX|,
// as per H264 spec.
- // |data| pointing to the full slice (including the unparsed header| of
+ // |data| pointing to the full slice (including the unparsed header) of
// |size| in bytes.
+ // |subsamples| specifies which part of the slice data is encrypted.
// This must be called one or more times per frame, before SubmitDecode().
// Note that |data| does not have to remain valid after this call returns.
- // Return true if successful.
- virtual bool SubmitSlice(const H264PPS* pps,
- const H264SliceHeader* slice_hdr,
- const H264Picture::Vector& ref_pic_list0,
- const H264Picture::Vector& ref_pic_list1,
- const scoped_refptr<H264Picture>& pic,
- const uint8_t* data,
- size_t size) = 0;
+ // Returns kOk if successful, kFail if there are errors, or kTryAgain if
+ // the accelerator needs additional data before being able to proceed.
+ virtual Status SubmitSlice(
+ const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ const scoped_refptr<H264Picture>& pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) = 0;
// Execute the decode in hardware for |pic|, using all the slices and
// metadata submitted via SubmitFrameMetadata() and SubmitSlice() since
// the previous call to SubmitDecode().
- // Return true if successful.
- virtual bool SubmitDecode(const scoped_refptr<H264Picture>& pic) = 0;
+ // Returns kOk if successful, kFail if there are errors, or kTryAgain if
+ // the accelerator needs additional data before being able to proceed.
+ virtual Status SubmitDecode(const scoped_refptr<H264Picture>& pic) = 0;
// Schedule output (display) of |pic|. Note that returning from this
// method does not mean that |pic| has already been outputted (displayed),
@@ -101,7 +125,8 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
DISALLOW_COPY_AND_ASSIGN(H264Accelerator);
};
- explicit H264Decoder(std::unique_ptr<H264Accelerator> accelerator);
+ H264Decoder(std::unique_ptr<H264Accelerator> accelerator,
+ const VideoColorSpace& container_color_space = VideoColorSpace());
~H264Decoder() override;
// AcceleratedVideoDecoder implementation.
@@ -141,19 +166,32 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// Internal state of the decoder.
enum State {
- kNeedStreamMetadata, // After initialization, need an SPS.
- kDecoding, // Ready to decode from any point.
- kAfterReset, // After Reset(), need a resume point.
- kError, // Error in decode, can't continue.
+ // After initialization, need an SPS.
+ kNeedStreamMetadata,
+ // Ready to decode from any point.
+ kDecoding,
+ // After Reset(), need a resume point.
+ kAfterReset,
+ // The following keep track of what step is next in Decode() processing
+ // in order to resume properly after H264Decoder::kTryAgain (or another
+ // retryable error) is returned. The next time Decode() is called the call
+ // that previously failed will be retried and execution continues from
+ // there (if possible).
+ kTryPreprocessCurrentSlice,
+ kEnsurePicture,
+ kTryNewFrame,
+ kTryCurrentSlice,
+ // Error in decode, can't continue.
+ kError,
};
// Process H264 stream structures.
bool ProcessSPS(int sps_id, bool* need_new_buffers);
// Process current slice header to discover if we need to start a new picture,
// finishing up the current one.
- bool PreprocessCurrentSlice();
+ H264Accelerator::Status PreprocessCurrentSlice();
// Process current slice as a slice of the current picture.
- bool ProcessCurrentSlice();
+ H264Accelerator::Status ProcessCurrentSlice();
// Initialize the current picture according to data in |slice_hdr|.
bool InitCurrPicture(const H264SliceHeader* slice_hdr);
@@ -208,10 +246,10 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
bool HandleFrameNumGap(int frame_num);
// Start processing a new frame.
- bool StartNewFrame(const H264SliceHeader* slice_hdr);
+ H264Accelerator::Status StartNewFrame(const H264SliceHeader* slice_hdr);
// All data for a frame received, process it and decode.
- bool FinishPrevFrameIfPresent();
+ H264Accelerator::Status FinishPrevFrameIfPresent();
// Called after we are done processing |pic|. Performs all operations to be
// done after decoding, including DPB management, reference picture marking
@@ -226,7 +264,7 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
void ClearDPB();
// Commits all pending data for HW decoder and starts HW decoder.
- bool DecodePicture();
+ H264Accelerator::Status DecodePicture();
// Notifies client that a picture is ready for output.
void OutputPic(scoped_refptr<H264Picture> pic);
@@ -237,9 +275,15 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// Decoder state.
State state_;
+ // The colorspace for the h264 container.
+ const VideoColorSpace container_color_space_;
+
// Parser in use.
H264Parser parser_;
+ // Decrypting config for the most recent data passed to SetStream().
+ std::unique_ptr<DecryptConfig> current_decrypt_config_;
+
// DPB in use.
H264DPB dpb_;
diff --git a/chromium/media/gpu/h264_decoder_unittest.cc b/chromium/media/gpu/h264_decoder_unittest.cc
index d7e80256193..5d4db60e069 100644
--- a/chromium/media/gpu/h264_decoder_unittest.cc
+++ b/chromium/media/gpu/h264_decoder_unittest.cc
@@ -18,6 +18,7 @@
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::_;
+using ::testing::Args;
using ::testing::Expectation;
using ::testing::InSequence;
using ::testing::Invoke;
@@ -40,34 +41,49 @@ const std::string kHighFrame1 = "bear-320x192-high-frame-1.h264";
const std::string kHighFrame2 = "bear-320x192-high-frame-2.h264";
const std::string kHighFrame3 = "bear-320x192-high-frame-3.h264";
+// Checks whether the decrypt config in the picture matches the decrypt config
+// passed to this matcher.
+MATCHER_P(DecryptConfigMatches, decrypt_config, "") {
+ const scoped_refptr<H264Picture>& pic = arg;
+ return pic->decrypt_config()->Matches(*decrypt_config);
+}
+
+MATCHER(SubsampleSizeMatches, "Verify subsample sizes match buffer size") {
+ const size_t buffer_size = ::testing::get<0>(arg);
+ const std::vector<SubsampleEntry>& subsamples = ::testing::get<1>(arg);
+ size_t subsample_total_size = 0;
+ for (const auto& sample : subsamples) {
+ subsample_total_size += sample.cypher_bytes;
+ subsample_total_size += sample.clear_bytes;
+ }
+ return subsample_total_size == buffer_size;
+}
+
class MockH264Accelerator : public H264Decoder::H264Accelerator {
public:
MockH264Accelerator() = default;
MOCK_METHOD0(CreateH264Picture, scoped_refptr<H264Picture>());
- MOCK_METHOD1(SubmitDecode, bool(const scoped_refptr<H264Picture>& pic));
+ MOCK_METHOD1(SubmitDecode, Status(const scoped_refptr<H264Picture>& pic));
+ MOCK_METHOD7(SubmitFrameMetadata,
+ Status(const H264SPS* sps,
+ const H264PPS* pps,
+ const H264DPB& dpb,
+ const H264Picture::Vector& ref_pic_listp0,
+ const H264Picture::Vector& ref_pic_listb0,
+ const H264Picture::Vector& ref_pic_listb1,
+ const scoped_refptr<H264Picture>& pic));
+ MOCK_METHOD8(SubmitSlice,
+ Status(const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ const scoped_refptr<H264Picture>& pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples));
MOCK_METHOD1(OutputPicture, bool(const scoped_refptr<H264Picture>& pic));
- bool SubmitFrameMetadata(const H264SPS* sps,
- const H264PPS* pps,
- const H264DPB& dpb,
- const H264Picture::Vector& ref_pic_listp0,
- const H264Picture::Vector& ref_pic_listb0,
- const H264Picture::Vector& ref_pic_listb1,
- const scoped_refptr<H264Picture>& pic) override {
- return true;
- }
-
- bool SubmitSlice(const H264PPS* pps,
- const H264SliceHeader* slice_hdr,
- const H264Picture::Vector& ref_pic_list0,
- const H264Picture::Vector& ref_pic_list1,
- const scoped_refptr<H264Picture>& pic,
- const uint8_t* data,
- size_t size) override {
- return true;
- }
-
void Reset() override {}
};
@@ -106,8 +122,14 @@ void H264DecoderTest::SetUp() {
ON_CALL(*accelerator_, CreateH264Picture()).WillByDefault(Invoke([]() {
return new H264Picture();
}));
- ON_CALL(*accelerator_, SubmitDecode(_)).WillByDefault(Return(true));
+ ON_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _))
+ .WillByDefault(Return(H264Decoder::H264Accelerator::Status::kOk));
+ ON_CALL(*accelerator_, SubmitDecode(_))
+ .WillByDefault(Return(H264Decoder::H264Accelerator::Status::kOk));
ON_CALL(*accelerator_, OutputPicture(_)).WillByDefault(Return(true));
+ ON_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _))
+ .With(Args<6, 7>(SubsampleSizeMatches()))
+ .WillByDefault(Return(H264Decoder::H264Accelerator::Status::kOk));
}
void H264DecoderTest::SetInputFrameFiles(
@@ -173,6 +195,8 @@ TEST_F(H264DecoderTest, DecodeSingleFrame) {
{
InSequence sequence;
EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
EXPECT_CALL(*accelerator_, SubmitDecode(_));
EXPECT_CALL(*accelerator_, OutputPicture(_));
}
@@ -188,6 +212,8 @@ TEST_F(H264DecoderTest, SkipNonIDRFrames) {
{
InSequence sequence;
EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
EXPECT_CALL(*accelerator_, SubmitDecode(_));
EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0)));
}
@@ -204,6 +230,9 @@ TEST_F(H264DecoderTest, DecodeProfileBaseline) {
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _)).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _)).Times(4);
+
Expectation decode_poc0, decode_poc2, decode_poc4, decode_poc6;
{
InSequence decode_order;
@@ -232,6 +261,9 @@ TEST_F(H264DecoderTest, DecodeProfileHigh) {
// Two pictures will be kept in DPB for reordering. The first picture should
// be outputted after feeding the third frame.
EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _)).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _)).Times(4);
+
Expectation decode_poc0, decode_poc2, decode_poc4, decode_poc6;
{
InSequence decode_order;
@@ -259,9 +291,11 @@ TEST_F(H264DecoderTest, SwitchBaselineToHigh) {
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
- EXPECT_CALL(*accelerator_, CreateH264Picture());
{
InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
EXPECT_CALL(*accelerator_, SubmitDecode(_));
EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0)));
}
@@ -272,6 +306,9 @@ TEST_F(H264DecoderTest, SwitchBaselineToHigh) {
ASSERT_TRUE(Mock::VerifyAndClearExpectations(&*accelerator_));
EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _)).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _)).Times(4);
+
Expectation decode_poc0, decode_poc2, decode_poc4, decode_poc6;
{
InSequence decode_order;
@@ -300,9 +337,11 @@ TEST_F(H264DecoderTest, SwitchHighToBaseline) {
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_LE(16u, decoder_->GetRequiredNumOfPictures());
- EXPECT_CALL(*accelerator_, CreateH264Picture());
{
InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
EXPECT_CALL(*accelerator_, SubmitDecode(_));
EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0)));
}
@@ -313,6 +352,9 @@ TEST_F(H264DecoderTest, SwitchHighToBaseline) {
ASSERT_TRUE(Mock::VerifyAndClearExpectations(&*accelerator_));
EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _)).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _)).Times(4);
+
Expectation decode_poc0, decode_poc2, decode_poc4, decode_poc6;
{
InSequence decode_order;
@@ -332,5 +374,153 @@ TEST_F(H264DecoderTest, SwitchHighToBaseline) {
ASSERT_TRUE(decoder_->Flush());
}
+// Verify that the decryption config is passed to the accelerator.
+TEST_F(H264DecoderTest, SetEncryptedStream) {
+ std::string bitstream;
+ auto input_file = GetTestDataFilePath(kBaselineFrame0);
+ CHECK(base::ReadFileToString(input_file, &bitstream));
+
+ const char kAnyKeyId[] = "any_16byte_keyid";
+ const char kAnyIv[] = "any_16byte_iv___";
+ const std::vector<SubsampleEntry> subsamples = {
+ // No encrypted bytes. This test only checks whether the data is passed
+ // thru to the acclerator so making this completely clear.
+ {bitstream.size(), 0},
+ };
+
+ std::unique_ptr<DecryptConfig> decrypt_config =
+ DecryptConfig::CreateCencConfig(kAnyKeyId, kAnyIv, subsamples);
+ EXPECT_CALL(*accelerator_,
+ SubmitFrameMetadata(_, _, _, _, _, _,
+ DecryptConfigMatches(decrypt_config.get())))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kOk));
+ EXPECT_CALL(*accelerator_,
+ SubmitDecode(DecryptConfigMatches(decrypt_config.get())))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kOk));
+
+ decoder_->SetStream(0, reinterpret_cast<const uint8_t*>(bitstream.data()),
+ bitstream.size(), decrypt_config.get());
+ EXPECT_EQ(AcceleratedVideoDecoder::kAllocateNewSurfaces, decoder_->Decode());
+ EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, decoder_->Decode());
+ EXPECT_TRUE(decoder_->Flush());
+}
+
+TEST_F(H264DecoderTest, SubmitFrameMetadataRetry) {
+ SetInputFrameFiles({kBaselineFrame0});
+ ASSERT_EQ(AcceleratedVideoDecoder::kAllocateNewSurfaces, Decode());
+ EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
+ EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
+
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kTryAgain));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+ // Try again, assuming key still not set. Only SubmitFrameMetadata()
+ // should be called again.
+ EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(0);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kTryAgain));
+ ASSERT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+ // Assume key has been provided now, next call to Decode() should proceed.
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(0)));
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0)));
+ }
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+TEST_F(H264DecoderTest, SubmitSliceRetry) {
+ SetInputFrameFiles({kBaselineFrame0});
+ ASSERT_EQ(AcceleratedVideoDecoder::kAllocateNewSurfaces, Decode());
+ EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
+ EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
+
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kTryAgain));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+ // Try again, assuming key still not set. Only SubmitSlice() should be
+ // called again.
+ EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(0);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _)).Times(0);
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kTryAgain));
+ ASSERT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+ // Assume key has been provided now, next call to Decode() should proceed.
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(0)));
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0)));
+ }
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+TEST_F(H264DecoderTest, SubmitDecodeRetry) {
+ SetInputFrameFiles({kBaselineFrame0, kBaselineFrame1});
+ ASSERT_EQ(AcceleratedVideoDecoder::kAllocateNewSurfaces, Decode());
+ EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
+ EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
+
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitDecode(_))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kTryAgain));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+ // Try again, assuming key still not set. Only SubmitDecode() should be
+ // called again.
+ EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(0);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _)).Times(0);
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _)).Times(0);
+ EXPECT_CALL(*accelerator_, SubmitDecode(_))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kTryAgain));
+ ASSERT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+ // Assume key has been provided now, next call to Decode() should output
+ // the first frame.
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(0)));
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0)));
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(2)));
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(2)));
+ }
+ ASSERT_TRUE(decoder_->Flush());
+}
+
} // namespace
} // namespace media
diff --git a/chromium/media/gpu/image_processor.h b/chromium/media/gpu/image_processor.h
new file mode 100644
index 00000000000..aadfec333fc
--- /dev/null
+++ b/chromium/media/gpu/image_processor.h
@@ -0,0 +1,74 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_IMAGE_PROCESSOR_H_
+#define MEDIA_GPU_IMAGE_PROCESSOR_H_
+
+#include <vector>
+
+#include "base/callback.h"
+#include "base/files/scoped_file.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_types.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+// An image processor is used to convert from one image format to another (e.g.
+// I420 to NV12) while optionally scaling. It is useful in situations where
+// a given video hardware (e.g. decoder or encoder) accepts or produces data
+// in a format different from what the rest of the pipeline expects.
+//
+// This class exposes the interface that an image processor should implement.
+class ImageProcessor {
+ public:
+ // Initializes the processor to convert from |input_format| to |output_format|
+ // and/or scale from |input_visible_size| to |output_visible_size|.
+ // Request the input buffers to be of at least |input_allocated_size| and the
+ // output buffers to be of at least |output_allocated_size|. The number of
+ // input buffers and output buffers will be |num_buffers|. Provided |error_cb|
+ // will be posted to the child thread if an error occurs after initialization.
+ // Return true if the requested configuration is supported.
+ virtual bool Initialize(VideoPixelFormat input_format,
+ VideoPixelFormat output_format,
+ gfx::Size input_visible_size,
+ gfx::Size input_allocated_size,
+ gfx::Size output_visible_size,
+ gfx::Size output_allocated_size,
+ int num_buffers,
+ const base::Closure& error_cb) = 0;
+
+ // Returns input allocated size required by the processor to be fed with.
+ virtual gfx::Size input_allocated_size() const = 0;
+
+ // Returns output allocated size required by the processor.
+ virtual gfx::Size output_allocated_size() const = 0;
+
+ // Callback to be used to return the index of a processed image to the
+ // client. After the client is done with the frame, call Process with the
+ // index to return the output buffer to the image processor.
+ using FrameReadyCB = base::OnceCallback<void(scoped_refptr<VideoFrame>)>;
+
+ // Called by client to process |frame|. The resulting processed frame will be
+ // stored in |output_buffer_index| output buffer and notified via |cb|. The
+ // processor will drop all its references to |frame| after it finishes
+ // accessing it. If the input buffers are DMA-backed, the caller
+ // should pass non-empty |output_dmabuf_fds| and the processed frame will be
+ // stored in those buffers. If the number of |output_dmabuf_fds| is not
+ // expected, this function will return false.
+ virtual bool Process(const scoped_refptr<VideoFrame>& frame,
+ int output_buffer_index,
+ std::vector<base::ScopedFD> output_dmabuf_fds,
+ FrameReadyCB cb) = 0;
+
+ // Reset all processing frames. After this method returns, no more callbacks
+ // will be invoked. ImageProcessor is ready to process more frames.
+ virtual bool Reset() = 0;
+
+ virtual ~ImageProcessor() = default;
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_IMAGE_PROCESSOR_H_
diff --git a/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc b/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
index e06a485f388..f6681428978 100644
--- a/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
+++ b/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
@@ -16,7 +16,7 @@
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "gpu/command_buffer/common/command_buffer.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_preferences.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "gpu/ipc/service/gpu_channel_manager.h"
#include "ipc/ipc_message_macros.h"
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.cc b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
index c91e3d9c5c6..beff80c89ae 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
@@ -12,9 +12,9 @@
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_info.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/decoder_buffer.h"
#include "media/base/media_log.h"
diff --git a/chromium/media/gpu/jpeg_decode_accelerator_unittest.cc b/chromium/media/gpu/jpeg_decode_accelerator_unittest.cc
index d5e0a0b649e..13ed8075db1 100644
--- a/chromium/media/gpu/jpeg_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/jpeg_decode_accelerator_unittest.cc
@@ -18,6 +18,7 @@
#include "base/files/file_util.h"
#include "base/logging.h"
#include "base/macros.h"
+#include "base/numerics/safe_conversions.h"
#include "base/path_service.h"
#include "base/strings/string_piece.h"
#include "base/strings/string_split.h"
@@ -32,6 +33,7 @@
#include "media/video/jpeg_decode_accelerator.h"
#include "third_party/libyuv/include/libyuv.h"
#include "ui/gfx/codec/jpeg_codec.h"
+#include "ui/gfx/codec/png_codec.h"
#if BUILDFLAG(USE_VAAPI)
#include "media/gpu/vaapi/vaapi_wrapper.h"
@@ -43,6 +45,11 @@ namespace {
// Default test image file.
const base::FilePath::CharType* kDefaultJpegFilename =
FILE_PATH_LITERAL("peach_pi-1280x720.jpg");
+// Images with at least one odd dimension.
+const base::FilePath::CharType* kOddJpegFilenames[] = {
+ FILE_PATH_LITERAL("peach_pi-40x23.jpg"),
+ FILE_PATH_LITERAL("peach_pi-41x22.jpg"),
+ FILE_PATH_LITERAL("peach_pi-41x23.jpg")};
int kDefaultPerfDecodeTimes = 600;
// Decide to save decode results to files or not. Output files will be saved
// in the same directory with unittest. File name is like input file but
@@ -68,6 +75,7 @@ struct TestImageFile {
JpegParseResult parse_result;
gfx::Size visible_size;
+ gfx::Size coded_size;
size_t output_size;
};
@@ -80,12 +88,12 @@ enum ClientState {
class JpegClient : public JpegDecodeAccelerator::Client {
public:
+ // JpegClient takes ownership of |note|.
JpegClient(const std::vector<TestImageFile*>& test_image_files,
- ClientStateNotification<ClientState>* note,
+ std::unique_ptr<ClientStateNotification<ClientState>> note,
bool is_skip);
~JpegClient() override;
void CreateJpegDecoder();
- void DestroyJpegDecoder();
void StartDecode(int32_t bitstream_buffer_id, bool do_prepare_memory = true);
void PrepareMemory(int32_t bitstream_buffer_id);
bool GetSoftwareDecodeResult(int32_t bitstream_buffer_id);
@@ -95,23 +103,31 @@ class JpegClient : public JpegDecodeAccelerator::Client {
void NotifyError(int32_t bitstream_buffer_id,
JpegDecodeAccelerator::Error error) override;
+ // Accessors.
+ ClientStateNotification<ClientState>* note() const { return note_.get(); }
+
private:
+ FRIEND_TEST_ALL_PREFIXES(JpegClientTest, GetMeanAbsoluteDifference);
+
void SetState(ClientState new_state);
- void SaveToFile(int32_t bitstream_buffer_id);
+
+ // Save a video frame that contains a decoded JPEG. The output is a PNG file.
+ // The suffix will be added before the .png extension.
+ void SaveToFile(int32_t bitstream_buffer_id,
+ const scoped_refptr<VideoFrame>& in_frame,
+ const std::string& suffix = "");
// Calculate mean absolute difference of hardware and software decode results
// to check the similarity.
- double GetMeanAbsoluteDifference(int32_t bitstream_buffer_id);
+ double GetMeanAbsoluteDifference();
// JpegClient doesn't own |test_image_files_|.
const std::vector<TestImageFile*>& test_image_files_;
- std::unique_ptr<JpegDecodeAccelerator> decoder_;
ClientState state_;
- // Used to notify another thread about the state. JpegClient does not own
- // this.
- ClientStateNotification<ClientState>* note_;
+ // Used to notify another thread about the state. JpegClient owns this.
+ std::unique_ptr<ClientStateNotification<ClientState>> note_;
// Skip JDA decode result. Used for testing performance.
bool is_skip_;
@@ -120,18 +136,43 @@ class JpegClient : public JpegDecodeAccelerator::Client {
std::unique_ptr<base::SharedMemory> in_shm_;
// Mapped memory of output buffer from hardware decoder.
std::unique_ptr<base::SharedMemory> hw_out_shm_;
+ // Video frame corresponding to the output of the hardware decoder.
+ scoped_refptr<VideoFrame> hw_out_frame_;
// Mapped memory of output buffer from software decoder.
std::unique_ptr<base::SharedMemory> sw_out_shm_;
+ // Video frame corresponding to the output of the software decoder.
+ scoped_refptr<VideoFrame> sw_out_frame_;
+
+ // This should be the first member to get destroyed because |decoder_|
+ // potentially uses other members in the JpegClient instance. For example,
+ // as decode tasks finish in a new thread spawned by |decoder_|, |hw_out_shm_|
+ // can be accessed.
+ std::unique_ptr<JpegDecodeAccelerator> decoder_;
DISALLOW_COPY_AND_ASSIGN(JpegClient);
};
-JpegClient::JpegClient(const std::vector<TestImageFile*>& test_image_files,
- ClientStateNotification<ClientState>* note,
- bool is_skip)
+// Returns a base::ScopedClosureRunner that can be used to automatically destroy
+// an instance of JpegClient in a given task runner. Takes ownership of
+// |client|.
+base::ScopedClosureRunner CreateClientDestroyer(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ std::unique_ptr<JpegClient> client) {
+ return base::ScopedClosureRunner(base::BindOnce(
+ [](scoped_refptr<base::SingleThreadTaskRunner> destruction_runner,
+ std::unique_ptr<JpegClient> client_to_delete) {
+ destruction_runner->DeleteSoon(FROM_HERE, std::move(client_to_delete));
+ },
+ task_runner, std::move(client)));
+}
+
+JpegClient::JpegClient(
+ const std::vector<TestImageFile*>& test_image_files,
+ std::unique_ptr<ClientStateNotification<ClientState>> note,
+ bool is_skip)
: test_image_files_(test_image_files),
state_(CS_CREATED),
- note_(note),
+ note_(std::move(note)),
is_skip_(is_skip) {}
JpegClient::~JpegClient() {}
@@ -166,10 +207,6 @@ void JpegClient::CreateJpegDecoder() {
SetState(CS_INITIALIZED);
}
-void JpegClient::DestroyJpegDecoder() {
- decoder_.reset();
-}
-
void JpegClient::VideoFrameReady(int32_t bitstream_buffer_id) {
if (is_skip_) {
SetState(CS_DECODE_PASS);
@@ -181,10 +218,11 @@ void JpegClient::VideoFrameReady(int32_t bitstream_buffer_id) {
return;
}
if (g_save_to_file) {
- SaveToFile(bitstream_buffer_id);
+ SaveToFile(bitstream_buffer_id, hw_out_frame_, "_hw");
+ SaveToFile(bitstream_buffer_id, sw_out_frame_, "_sw");
}
- double difference = GetMeanAbsoluteDifference(bitstream_buffer_id);
+ double difference = GetMeanAbsoluteDifference();
if (difference <= kDecodeSimilarityThreshold) {
SetState(CS_DECODE_PASS);
} else {
@@ -232,26 +270,81 @@ void JpegClient::SetState(ClientState new_state) {
state_ = new_state;
}
-void JpegClient::SaveToFile(int32_t bitstream_buffer_id) {
+void JpegClient::SaveToFile(int32_t bitstream_buffer_id,
+ const scoped_refptr<VideoFrame>& in_frame,
+ const std::string& suffix) {
+ LOG_ASSERT(in_frame.get());
TestImageFile* image_file = test_image_files_[bitstream_buffer_id];
- base::FilePath in_filename(image_file->filename);
- base::FilePath out_filename = in_filename.ReplaceExtension(".yuv");
- int size = base::checked_cast<int>(image_file->output_size);
- ASSERT_EQ(size,
- base::WriteFile(out_filename,
- static_cast<char*>(hw_out_shm_->memory()), size));
+ // First convert to ARGB format. Note that in our case, the coded size and the
+ // visible size will be the same.
+ scoped_refptr<VideoFrame> argb_out_frame = VideoFrame::CreateFrame(
+ VideoPixelFormat::PIXEL_FORMAT_ARGB, image_file->visible_size,
+ gfx::Rect(image_file->visible_size), image_file->visible_size,
+ base::TimeDelta());
+ LOG_ASSERT(argb_out_frame.get());
+ LOG_ASSERT(in_frame->visible_rect() == argb_out_frame->visible_rect());
+
+ // Note that we use J420ToARGB instead of I420ToARGB so that the
+ // kYuvJPEGConstants YUV-to-RGB conversion matrix is used.
+ const int conversion_status =
+ libyuv::J420ToARGB(in_frame->data(VideoFrame::kYPlane),
+ in_frame->stride(VideoFrame::kYPlane),
+ in_frame->data(VideoFrame::kUPlane),
+ in_frame->stride(VideoFrame::kUPlane),
+ in_frame->data(VideoFrame::kVPlane),
+ in_frame->stride(VideoFrame::kVPlane),
+ argb_out_frame->data(VideoFrame::kARGBPlane),
+ argb_out_frame->stride(VideoFrame::kARGBPlane),
+ argb_out_frame->visible_rect().width(),
+ argb_out_frame->visible_rect().height());
+ LOG_ASSERT(conversion_status == 0);
+
+ // Save as a PNG.
+ std::vector<uint8_t> png_output;
+ const bool png_encode_status = gfx::PNGCodec::Encode(
+ argb_out_frame->data(VideoFrame::kARGBPlane), gfx::PNGCodec::FORMAT_BGRA,
+ argb_out_frame->visible_rect().size(),
+ argb_out_frame->stride(VideoFrame::kARGBPlane),
+ true, /* discard_transparency */
+ std::vector<gfx::PNGCodec::Comment>(), &png_output);
+ LOG_ASSERT(png_encode_status);
+ const base::FilePath in_filename(image_file->filename);
+ const base::FilePath out_filename =
+ in_filename.ReplaceExtension(".png").InsertBeforeExtension(suffix);
+ const int size = base::checked_cast<int>(png_output.size());
+ const int file_written_bytes = base::WriteFile(
+ out_filename, reinterpret_cast<char*>(png_output.data()), size);
+ LOG_ASSERT(file_written_bytes == size);
}
-double JpegClient::GetMeanAbsoluteDifference(int32_t bitstream_buffer_id) {
- TestImageFile* image_file = test_image_files_[bitstream_buffer_id];
-
- double total_difference = 0;
- uint8_t* hw_ptr = static_cast<uint8_t*>(hw_out_shm_->memory());
- uint8_t* sw_ptr = static_cast<uint8_t*>(sw_out_shm_->memory());
- for (size_t i = 0; i < image_file->output_size; i++)
- total_difference += std::abs(hw_ptr[i] - sw_ptr[i]);
- return total_difference / image_file->output_size;
+double JpegClient::GetMeanAbsoluteDifference() {
+ double mean_abs_difference = 0;
+ size_t num_samples = 0;
+ const size_t planes[] = {VideoFrame::kYPlane, VideoFrame::kUPlane,
+ VideoFrame::kVPlane};
+ for (size_t plane : planes) {
+ const uint8_t* hw_data = hw_out_frame_->data(plane);
+ const uint8_t* sw_data = sw_out_frame_->data(plane);
+ LOG_ASSERT(hw_out_frame_->visible_rect() == sw_out_frame_->visible_rect());
+ const size_t rows = VideoFrame::Rows(
+ plane, PIXEL_FORMAT_I420, hw_out_frame_->visible_rect().height());
+ const size_t columns = VideoFrame::Columns(
+ plane, PIXEL_FORMAT_I420, hw_out_frame_->visible_rect().width());
+ LOG_ASSERT(hw_out_frame_->stride(plane) == sw_out_frame_->stride(plane));
+ const int stride = hw_out_frame_->stride(plane);
+ for (size_t row = 0; row < rows; ++row) {
+ for (size_t col = 0; col < columns; ++col) {
+ mean_abs_difference += std::abs(hw_data[col] - sw_data[col]);
+ }
+ hw_data += stride;
+ sw_data += stride;
+ }
+ num_samples += rows * columns;
+ }
+ LOG_ASSERT(num_samples > 0);
+ mean_abs_difference /= num_samples;
+ return mean_abs_difference;
}
void JpegClient::StartDecode(int32_t bitstream_buffer_id,
@@ -267,48 +360,37 @@ void JpegClient::StartDecode(int32_t bitstream_buffer_id,
dup_handle = base::SharedMemory::DuplicateHandle(in_shm_->handle());
BitstreamBuffer bitstream_buffer(bitstream_buffer_id, dup_handle,
image_file->data_str.size());
- scoped_refptr<VideoFrame> out_frame_ = VideoFrame::WrapExternalSharedMemory(
- PIXEL_FORMAT_I420, image_file->visible_size,
+ hw_out_frame_ = VideoFrame::WrapExternalSharedMemory(
+ PIXEL_FORMAT_I420, image_file->coded_size,
gfx::Rect(image_file->visible_size), image_file->visible_size,
static_cast<uint8_t*>(hw_out_shm_->memory()), image_file->output_size,
hw_out_shm_->handle(), 0, base::TimeDelta());
- LOG_ASSERT(out_frame_.get());
- decoder_->Decode(bitstream_buffer, out_frame_);
+ LOG_ASSERT(hw_out_frame_.get());
+ decoder_->Decode(bitstream_buffer, hw_out_frame_);
}
bool JpegClient::GetSoftwareDecodeResult(int32_t bitstream_buffer_id) {
- VideoPixelFormat format = PIXEL_FORMAT_I420;
TestImageFile* image_file = test_image_files_[bitstream_buffer_id];
-
- uint8_t* yplane = static_cast<uint8_t*>(sw_out_shm_->memory());
- uint8_t* uplane = yplane +
- VideoFrame::PlaneSize(format, VideoFrame::kYPlane,
- image_file->visible_size)
- .GetArea();
- uint8_t* vplane = uplane +
- VideoFrame::PlaneSize(format, VideoFrame::kUPlane,
- image_file->visible_size)
- .GetArea();
- int yplane_stride = image_file->visible_size.width();
- int uv_plane_stride = yplane_stride / 2;
-
- if (libyuv::ConvertToI420(
- static_cast<uint8_t*>(in_shm_->memory()),
- image_file->data_str.size(),
- yplane,
- yplane_stride,
- uplane,
- uv_plane_stride,
- vplane,
- uv_plane_stride,
- 0,
- 0,
- image_file->visible_size.width(),
- image_file->visible_size.height(),
- image_file->visible_size.width(),
- image_file->visible_size.height(),
- libyuv::kRotate0,
- libyuv::FOURCC_MJPG) != 0) {
+ sw_out_frame_ = VideoFrame::WrapExternalSharedMemory(
+ PIXEL_FORMAT_I420, image_file->coded_size,
+ gfx::Rect(image_file->visible_size), image_file->visible_size,
+ static_cast<uint8_t*>(sw_out_shm_->memory()), image_file->output_size,
+ sw_out_shm_->handle(), 0, base::TimeDelta());
+ LOG_ASSERT(sw_out_shm_.get());
+
+ if (libyuv::ConvertToI420(static_cast<uint8_t*>(in_shm_->memory()),
+ image_file->data_str.size(),
+ sw_out_frame_->data(VideoFrame::kYPlane),
+ sw_out_frame_->stride(VideoFrame::kYPlane),
+ sw_out_frame_->data(VideoFrame::kUPlane),
+ sw_out_frame_->stride(VideoFrame::kUPlane),
+ sw_out_frame_->data(VideoFrame::kVPlane),
+ sw_out_frame_->stride(VideoFrame::kVPlane), 0, 0,
+ sw_out_frame_->visible_rect().width(),
+ sw_out_frame_->visible_rect().height(),
+ sw_out_frame_->visible_rect().width(),
+ sw_out_frame_->visible_rect().height(),
+ libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
LOG(ERROR) << "Software decode " << image_file->filename << " failed.";
return false;
}
@@ -348,6 +430,8 @@ class JpegDecodeAcceleratorTestEnvironment : public ::testing::Environment {
std::unique_ptr<TestImageFile> image_data_1280x720_default_;
// Parsed data of failure image.
std::unique_ptr<TestImageFile> image_data_invalid_;
+ // Parsed data for images with at least one odd dimension.
+ std::vector<std::unique_ptr<TestImageFile>> image_data_odd_;
// Parsed data from command line.
std::vector<std::unique_ptr<TestImageFile>> image_data_user_;
// Decode times for performance measurement.
@@ -395,8 +479,17 @@ void JpegDecodeAcceleratorTestEnvironment::SetUp() {
image_data_invalid_.reset(new TestImageFile("failure.jpg"));
image_data_invalid_->data_str.resize(100, 0);
image_data_invalid_->visible_size.SetSize(1280, 720);
+ image_data_invalid_->coded_size = image_data_invalid_->visible_size;
image_data_invalid_->output_size = VideoFrame::AllocationSize(
- PIXEL_FORMAT_I420, image_data_invalid_->visible_size);
+ PIXEL_FORMAT_I420, image_data_invalid_->coded_size);
+
+ // Load test images with at least one odd dimension.
+ for (const auto* filename : kOddJpegFilenames) {
+ base::FilePath input_file = GetOriginalOrTestDataFilePath(filename);
+ auto image_data = std::make_unique<TestImageFile>(filename);
+ ASSERT_NO_FATAL_FAILURE(ReadTestJpegImage(input_file, image_data.get()));
+ image_data_odd_.push_back(std::move(image_data));
+ }
// |user_jpeg_filenames_| may include many files and use ';' as delimiter.
std::vector<base::FilePath::StringType> filenames = base::SplitString(
@@ -448,8 +541,17 @@ void JpegDecodeAcceleratorTestEnvironment::ReadTestJpegImage(
image_data->visible_size.SetSize(
image_data->parse_result.frame_header.visible_width,
image_data->parse_result.frame_header.visible_height);
+ // The parse result yields a coded size that rounds up to a whole MCU.
+ // However, we can use a smaller coded size for the decode result. Here, we
+ // simply round up to the next even dimension. That way, when we are building
+ // the video frame to hold the result of the decoding, the strides and
+ // pointers for the UV planes are computed correctly for JPEGs that require
+ // even-sized allocation (see VideoFrame::RequiresEvenSizeAllocation()) and
+ // whose visible size has at least one odd dimension.
+ image_data->coded_size.SetSize((image_data->visible_size.width() + 1) & ~1,
+ (image_data->visible_size.height() + 1) & ~1);
image_data->output_size =
- VideoFrame::AllocationSize(PIXEL_FORMAT_I420, image_data->visible_size);
+ VideoFrame::AllocationSize(PIXEL_FORMAT_I420, image_data->coded_size);
}
base::FilePath
@@ -487,38 +589,43 @@ void JpegDecodeAcceleratorTest::TestDecode(size_t num_concurrent_decoders) {
base::Thread decoder_thread("DecoderThread");
ASSERT_TRUE(decoder_thread.Start());
- std::vector<std::unique_ptr<ClientStateNotification<ClientState>>> notes;
- std::vector<std::unique_ptr<JpegClient>> clients;
+ // The raw pointer to a client should not be used after a task to destroy the
+ // client is posted to |decoder_thread| by the corresponding element in
+ // |client_destroyers|. It's necessary to destroy the client in that thread
+ // because |client->decoder_| expects to be destroyed in the thread in which
+ // it was created.
+ std::vector<JpegClient*> clients;
+ std::vector<base::ScopedClosureRunner> client_destroyers;
for (size_t i = 0; i < num_concurrent_decoders; i++) {
- notes.push_back(std::make_unique<ClientStateNotification<ClientState>>());
- clients.push_back(std::make_unique<JpegClient>(test_image_files_,
- notes.back().get(), false));
+ auto client = std::make_unique<JpegClient>(
+ test_image_files_,
+ std::make_unique<ClientStateNotification<ClientState>>(),
+ false /* is_skip */);
+ clients.push_back(client.get());
+ client_destroyers.emplace_back(
+ CreateClientDestroyer(decoder_thread.task_runner(), std::move(client)));
decoder_thread.task_runner()->PostTask(
FROM_HERE, base::Bind(&JpegClient::CreateJpegDecoder,
- base::Unretained(clients.back().get())));
- ASSERT_EQ(notes[i]->Wait(), CS_INITIALIZED);
+ base::Unretained(clients.back())));
+ ASSERT_EQ(clients[i]->note()->Wait(), CS_INITIALIZED);
}
for (size_t index = 0; index < test_image_files_.size(); index++) {
for (size_t i = 0; i < num_concurrent_decoders; i++) {
decoder_thread.task_runner()->PostTask(
- FROM_HERE,
- base::Bind(&JpegClient::StartDecode,
- base::Unretained(clients[i].get()), index, true));
+ FROM_HERE, base::BindOnce(&JpegClient::StartDecode,
+ base::Unretained(clients[i]), index, true));
}
if (index < expected_status_.size()) {
for (size_t i = 0; i < num_concurrent_decoders; i++) {
- ASSERT_EQ(notes[i]->Wait(), expected_status_[index]);
+ ASSERT_EQ(clients[i]->note()->Wait(), expected_status_[index]);
}
}
}
- for (size_t i = 0; i < num_concurrent_decoders; i++) {
- decoder_thread.task_runner()->PostTask(
- FROM_HERE, base::Bind(&JpegClient::DestroyJpegDecoder,
- base::Unretained(clients[i].get())));
- }
+ // Doing this will destroy each client in the right thread (|decoder_thread|).
+ client_destroyers.clear();
decoder_thread.Stop();
}
@@ -527,39 +634,46 @@ void JpegDecodeAcceleratorTest::PerfDecodeByJDA(int decode_times) {
base::Thread decoder_thread("DecoderThread");
ASSERT_TRUE(decoder_thread.Start());
- std::unique_ptr<ClientStateNotification<ClientState>> note =
- std::make_unique<ClientStateNotification<ClientState>>();
- std::unique_ptr<JpegClient> client =
- std::make_unique<JpegClient>(test_image_files_, note.get(), true);
+ auto client = std::make_unique<JpegClient>(
+ test_image_files_,
+ std::make_unique<ClientStateNotification<ClientState>>(),
+ true /* is_skip */);
+
+ // The raw pointer to the client should not be used after a task to destroy
+ // the client is posted to |decoder_thread| by the |client_destroyer|. It's
+ // necessary to destroy the client in that thread because |client->decoder_|
+ // expects to be destroyed in the thread in which it was created.
+ JpegClient* client_raw = client.get();
+ base::ScopedClosureRunner client_destroyer =
+ CreateClientDestroyer(decoder_thread.task_runner(), std::move(client));
decoder_thread.task_runner()->PostTask(
- FROM_HERE, base::Bind(&JpegClient::CreateJpegDecoder,
- base::Unretained(client.get())));
- ASSERT_EQ(note->Wait(), CS_INITIALIZED);
+ FROM_HERE, base::BindOnce(&JpegClient::CreateJpegDecoder,
+ base::Unretained(client_raw)));
+ ASSERT_EQ(client_raw->note()->Wait(), CS_INITIALIZED);
const int32_t bitstream_buffer_id = 0;
- client->PrepareMemory(bitstream_buffer_id);
+ client_raw->PrepareMemory(bitstream_buffer_id);
for (int index = 0; index < decode_times; index++) {
decoder_thread.task_runner()->PostTask(
FROM_HERE,
- base::Bind(&JpegClient::StartDecode, base::Unretained(client.get()),
- bitstream_buffer_id, false));
- ASSERT_EQ(note->Wait(), CS_DECODE_PASS);
+ base::BindOnce(&JpegClient::StartDecode, base::Unretained(client_raw),
+ bitstream_buffer_id, false));
+ ASSERT_EQ(client_raw->note()->Wait(), CS_DECODE_PASS);
}
- decoder_thread.task_runner()->PostTask(
- FROM_HERE, base::Bind(&JpegClient::DestroyJpegDecoder,
- base::Unretained(client.get())));
+ // Doing this will destroy the client in the right thread (|decoder_thread|).
+ client_destroyer.RunAndReset();
decoder_thread.Stop();
}
void JpegDecodeAcceleratorTest::PerfDecodeBySW(int decode_times) {
LOG_ASSERT(test_image_files_.size() == 1);
- std::unique_ptr<ClientStateNotification<ClientState>> note =
- std::make_unique<ClientStateNotification<ClientState>>();
- std::unique_ptr<JpegClient> client =
- std::make_unique<JpegClient>(test_image_files_, note.get(), true);
+ std::unique_ptr<JpegClient> client = std::make_unique<JpegClient>(
+ test_image_files_,
+ std::make_unique<ClientStateNotification<ClientState>>(),
+ true /* is_skip */);
const int32_t bitstream_buffer_id = 0;
client->PrepareMemory(bitstream_buffer_id);
@@ -568,12 +682,75 @@ void JpegDecodeAcceleratorTest::PerfDecodeBySW(int decode_times) {
}
}
+// Return a VideoFrame that contains YUV data using 4:2:0 subsampling. The
+// visible size is 3x3, and the coded size is 4x4 which is 3x3 rounded up to the
+// next even dimensions.
+scoped_refptr<VideoFrame> GetTestDecodedData() {
+ scoped_refptr<VideoFrame> frame = VideoFrame::CreateZeroInitializedFrame(
+ PIXEL_FORMAT_I420, gfx::Size(4, 4) /* coded_size */,
+ gfx::Rect(3, 3) /* visible_rect */, gfx::Size(3, 3) /* natural_size */,
+ base::TimeDelta());
+ LOG_ASSERT(frame.get());
+ uint8_t* y_data = frame->data(VideoFrame::kYPlane);
+ int y_stride = frame->stride(VideoFrame::kYPlane);
+ uint8_t* u_data = frame->data(VideoFrame::kUPlane);
+ int u_stride = frame->stride(VideoFrame::kUPlane);
+ uint8_t* v_data = frame->data(VideoFrame::kVPlane);
+ int v_stride = frame->stride(VideoFrame::kVPlane);
+
+ // Data for the Y plane.
+ memcpy(&y_data[0 * y_stride], "\x01\x02\x03", 3);
+ memcpy(&y_data[1 * y_stride], "\x04\x05\x06", 3);
+ memcpy(&y_data[2 * y_stride], "\x07\x08\x09", 3);
+
+ // Data for the U plane.
+ memcpy(&u_data[0 * u_stride], "\x0A\x0B", 2);
+ memcpy(&u_data[1 * u_stride], "\x0C\x0D", 2);
+
+ // Data for the V plane.
+ memcpy(&v_data[0 * v_stride], "\x0E\x0F", 2);
+ memcpy(&v_data[1 * v_stride], "\x10\x11", 2);
+
+ return frame;
+}
+
+TEST(JpegClientTest, GetMeanAbsoluteDifference) {
+ JpegClient client(std::vector<TestImageFile*>(), nullptr, false);
+ client.hw_out_frame_ = GetTestDecodedData();
+ client.sw_out_frame_ = GetTestDecodedData();
+
+ uint8_t* y_data = client.sw_out_frame_->data(VideoFrame::kYPlane);
+ const int y_stride = client.sw_out_frame_->stride(VideoFrame::kYPlane);
+ uint8_t* u_data = client.sw_out_frame_->data(VideoFrame::kUPlane);
+ const int u_stride = client.sw_out_frame_->stride(VideoFrame::kUPlane);
+ uint8_t* v_data = client.sw_out_frame_->data(VideoFrame::kVPlane);
+ const int v_stride = client.sw_out_frame_->stride(VideoFrame::kVPlane);
+
+ // Change some visible data in the software decoding result.
+ double expected_abs_mean_diff = 0;
+ y_data[0] = 0xF0; // Previously 0x01.
+ expected_abs_mean_diff += 0xF0 - 0x01;
+ y_data[y_stride + 1] = 0x8A; // Previously 0x05.
+ expected_abs_mean_diff += 0x8A - 0x05;
+ u_data[u_stride] = 0x02; // Previously 0x0C.
+ expected_abs_mean_diff += 0x0C - 0x02;
+ v_data[v_stride + 1] = 0x54; // Previously 0x11.
+ expected_abs_mean_diff += 0x54 - 0x11;
+ expected_abs_mean_diff /= 3 * 3 + 2 * 2 * 2;
+ EXPECT_NEAR(expected_abs_mean_diff, client.GetMeanAbsoluteDifference(), 1e-7);
+
+ // Change some non-visible data in the software decoding result, i.e., part of
+ // the stride padding. This should not affect the absolute mean difference.
+ y_data[3] = 0xAB;
+ EXPECT_NEAR(expected_abs_mean_diff, client.GetMeanAbsoluteDifference(), 1e-7);
+}
+
TEST_F(JpegDecodeAcceleratorTest, SimpleDecode) {
for (auto& image : g_env->image_data_user_) {
test_image_files_.push_back(image.get());
expected_status_.push_back(CS_DECODE_PASS);
}
- TestDecode(1);
+ TestDecode(1 /* num_concurrent_decoders */);
}
TEST_F(JpegDecodeAcceleratorTest, MultipleDecoders) {
@@ -581,7 +758,15 @@ TEST_F(JpegDecodeAcceleratorTest, MultipleDecoders) {
test_image_files_.push_back(image.get());
expected_status_.push_back(CS_DECODE_PASS);
}
- TestDecode(3);
+ TestDecode(3 /* num_concurrent_decoders */);
+}
+
+TEST_F(JpegDecodeAcceleratorTest, OddDimensions) {
+ for (auto& image : g_env->image_data_odd_) {
+ test_image_files_.push_back(image.get());
+ expected_status_.push_back(CS_DECODE_PASS);
+ }
+ TestDecode(1 /* num_concurrent_decoders */);
}
TEST_F(JpegDecodeAcceleratorTest, InputSizeChange) {
@@ -592,7 +777,7 @@ TEST_F(JpegDecodeAcceleratorTest, InputSizeChange) {
test_image_files_.push_back(g_env->image_data_1280x720_black_.get());
for (size_t i = 0; i < test_image_files_.size(); i++)
expected_status_.push_back(CS_DECODE_PASS);
- TestDecode(1);
+ TestDecode(1 /* num_concurrent_decoders */);
}
TEST_F(JpegDecodeAcceleratorTest, ResolutionChange) {
@@ -601,19 +786,19 @@ TEST_F(JpegDecodeAcceleratorTest, ResolutionChange) {
test_image_files_.push_back(g_env->image_data_640x368_black_.get());
for (size_t i = 0; i < test_image_files_.size(); i++)
expected_status_.push_back(CS_DECODE_PASS);
- TestDecode(1);
+ TestDecode(1 /* num_concurrent_decoders */);
}
TEST_F(JpegDecodeAcceleratorTest, CodedSizeAlignment) {
test_image_files_.push_back(g_env->image_data_640x360_black_.get());
expected_status_.push_back(CS_DECODE_PASS);
- TestDecode(1);
+ TestDecode(1 /* num_concurrent_decoders */);
}
TEST_F(JpegDecodeAcceleratorTest, FailureJpeg) {
test_image_files_.push_back(g_env->image_data_invalid_.get());
expected_status_.push_back(CS_ERROR);
- TestDecode(1);
+ TestDecode(1 /* num_concurrent_decoders */);
}
TEST_F(JpegDecodeAcceleratorTest, KeepDecodeAfterFailure) {
@@ -621,7 +806,7 @@ TEST_F(JpegDecodeAcceleratorTest, KeepDecodeAfterFailure) {
test_image_files_.push_back(g_env->image_data_1280x720_default_.get());
expected_status_.push_back(CS_ERROR);
expected_status_.push_back(CS_DECODE_PASS);
- TestDecode(1);
+ TestDecode(1 /* num_concurrent_decoders */);
}
TEST_F(JpegDecodeAcceleratorTest, Abort) {
@@ -632,7 +817,7 @@ TEST_F(JpegDecodeAcceleratorTest, Abort) {
// decoding. Then destroy the first decoder when it is still decoding. The
// kernel should not crash during this test.
expected_status_.push_back(CS_DECODE_PASS);
- TestDecode(2);
+ TestDecode(2 /* num_concurrent_decoders */);
}
TEST_F(JpegDecodeAcceleratorTest, PerfJDA) {
diff --git a/chromium/media/gpu/jpeg_encode_accelerator_unittest.cc b/chromium/media/gpu/jpeg_encode_accelerator_unittest.cc
index 63818b44750..3485fb1b187 100644
--- a/chromium/media/gpu/jpeg_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/jpeg_encode_accelerator_unittest.cc
@@ -412,7 +412,7 @@ bool JpegClient::CompareHardwareAndSoftwareResults(int width,
int u_stride = width / 2;
int v_stride = u_stride;
if (libyuv::ConvertToI420(
- static_cast<const uint8*>(hw_out_shm_->memory()), hw_encoded_size,
+ static_cast<const uint8_t*>(hw_out_shm_->memory()), hw_encoded_size,
hw_yuv_result, y_stride, hw_yuv_result + y_stride * height, u_stride,
hw_yuv_result + y_stride * height + u_stride * height / 2, v_stride,
0, 0, width, height, width, height, libyuv::kRotate0,
@@ -422,7 +422,7 @@ bool JpegClient::CompareHardwareAndSoftwareResults(int width,
uint8_t* sw_yuv_result = new uint8_t[yuv_size];
if (libyuv::ConvertToI420(
- static_cast<const uint8*>(sw_out_shm_->memory()), sw_encoded_size,
+ static_cast<const uint8_t*>(sw_out_shm_->memory()), sw_encoded_size,
sw_yuv_result, y_stride, sw_yuv_result + y_stride * height, u_stride,
sw_yuv_result + y_stride * height + u_stride * height / 2, v_stride,
0, 0, width, height, width, height, libyuv::kRotate0,
diff --git a/chromium/media/gpu/shared_memory_region.cc b/chromium/media/gpu/shared_memory_region.cc
deleted file mode 100644
index 6db362d69bc..00000000000
--- a/chromium/media/gpu/shared_memory_region.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright (c) 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/shared_memory_region.h"
-
-namespace media {
-
-SharedMemoryRegion::SharedMemoryRegion(const base::SharedMemoryHandle& handle,
- off_t offset,
- size_t size,
- bool read_only)
- : shm_(handle, read_only), offset_(offset), size_(size) {}
-
-SharedMemoryRegion::SharedMemoryRegion(const BitstreamBuffer& bitstream_buffer,
- bool read_only)
- : SharedMemoryRegion(bitstream_buffer.handle(),
- bitstream_buffer.offset(),
- bitstream_buffer.size(),
- read_only) {}
-
-bool SharedMemoryRegion::Map() {
- return shm_.MapAt(offset_, size_);
-}
-
-void* SharedMemoryRegion::memory() {
- return shm_.memory();
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/shared_memory_region.h b/chromium/media/gpu/shared_memory_region.h
deleted file mode 100644
index 48c8098619e..00000000000
--- a/chromium/media/gpu/shared_memory_region.h
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright (c) 2015 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_SHARED_MEMORY_REGION_H_
-#define MEDIA_GPU_SHARED_MEMORY_REGION_H_
-
-#include "base/memory/shared_memory_handle.h"
-#include "media/base/bitstream_buffer.h"
-#include "media/base/unaligned_shared_memory.h"
-
-namespace media {
-
-// Helper class to access a region of a SharedMemory. Different from
-// SharedMemory, in which the |offset| of function MapAt() must be aligned to
-// the value of |SysInfo::VMAllocationGranularity()|, the |offset| of a
-// SharedMemoryRegion needs not to be aligned, this class hides the details
-// and returns the mapped address of the given offset.
-//
-// TODO(sandersd): This is now a trivial wrapper around
-// media::UnalignedSharedMemory. Switch all users over and delete
-// SharedMemoryRegion.
-class SharedMemoryRegion {
- public:
- // Creates a SharedMemoryRegion.
- // The mapped memory region begins at |offset| bytes from the start of the
- // shared memory and the length is |size|. It will take the ownership of
- // the |handle| and release the resource when being destroyed. Different
- // from SharedMemory, the |offset| needs not to be aligned to the value of
- // |SysInfo::VMAllocationGranularity()|.
- SharedMemoryRegion(const base::SharedMemoryHandle& handle,
- off_t offset,
- size_t size,
- bool read_only);
-
- // Creates a SharedMemoryRegion from the given |bistream_buffer|.
- SharedMemoryRegion(const BitstreamBuffer& bitstream_buffer, bool read_only);
-
- // Maps the shared memory into the caller's address space.
- // Return true on success, false otherwise.
- bool Map();
-
- // Gets a pointer to the mapped region if it has been mapped via Map().
- // Returns |nullptr| if it is not mapped. The returned pointer points
- // to the memory at the offset previously passed to the constructor.
- void* memory();
-
- size_t size() const { return size_; }
-
- private:
- UnalignedSharedMemory shm_;
- off_t offset_;
- size_t size_;
-
- DISALLOW_COPY_AND_ASSIGN(SharedMemoryRegion);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_SHARED_MEMORY_REGION_H_
diff --git a/chromium/media/gpu/v4l2/v4l2_image_processor.cc b/chromium/media/gpu/v4l2/v4l2_image_processor.cc
index 62cd5b23584..45a0e02875f 100644
--- a/chromium/media/gpu/v4l2/v4l2_image_processor.cc
+++ b/chromium/media/gpu/v4l2/v4l2_image_processor.cc
@@ -17,6 +17,7 @@
#include "base/numerics/safe_conversions.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/scopedfd_helper.h"
#include "media/gpu/v4l2/v4l2_image_processor.h"
#define DVLOGF(level) DVLOG(level) << __func__ << "(): "
@@ -62,11 +63,13 @@ V4L2ImageProcessor::JobRecord::JobRecord() : output_buffer_index(-1) {}
V4L2ImageProcessor::JobRecord::~JobRecord() {}
-V4L2ImageProcessor::V4L2ImageProcessor(const scoped_refptr<V4L2Device>& device)
+V4L2ImageProcessor::V4L2ImageProcessor(const scoped_refptr<V4L2Device>& device,
+ v4l2_memory input_memory_type,
+ v4l2_memory output_memory_type)
: input_format_(PIXEL_FORMAT_UNKNOWN),
output_format_(PIXEL_FORMAT_UNKNOWN),
- input_memory_type_(V4L2_MEMORY_USERPTR),
- output_memory_type_(V4L2_MEMORY_MMAP),
+ input_memory_type_(input_memory_type),
+ output_memory_type_(output_memory_type),
input_format_fourcc_(0),
output_format_fourcc_(0),
input_planes_count_(0),
@@ -81,11 +84,18 @@ V4L2ImageProcessor::V4L2ImageProcessor(const scoped_refptr<V4L2Device>& device)
output_buffer_queued_count_(0),
num_buffers_(0),
weak_this_factory_(this) {
+ DCHECK(input_memory_type == V4L2_MEMORY_USERPTR ||
+ input_memory_type == V4L2_MEMORY_DMABUF);
+ DCHECK(output_memory_type == V4L2_MEMORY_MMAP ||
+ output_memory_type == V4L2_MEMORY_DMABUF);
weak_this_ = weak_this_factory_.GetWeakPtr();
}
V4L2ImageProcessor::~V4L2ImageProcessor() {
DCHECK(child_task_runner_->BelongsToCurrentThread());
+
+ Destroy();
+
DCHECK(!device_thread_.IsRunning());
DCHECK(!device_poll_thread_.IsRunning());
@@ -109,8 +119,6 @@ void V4L2ImageProcessor::NotifyErrorOnChildThread(
bool V4L2ImageProcessor::Initialize(VideoPixelFormat input_format,
VideoPixelFormat output_format,
- v4l2_memory input_memory_type,
- v4l2_memory output_memory_type,
gfx::Size input_visible_size,
gfx::Size input_allocated_size,
gfx::Size output_visible_size,
@@ -120,10 +128,6 @@ bool V4L2ImageProcessor::Initialize(VideoPixelFormat input_format,
VLOGF(2);
DCHECK(!error_cb.is_null());
DCHECK_GT(num_buffers, 0);
- DCHECK(input_memory_type == V4L2_MEMORY_USERPTR ||
- input_memory_type == V4L2_MEMORY_DMABUF);
- DCHECK(output_memory_type == V4L2_MEMORY_MMAP ||
- output_memory_type == V4L2_MEMORY_DMABUF);
error_cb_ = error_cb;
input_format_ = input_format;
@@ -138,8 +142,6 @@ bool V4L2ImageProcessor::Initialize(VideoPixelFormat input_format,
return false;
}
- input_memory_type_ = input_memory_type;
- output_memory_type_ = output_memory_type;
input_visible_size_ = input_visible_size;
input_allocated_size_ = input_allocated_size;
output_visible_size_ = output_visible_size;
@@ -189,15 +191,6 @@ bool V4L2ImageProcessor::Initialize(VideoPixelFormat input_format,
return true;
}
-std::vector<base::ScopedFD> V4L2ImageProcessor::GetDmabufsForOutputBuffer(
- int output_buffer_index) {
- DCHECK_GE(output_buffer_index, 0);
- DCHECK_LT(output_buffer_index, num_buffers_);
- return device_->GetDmabufsForV4L2Buffer(output_buffer_index,
- output_planes_count_,
- V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
-}
-
// static
bool V4L2ImageProcessor::IsSupported() {
scoped_refptr<V4L2Device> device = V4L2Device::Create();
@@ -254,40 +247,70 @@ bool V4L2ImageProcessor::TryOutputFormat(uint32_t input_pixelformat,
return true;
}
+gfx::Size V4L2ImageProcessor::input_allocated_size() const {
+ return input_allocated_size_;
+}
+
+gfx::Size V4L2ImageProcessor::output_allocated_size() const {
+ return output_allocated_size_;
+}
+
bool V4L2ImageProcessor::Process(const scoped_refptr<VideoFrame>& frame,
int output_buffer_index,
std::vector<base::ScopedFD> output_dmabuf_fds,
- const FrameReadyCB& cb) {
+ FrameReadyCB cb) {
DVLOGF(4) << "ts=" << frame->timestamp().InMilliseconds();
- size_t expected_num_fds =
- (output_memory_type_ == V4L2_MEMORY_DMABUF ? output_planes_count_ : 0);
- if (expected_num_fds != output_dmabuf_fds.size()) {
- VLOGF(1) << "wrong number of output fds. Expected " << expected_num_fds
+
+ switch (output_memory_type_) {
+ case V4L2_MEMORY_MMAP:
+ if (!output_dmabuf_fds.empty()) {
+ VLOGF(1) << "output_dmabuf_fds must be empty for MMAP output mode";
+ return false;
+ }
+ output_dmabuf_fds =
+ DuplicateFDs(output_buffer_map_[output_buffer_index].dmabuf_fds);
+ break;
+
+ case V4L2_MEMORY_DMABUF:
+ break;
+
+ default:
+ NOTREACHED();
+ return false;
+ }
+
+ if (output_dmabuf_fds.size() != output_planes_count_) {
+ VLOGF(1) << "wrong number of output fds. Expected " << output_planes_count_
<< ", actual " << output_dmabuf_fds.size();
return false;
}
std::unique_ptr<JobRecord> job_record(new JobRecord());
- job_record->frame = frame;
+ job_record->input_frame = frame;
job_record->output_buffer_index = output_buffer_index;
- job_record->output_dmabuf_fds = std::move(output_dmabuf_fds);
- job_record->ready_cb = cb;
+ job_record->ready_cb = std::move(cb);
+
+ // Create the output frame
+ job_record->output_frame = VideoFrame::WrapExternalDmabufs(
+ output_format_, output_allocated_size_, gfx::Rect(output_visible_size_),
+ output_visible_size_, std::move(output_dmabuf_fds),
+ job_record->input_frame->timestamp());
+
+ if (!job_record->output_frame)
+ return false;
device_thread_.task_runner()->PostTask(
- FROM_HERE, base::Bind(&V4L2ImageProcessor::ProcessTask,
- base::Unretained(this), base::Passed(&job_record)));
+ FROM_HERE, base::BindOnce(&V4L2ImageProcessor::ProcessTask,
+ base::Unretained(this), std::move(job_record)));
return true;
}
void V4L2ImageProcessor::ProcessTask(std::unique_ptr<JobRecord> job_record) {
- int index = job_record->output_buffer_index;
- DVLOGF(4) << "Reusing output buffer, index=" << index;
+ DVLOGF(4) << "Reusing output buffer, index="
+ << job_record->output_buffer_index;
DCHECK(device_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(output_buffer_map_[index].dmabuf_fds.empty());
- output_buffer_map_[index].dmabuf_fds =
- std::move(job_record->output_dmabuf_fds);
- EnqueueOutput(index);
+ EnqueueOutput(job_record.get());
input_queue_.emplace(std::move(job_record));
EnqueueInput();
}
@@ -331,8 +354,6 @@ void V4L2ImageProcessor::Destroy() {
// Otherwise DestroyTask() is not needed.
DCHECK(!device_poll_thread_.IsRunning());
}
-
- delete this;
}
bool V4L2ImageProcessor::CreateInputBuffers() {
@@ -473,6 +494,19 @@ bool V4L2ImageProcessor::CreateOutputBuffers() {
DCHECK(output_buffer_map_.empty());
output_buffer_map_.resize(reqbufs.count);
+ // Get the DMA-BUF FDs for MMAP buffers
+ if (output_memory_type_ == V4L2_MEMORY_MMAP) {
+ for (unsigned int i = 0; i < output_buffer_map_.size(); i++) {
+ OutputRecord& output_record = output_buffer_map_[i];
+ output_record.dmabuf_fds = device_->GetDmabufsForV4L2Buffer(
+ i, output_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
+ if (output_record.dmabuf_fds.empty()) {
+ VLOGF(1) << "failed to get fds of output buffer";
+ return false;
+ }
+ }
+ }
+
return true;
}
@@ -497,14 +531,14 @@ void V4L2ImageProcessor::DestroyOutputBuffers() {
DCHECK(child_task_runner_->BelongsToCurrentThread());
DCHECK(!output_streamon_);
+ output_buffer_map_.clear();
+
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = 0;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
reqbufs.memory = output_memory_type_;
IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
-
- output_buffer_map_.clear();
}
void V4L2ImageProcessor::DevicePollTask(bool poll_device) {
@@ -583,12 +617,12 @@ void V4L2ImageProcessor::EnqueueInput() {
}
}
-void V4L2ImageProcessor::EnqueueOutput(int index) {
+void V4L2ImageProcessor::EnqueueOutput(const JobRecord* job_record) {
DVLOGF(4);
DCHECK(device_thread_.task_runner()->BelongsToCurrentThread());
const int old_outputs_queued = output_buffer_queued_count_;
- if (!EnqueueOutputRecord(index))
+ if (!EnqueueOutputRecord(job_record))
return;
if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
@@ -662,7 +696,6 @@ void V4L2ImageProcessor::Dequeue() {
OutputRecord& output_record = output_buffer_map_[dqbuf.index];
DCHECK(output_record.at_device);
output_record.at_device = false;
- output_record.dmabuf_fds.clear();
output_buffer_queued_count_--;
// Jobs are always processed in FIFO order.
@@ -673,8 +706,9 @@ void V4L2ImageProcessor::Dequeue() {
DVLOGF(4) << "Processing finished, returning frame, index=" << dqbuf.index;
child_task_runner_->PostTask(
- FROM_HERE, base::Bind(&V4L2ImageProcessor::FrameReady, weak_this_,
- job_record->ready_cb, dqbuf.index));
+ FROM_HERE, base::BindOnce(&V4L2ImageProcessor::FrameReady, weak_this_,
+ std::move(job_record->ready_cb),
+ job_record->output_frame));
}
}
@@ -689,7 +723,7 @@ bool V4L2ImageProcessor::EnqueueInputRecord() {
const int index = free_input_buffers_.back();
InputRecord& input_record = input_buffer_map_[index];
DCHECK(!input_record.at_device);
- input_record.frame = job_record->frame;
+ input_record.frame = job_record->input_frame;
struct v4l2_buffer qbuf;
struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
memset(&qbuf, 0, sizeof(qbuf));
@@ -699,24 +733,40 @@ bool V4L2ImageProcessor::EnqueueInputRecord() {
qbuf.memory = input_memory_type_;
qbuf.m.planes = qbuf_planes;
qbuf.length = input_planes_count_;
+
+ std::vector<int> fds;
+ if (input_memory_type_ == V4L2_MEMORY_DMABUF) {
+ fds = input_record.frame->DmabufFds();
+ if (fds.size() != input_planes_count_) {
+ VLOGF(1) << "Invalid number of planes in the frame";
+ return false;
+ }
+ }
for (size_t i = 0; i < input_planes_count_; ++i) {
qbuf.m.planes[i].bytesused =
VideoFrame::PlaneSize(input_record.frame->format(), i,
input_allocated_size_)
.GetArea();
qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused;
- if (input_memory_type_ == V4L2_MEMORY_USERPTR) {
- qbuf.m.planes[i].m.userptr =
- reinterpret_cast<unsigned long>(input_record.frame->data(i));
- } else {
- qbuf.m.planes[i].m.fd = input_record.frame->DmabufFd(i);
+ switch (input_memory_type_) {
+ case V4L2_MEMORY_USERPTR:
+ qbuf.m.planes[i].m.userptr =
+ reinterpret_cast<unsigned long>(input_record.frame->data(i));
+ break;
+ case V4L2_MEMORY_DMABUF:
+ qbuf.m.planes[i].m.fd = fds[i];
+ break;
+ default:
+ NOTREACHED();
+ return false;
}
}
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
input_record.at_device = true;
DVLOGF(4) << "enqueued frame ts="
- << job_record->frame->timestamp().InMilliseconds() << " to device.";
+ << job_record->input_frame->timestamp().InMilliseconds()
+ << " to device.";
running_jobs_.emplace(std::move(job_record));
free_input_buffers_.pop_back();
@@ -725,8 +775,9 @@ bool V4L2ImageProcessor::EnqueueInputRecord() {
return true;
}
-bool V4L2ImageProcessor::EnqueueOutputRecord(int index) {
+bool V4L2ImageProcessor::EnqueueOutputRecord(const JobRecord* job_record) {
DVLOGF(4);
+ int index = job_record->output_buffer_index;
DCHECK_GE(index, 0);
DCHECK_LT(static_cast<size_t>(index), output_buffer_map_.size());
// Enqueue an output (VIDEO_CAPTURE) buffer.
@@ -740,8 +791,13 @@ bool V4L2ImageProcessor::EnqueueOutputRecord(int index) {
qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
qbuf.memory = output_memory_type_;
if (output_memory_type_ == V4L2_MEMORY_DMABUF) {
- for (size_t i = 0; i < output_record.dmabuf_fds.size(); ++i)
- qbuf_planes[i].m.fd = output_record.dmabuf_fds[i].get();
+ std::vector<int> fds = job_record->output_frame->DmabufFds();
+ if (fds.size() != output_planes_count_) {
+ VLOGF(1) << "Invalid number of FDs in output record";
+ return false;
+ }
+ for (size_t i = 0; i < fds.size(); ++i)
+ qbuf_planes[i].m.fd = fds[i];
}
qbuf.m.planes = qbuf_planes;
qbuf.length = output_planes_count_;
@@ -815,15 +871,15 @@ void V4L2ImageProcessor::StopDevicePoll() {
}
input_buffer_queued_count_ = 0;
- output_buffer_map_.clear();
- output_buffer_map_.resize(num_buffers_);
+ for (auto& output_buffer : output_buffer_map_)
+ output_buffer.at_device = false;
output_buffer_queued_count_ = 0;
}
-void V4L2ImageProcessor::FrameReady(const FrameReadyCB& cb,
- int output_buffer_index) {
+void V4L2ImageProcessor::FrameReady(FrameReadyCB cb,
+ scoped_refptr<VideoFrame> frame) {
DCHECK(child_task_runner_->BelongsToCurrentThread());
- cb.Run(output_buffer_index);
+ std::move(cb).Run(frame);
}
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_image_processor.h b/chromium/media/gpu/v4l2/v4l2_image_processor.h
index 509e564995e..bfd6d61a3a8 100644
--- a/chromium/media/gpu/v4l2/v4l2_image_processor.h
+++ b/chromium/media/gpu/v4l2/v4l2_image_processor.h
@@ -17,6 +17,7 @@
#include "base/memory/weak_ptr.h"
#include "base/threading/thread.h"
#include "media/base/video_frame.h"
+#include "media/gpu/image_processor.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/v4l2/v4l2_device.h"
@@ -25,10 +26,12 @@ namespace media {
// Handles image processing accelerators that expose a V4L2 memory-to-memory
// interface. The threading model of this class is the same as for other V4L2
// hardware accelerators (see V4L2VideoDecodeAccelerator) for more details.
-class MEDIA_GPU_EXPORT V4L2ImageProcessor {
+class MEDIA_GPU_EXPORT V4L2ImageProcessor : public ImageProcessor {
public:
- explicit V4L2ImageProcessor(const scoped_refptr<V4L2Device>& device);
- virtual ~V4L2ImageProcessor();
+ explicit V4L2ImageProcessor(const scoped_refptr<V4L2Device>& device,
+ v4l2_memory input_memory_type,
+ v4l2_memory output_memory_type);
+ ~V4L2ImageProcessor() override;
// Initializes the processor to convert from |input_format| to |output_format|
// and/or scale from |input_visible_size| to |output_visible_size|.
@@ -39,20 +42,12 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessor {
// configuration is supported.
bool Initialize(VideoPixelFormat input_format,
VideoPixelFormat output_format,
- v4l2_memory input_memory_type,
- v4l2_memory output_memory_type,
gfx::Size input_visible_size,
gfx::Size input_allocated_size,
gfx::Size output_visible_size,
gfx::Size output_allocated_size,
int num_buffers,
- const base::Closure& error_cb);
-
- // Returns a vector of dmabuf file descriptors, exported for V4L2 output
- // buffer with |index|. The size of vector will be the number of planes of the
- // buffer. Return an empty vector on failure.
- std::vector<base::ScopedFD> GetDmabufsForOutputBuffer(
- int output_buffer_index);
+ const base::Closure& error_cb) override;
// Returns true if image processing is supported on this platform.
static bool IsSupported();
@@ -72,16 +67,8 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessor {
gfx::Size* size,
size_t* num_planes);
- // Returns input allocated size required by the processor to be fed with.
- gfx::Size input_allocated_size() const { return input_allocated_size_; }
-
- // Returns output allocated size required by the processor.
- gfx::Size output_allocated_size() const { return output_allocated_size_; }
-
- // Callback to be used to return the index of a processed image to the
- // client. After the client is done with the frame, call Process with the
- // index to return the output buffer to the image processor.
- typedef base::Callback<void(int output_buffer_index)> FrameReadyCB;
+ gfx::Size input_allocated_size() const override;
+ gfx::Size output_allocated_size() const override;
// Called by client to process |frame|. The resulting processed frame will be
// stored in |output_buffer_index| output buffer and notified via |cb|. The
@@ -93,16 +80,11 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessor {
bool Process(const scoped_refptr<VideoFrame>& frame,
int output_buffer_index,
std::vector<base::ScopedFD> output_dmabuf_fds,
- const FrameReadyCB& cb);
+ FrameReadyCB cb) override;
// Reset all processing frames. After this method returns, no more callbacks
// will be invoked. V4L2ImageProcessor is ready to process more frames.
- bool Reset();
-
- // Stop all processing and clean up. After this method returns no more
- // callbacks will be invoked. Deletes |this| unconditionally, so make sure
- // to drop all pointers to it!
- void Destroy();
+ bool Reset() override;
private:
// Record for input buffers.
@@ -120,8 +102,8 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessor {
OutputRecord(OutputRecord&&);
~OutputRecord();
bool at_device;
- // The processed frame will be stored in these buffers if
- // |output_memory_type_| is V4L2_MEMORY_DMABUF
+ // The exported FDs of the frame will be stored here if
+ // |output_memory_type_| is V4L2_MEMORY_MMAP
std::vector<base::ScopedFD> dmabuf_fds;
};
@@ -134,17 +116,18 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessor {
struct JobRecord {
JobRecord();
~JobRecord();
- scoped_refptr<VideoFrame> frame;
+ scoped_refptr<VideoFrame> input_frame;
int output_buffer_index;
+ scoped_refptr<VideoFrame> output_frame;
std::vector<base::ScopedFD> output_dmabuf_fds;
FrameReadyCB ready_cb;
};
void EnqueueInput();
- void EnqueueOutput(int index);
+ void EnqueueOutput(const JobRecord* job_record);
void Dequeue();
bool EnqueueInputRecord();
- bool EnqueueOutputRecord(int index);
+ bool EnqueueOutputRecord(const JobRecord* job_record);
bool CreateInputBuffers();
bool CreateOutputBuffers();
void DestroyInputBuffers();
@@ -164,7 +147,11 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessor {
void DevicePollTask(bool poll_device);
// A processed frame is ready.
- void FrameReady(const FrameReadyCB& cb, int output_buffer_index);
+ void FrameReady(FrameReadyCB cb, scoped_refptr<VideoFrame> frame);
+
+ // Stop all processing and clean up. After this method returns no more
+ // callbacks will be invoked.
+ void Destroy();
// Size and format-related members remain constant after initialization.
// The visible/allocated sizes of the input frame.
diff --git a/chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.cc
index 3c9bf099ead..6cb64f5a87f 100644
--- a/chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.cc
@@ -133,7 +133,8 @@ V4L2JpegDecodeAccelerator::JobRecord::JobRecord(
const BitstreamBuffer& bitstream_buffer,
scoped_refptr<VideoFrame> video_frame)
: bitstream_buffer_id(bitstream_buffer.id()),
- shm(bitstream_buffer, true),
+ shm(bitstream_buffer.handle(), bitstream_buffer.size(), true),
+ offset(bitstream_buffer.offset()),
out_frame(video_frame) {}
V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {}
@@ -288,7 +289,7 @@ bool V4L2JpegDecodeAccelerator::IsSupported() {
void V4L2JpegDecodeAccelerator::DecodeTask(
std::unique_ptr<JobRecord> job_record) {
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
- if (!job_record->shm.Map()) {
+ if (!job_record->shm.MapAt(job_record->offset, job_record->shm.size())) {
VPLOGF(1) << "could not map bitstream_buffer";
PostNotifyError(job_record->bitstream_buffer_id, UNREADABLE_INPUT);
return;
@@ -680,6 +681,21 @@ bool V4L2JpegDecodeAccelerator::ConvertOutputImage(
size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane);
size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane);
+ // It is assumed that |dst_frame| is backed by enough memory that it is safe
+ // to store an I420 frame of |dst_width|x|dst_height| in it using the data
+ // pointers and strides from above.
+ int dst_width = dst_frame->coded_size().width();
+ int dst_height = dst_frame->coded_size().height();
+
+ // The video frame's coded dimensions should be even for the I420 format.
+ DCHECK_EQ(0, dst_width % 2);
+ DCHECK_EQ(0, dst_height % 2);
+
+ // The coded size of the hardware buffer should be at least as large as the
+ // video frame's coded size.
+ DCHECK_GE(output_buffer_coded_size_.width(), dst_width);
+ DCHECK_GE(output_buffer_coded_size_.height(), dst_height);
+
if (output_buffer_num_planes_ == 1) {
// Use ConvertToI420 to convert all splane buffers.
// If the source format is I420, ConvertToI420 will simply copy the frame.
@@ -691,9 +707,8 @@ bool V4L2JpegDecodeAccelerator::ConvertOutputImage(
static_cast<uint8_t*>(output_buffer.address[0]), src_size, dst_y,
dst_y_stride, dst_u, dst_u_stride, dst_v, dst_v_stride, 0, 0,
output_buffer_coded_size_.width(),
- output_buffer_coded_size_.height(), dst_frame->coded_size().width(),
- dst_frame->coded_size().height(), libyuv::kRotate0,
- output_buffer_pixelformat_)) {
+ output_buffer_coded_size_.height(), dst_width, dst_height,
+ libyuv::kRotate0, output_buffer_pixelformat_)) {
VLOGF(1) << "ConvertToI420 failed. Source format: "
<< output_buffer_pixelformat_;
return false;
@@ -709,18 +724,16 @@ bool V4L2JpegDecodeAccelerator::ConvertOutputImage(
if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M) {
if (libyuv::I420Copy(src_y, src_y_stride, src_u, src_u_stride, src_v,
src_v_stride, dst_y, dst_y_stride, dst_u,
- dst_u_stride, dst_v, dst_v_stride,
- output_buffer_coded_size_.width(),
- output_buffer_coded_size_.height())) {
+ dst_u_stride, dst_v, dst_v_stride, dst_width,
+ dst_height)) {
VLOGF(1) << "I420Copy failed";
return false;
}
} else { // output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M
if (libyuv::I422ToI420(src_y, src_y_stride, src_u, src_u_stride, src_v,
src_v_stride, dst_y, dst_y_stride, dst_u,
- dst_u_stride, dst_v, dst_v_stride,
- output_buffer_coded_size_.width(),
- output_buffer_coded_size_.height())) {
+ dst_u_stride, dst_v, dst_v_stride, dst_width,
+ dst_height)) {
VLOGF(1) << "I422ToI420 failed";
return false;
}
diff --git a/chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.h
index 05afb04c31b..f68c213c6b3 100644
--- a/chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_jpeg_decode_accelerator.h
@@ -19,9 +19,9 @@
#include "base/single_thread_task_runner.h"
#include "base/threading/thread.h"
#include "media/base/bitstream_buffer.h"
+#include "media/base/unaligned_shared_memory.h"
#include "media/base/video_frame.h"
#include "media/gpu/media_gpu_export.h"
-#include "media/gpu/shared_memory_region.h"
#include "media/gpu/v4l2/v4l2_device.h"
#include "media/video/jpeg_decode_accelerator.h"
@@ -67,7 +67,9 @@ class MEDIA_GPU_EXPORT V4L2JpegDecodeAccelerator
// Input image buffer ID.
int32_t bitstream_buffer_id;
// Memory mapped from |bitstream_buffer|.
- SharedMemoryRegion shm;
+ UnalignedSharedMemory shm;
+ // Offset used for shm.
+ off_t offset;
// Output frame buffer.
scoped_refptr<VideoFrame> out_frame;
};
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
index 91650ff6e97..95d1681adb9 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
@@ -28,7 +28,7 @@
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
-#include "media/gpu/shared_memory_region.h"
+#include "media/base/unaligned_shared_memory.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/gl_image.h"
#include "ui/gl/scoped_binders.h"
@@ -196,12 +196,13 @@ struct V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef {
BitstreamBufferRef(
base::WeakPtr<VideoDecodeAccelerator::Client>& client,
const scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
- SharedMemoryRegion* shm,
+ const BitstreamBuffer* buffer,
int32_t input_id);
~BitstreamBufferRef();
const base::WeakPtr<VideoDecodeAccelerator::Client> client;
const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
- const std::unique_ptr<SharedMemoryRegion> shm;
+ const std::unique_ptr<UnalignedSharedMemory> shm;
+ off_t offset;
off_t bytes_used;
const int32_t input_id;
};
@@ -209,11 +210,15 @@ struct V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef {
V4L2SliceVideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
base::WeakPtr<VideoDecodeAccelerator::Client>& client,
const scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
- SharedMemoryRegion* shm,
+ const BitstreamBuffer* buffer,
int32_t input_id)
: client(client),
client_task_runner(client_task_runner),
- shm(shm),
+ shm(buffer ? std::make_unique<UnalignedSharedMemory>(buffer->handle(),
+ buffer->size(),
+ true)
+ : nullptr),
+ offset(buffer ? buffer->offset() : 0),
bytes_used(0),
input_id(input_id) {}
@@ -257,29 +262,32 @@ V4L2SliceVideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
class V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator
: public H264Decoder::H264Accelerator {
public:
+ using Status = H264Decoder::H264Accelerator::Status;
+
explicit V4L2H264Accelerator(V4L2SliceVideoDecodeAccelerator* v4l2_dec);
~V4L2H264Accelerator() override;
// H264Decoder::H264Accelerator implementation.
scoped_refptr<H264Picture> CreateH264Picture() override;
- bool SubmitFrameMetadata(const H264SPS* sps,
- const H264PPS* pps,
- const H264DPB& dpb,
- const H264Picture::Vector& ref_pic_listp0,
- const H264Picture::Vector& ref_pic_listb0,
- const H264Picture::Vector& ref_pic_listb1,
- const scoped_refptr<H264Picture>& pic) override;
-
- bool SubmitSlice(const H264PPS* pps,
- const H264SliceHeader* slice_hdr,
- const H264Picture::Vector& ref_pic_list0,
- const H264Picture::Vector& ref_pic_list1,
- const scoped_refptr<H264Picture>& pic,
- const uint8_t* data,
- size_t size) override;
-
- bool SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
+ Status SubmitFrameMetadata(const H264SPS* sps,
+ const H264PPS* pps,
+ const H264DPB& dpb,
+ const H264Picture::Vector& ref_pic_listp0,
+ const H264Picture::Vector& ref_pic_listb0,
+ const H264Picture::Vector& ref_pic_listb1,
+ const scoped_refptr<H264Picture>& pic) override;
+
+ Status SubmitSlice(const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ const scoped_refptr<H264Picture>& pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) override;
+
+ Status SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
bool OutputPicture(const scoped_refptr<H264Picture>& pic) override;
void Reset() override;
@@ -1337,15 +1345,16 @@ void V4L2SliceVideoDecodeAccelerator::DecodeTask(
<< " size=" << bitstream_buffer.size();
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
- std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
- decode_client_, decode_task_runner_,
- new SharedMemoryRegion(bitstream_buffer, true), bitstream_buffer.id()));
+ std::unique_ptr<BitstreamBufferRef> bitstream_record(
+ new BitstreamBufferRef(decode_client_, decode_task_runner_,
+ &bitstream_buffer, bitstream_buffer.id()));
// Skip empty buffer.
if (bitstream_buffer.size() == 0)
return;
- if (!bitstream_record->shm->Map()) {
+ if (!bitstream_record->shm->MapAt(bitstream_record->offset,
+ bitstream_record->shm->size())) {
VLOGF(1) << "Could not map bitstream_buffer";
NOTIFY_ERROR(UNREADABLE_INPUT);
return;
@@ -1431,7 +1440,7 @@ void V4L2SliceVideoDecodeAccelerator::DecodeBufferTask() {
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
- case AcceleratedVideoDecoder::kNoKey:
+ case AcceleratedVideoDecoder::kTryAgain:
NOTREACHED() << "Should not reach here unless this class accepts "
"encrypted streams.";
DVLOGF(4) << "No key for decoding stream.";
@@ -2135,7 +2144,8 @@ void V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::H264DPBToV4L2DPB(
}
}
-bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitFrameMetadata(
+H264Decoder::H264Accelerator::Status
+V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitFrameMetadata(
const H264SPS* sps,
const H264PPS* pps,
const H264DPB& dpb,
@@ -2310,20 +2320,22 @@ bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitFrameMetadata(
H264DPBToV4L2DPB(dpb, &ref_surfaces);
dec_surface->SetReferenceSurfaces(ref_surfaces);
- return true;
+ return Status::kOk;
}
-bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitSlice(
+H264Decoder::H264Accelerator::Status
+V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitSlice(
const H264PPS* pps,
const H264SliceHeader* slice_hdr,
const H264Picture::Vector& ref_pic_list0,
const H264Picture::Vector& ref_pic_list1,
const scoped_refptr<H264Picture>& pic,
const uint8_t* data,
- size_t size) {
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) {
if (num_slices_ == kMaxSlices) {
VLOGF(1) << "Over limit of supported slices per frame";
- return false;
+ return Status::kFail;
}
struct v4l2_ctrl_h264_slice_param& v4l2_slice_param =
@@ -2431,7 +2443,9 @@ bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitSlice(
data_copy[2] = 0x01;
memcpy(data_copy.get() + 3, data, size);
return v4l2_dec_->SubmitSlice(dec_surface->input_record(), data_copy.get(),
- data_copy_size);
+ data_copy_size)
+ ? Status::kOk
+ : Status::kFail;
}
bool V4L2SliceVideoDecodeAccelerator::SubmitSlice(int index,
@@ -2477,7 +2491,8 @@ bool V4L2SliceVideoDecodeAccelerator::IsCtrlExposed(uint32_t ctrl_id) {
return (device_->Ioctl(VIDIOC_QUERYCTRL, &query_ctrl) == 0);
}
-bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitDecode(
+H264Decoder::H264Accelerator::Status
+V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitDecode(
const scoped_refptr<H264Picture>& pic) {
scoped_refptr<V4L2DecodeSurface> dec_surface =
H264PictureToV4L2DecodeSurface(pic);
@@ -2508,12 +2523,12 @@ bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::SubmitDecode(
ext_ctrls.controls = &ctrls[0];
ext_ctrls.config_store = dec_surface->config_store();
if (!v4l2_dec_->SubmitExtControls(&ext_ctrls))
- return false;
+ return Status::kFail;
Reset();
v4l2_dec_->DecodeSurface(dec_surface);
- return true;
+ return Status::kOk;
}
bool V4L2SliceVideoDecodeAccelerator::V4L2H264Accelerator::OutputPicture(
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
index 3458b4a92d2..7cac10b1422 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
@@ -23,7 +23,9 @@
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "media/base/media_switches.h"
-#include "media/gpu/shared_memory_region.h"
+#include "media/base/scopedfd_helper.h"
+#include "media/base/unaligned_shared_memory.h"
+#include "media/gpu/v4l2/v4l2_image_processor.h"
#include "media/video/h264_parser.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gl/gl_context.h"
@@ -71,12 +73,13 @@ struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
BitstreamBufferRef(
base::WeakPtr<Client>& client,
scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
- std::unique_ptr<SharedMemoryRegion> shm,
+ const BitstreamBuffer* buffer,
int32_t input_id);
~BitstreamBufferRef();
const base::WeakPtr<Client> client;
const scoped_refptr<base::SingleThreadTaskRunner> client_task_runner;
- const std::unique_ptr<SharedMemoryRegion> shm;
+ const std::unique_ptr<UnalignedSharedMemory> shm;
+ off_t offset;
size_t bytes_used;
const int32_t input_id;
};
@@ -91,11 +94,15 @@ struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef {
V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
base::WeakPtr<Client>& client,
scoped_refptr<base::SingleThreadTaskRunner>& client_task_runner,
- std::unique_ptr<SharedMemoryRegion> shm,
+ const BitstreamBuffer* buffer,
int32_t input_id)
: client(client),
client_task_runner(client_task_runner),
- shm(std::move(shm)),
+ shm(buffer ? std::make_unique<UnalignedSharedMemory>(buffer->handle(),
+ buffer->size(),
+ true)
+ : nullptr),
+ offset(buffer ? buffer->offset() : 0),
bytes_used(0),
input_id(input_id) {}
@@ -768,17 +775,16 @@ void V4L2VideoDecodeAccelerator::DecodeTask(
TRACE_EVENT1("media,gpu", "V4L2VDA::DecodeTask", "input_id",
bitstream_buffer.id());
- std::unique_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
- decode_client_, decode_task_runner_,
- std::unique_ptr<SharedMemoryRegion>(
- new SharedMemoryRegion(bitstream_buffer, true)),
- bitstream_buffer.id()));
+ std::unique_ptr<BitstreamBufferRef> bitstream_record(
+ new BitstreamBufferRef(decode_client_, decode_task_runner_,
+ &bitstream_buffer, bitstream_buffer.id()));
// Skip empty buffer.
if (bitstream_buffer.size() == 0)
return;
- if (!bitstream_record->shm->Map()) {
+ if (!bitstream_record->shm->MapAt(bitstream_record->offset,
+ bitstream_record->shm->size())) {
VLOGF(1) << "could not map bitstream_buffer";
NOTIFY_ERROR(UNREADABLE_INPUT);
return;
@@ -1849,8 +1855,7 @@ void V4L2VideoDecodeAccelerator::DestroyTask() {
decoder_input_queue_.pop();
decoder_flushing_ = false;
- if (image_processor_)
- image_processor_.release()->Destroy();
+ image_processor_ = nullptr;
// Set our state to kError. Just in case.
decoder_state_ = kError;
@@ -1971,8 +1976,7 @@ void V4L2VideoDecodeAccelerator::StartResolutionChange() {
return;
}
- if (image_processor_)
- image_processor_.release()->Destroy();
+ image_processor_ = nullptr;
if (!DestroyOutputBuffers()) {
VLOGF(1) << "Failed destroying output buffers.";
@@ -2389,17 +2393,18 @@ bool V4L2VideoDecodeAccelerator::ResetImageProcessor() {
bool V4L2VideoDecodeAccelerator::CreateImageProcessor() {
VLOGF(2);
DCHECK(!image_processor_);
- image_processor_.reset(new V4L2ImageProcessor(image_processor_device_));
v4l2_memory output_memory_type =
(output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
: V4L2_MEMORY_DMABUF);
+ image_processor_.reset(new V4L2ImageProcessor(
+ image_processor_device_, V4L2_MEMORY_DMABUF, output_memory_type));
// Unretained is safe because |this| owns image processor and there will be
// no callbacks after processor destroys.
if (!image_processor_->Initialize(
V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_),
- V4L2_MEMORY_DMABUF, output_memory_type, visible_size_, coded_size_,
- visible_size_, egl_image_size_, output_buffer_map_.size(),
+ visible_size_, coded_size_, visible_size_, egl_image_size_,
+ output_buffer_map_.size(),
base::Bind(&V4L2VideoDecodeAccelerator::ImageProcessorError,
base::Unretained(this)))) {
VLOGF(1) << "Initialize image processor failed";
@@ -2429,31 +2434,30 @@ bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id,
DCHECK_EQ(output_record.state, kAtDevice);
output_record.state = kAtProcessor;
image_processor_bitstream_buffer_ids_.push(bitstream_buffer_id);
- std::vector<int> processor_input_fds;
- for (auto& fd : output_record.processor_input_fds) {
- processor_input_fds.push_back(fd.get());
- }
+
scoped_refptr<VideoFrame> input_frame = VideoFrame::WrapExternalDmabufs(
V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
- coded_size_, gfx::Rect(visible_size_), visible_size_, processor_input_fds,
- base::TimeDelta());
+ coded_size_, gfx::Rect(visible_size_), visible_size_,
+ DuplicateFDs(output_record.processor_input_fds), base::TimeDelta());
+
+ if (!input_frame) {
+ VLOGF(1) << "Failed wrapping input frame!";
+ return false;
+ }
std::vector<base::ScopedFD> output_fds;
if (output_mode_ == Config::OutputMode::IMPORT) {
- for (auto& fd : output_record.output_fds) {
- output_fds.push_back(base::ScopedFD(HANDLE_EINTR(dup(fd.get()))));
- if (!output_fds.back().is_valid()) {
- VPLOGF(1) << "Failed duplicating a dmabuf fd";
- return false;
- }
- }
+ output_fds = DuplicateFDs(output_record.output_fds);
+ if (output_fds.empty())
+ return false;
}
// Unretained is safe because |this| owns image processor and there will
// be no callbacks after processor destroys.
image_processor_->Process(
input_frame, output_buffer_index, std::move(output_fds),
- base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed,
- base::Unretained(this), bitstream_buffer_id));
+ base::BindOnce(&V4L2VideoDecodeAccelerator::FrameProcessed,
+ base::Unretained(this), bitstream_buffer_id,
+ output_buffer_index));
return true;
}
@@ -2642,8 +2646,10 @@ void V4L2VideoDecodeAccelerator::PictureCleared() {
SendPictureReady();
}
-void V4L2VideoDecodeAccelerator::FrameProcessed(int32_t bitstream_buffer_id,
- int output_buffer_index) {
+void V4L2VideoDecodeAccelerator::FrameProcessed(
+ int32_t bitstream_buffer_id,
+ int output_buffer_index,
+ scoped_refptr<VideoFrame> frame) {
DVLOGF(4) << "output_buffer_index=" << output_buffer_index
<< ", bitstream_buffer_id=" << bitstream_buffer_id;
DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
index 21203f6ad4e..9cd9fa3f4c6 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
@@ -25,9 +25,9 @@
#include "media/base/limits.h"
#include "media/base/video_decoder_config.h"
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
+#include "media/gpu/image_processor.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/v4l2/v4l2_device.h"
-#include "media/gpu/v4l2/v4l2_image_processor.h"
#include "media/video/picture.h"
#include "media/video/video_decode_accelerator.h"
#include "ui/gfx/geometry/size.h"
@@ -403,9 +403,12 @@ class MEDIA_GPU_EXPORT V4L2VideoDecodeAccelerator
// Callback that indicates a picture has been cleared.
void PictureCleared();
- // Image processor returns a processed frame. Its id is |bitstream_buffer_id|
- // and stored in |output_buffer_index| buffer of image processor.
- void FrameProcessed(int32_t bitstream_buffer_id, int output_buffer_index);
+ // Image processor returns a processed |frame|. Its id is
+ // |bitstream_buffer_id| and stored in |output_buffer_index| buffer of
+ // image processor.
+ void FrameProcessed(int32_t bitstream_buffer_id,
+ int output_buffer_index,
+ scoped_refptr<VideoFrame> frame);
// Image processor notifies an error.
void ImageProcessorError();
@@ -563,7 +566,7 @@ class MEDIA_GPU_EXPORT V4L2VideoDecodeAccelerator
// Image processor device, if one is in use.
scoped_refptr<V4L2Device> image_processor_device_;
// Image processor. Accessed on |decoder_thread_|.
- std::unique_ptr<V4L2ImageProcessor> image_processor_;
+ std::unique_ptr<ImageProcessor> image_processor_;
// The V4L2Device EGLImage is created from.
scoped_refptr<V4L2Device> egl_image_device_;
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
index 56f193cb1fb..425022e5a2b 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
@@ -23,7 +23,9 @@
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/bitstream_buffer.h"
-#include "media/gpu/shared_memory_region.h"
+#include "media/base/scopedfd_helper.h"
+#include "media/base/unaligned_shared_memory.h"
+#include "media/gpu/v4l2/v4l2_image_processor.h"
#include "media/video/h264_parser.h"
#define VLOGF(level) VLOG(level) << __func__ << "(): "
@@ -87,10 +89,10 @@ static void CopyNALUPrependingStartCode(const uint8_t* src,
namespace media {
struct V4L2VideoEncodeAccelerator::BitstreamBufferRef {
- BitstreamBufferRef(int32_t id, std::unique_ptr<SharedMemoryRegion> shm)
+ BitstreamBufferRef(int32_t id, std::unique_ptr<UnalignedSharedMemory> shm)
: id(id), shm(std::move(shm)) {}
const int32_t id;
- const std::unique_ptr<SharedMemoryRegion> shm;
+ const std::unique_ptr<UnalignedSharedMemory> shm;
};
V4L2VideoEncodeAccelerator::InputRecord::InputRecord() : at_device(false) {}
@@ -203,16 +205,16 @@ bool V4L2VideoEncodeAccelerator::Initialize(VideoPixelFormat input_format,
}
scoped_refptr<V4L2Device> device = V4L2Device::Create();
- image_processor_.reset(new V4L2ImageProcessor(device));
+ image_processor_.reset(
+ new V4L2ImageProcessor(device, V4L2_MEMORY_USERPTR, V4L2_MEMORY_MMAP));
// Convert from input_format to device_input_format_, keeping the size
// at visible_size_ and requiring the output buffers to be of at least
// input_allocated_size_. Unretained is safe because |this| owns image
// processor and there will be no callbacks after processor destroys.
if (!image_processor_->Initialize(
- input_format, device_input_format_, V4L2_MEMORY_USERPTR,
- V4L2_MEMORY_MMAP, visible_size_, visible_size_, visible_size_,
- input_allocated_size_, kImageProcBufferCount,
+ input_format, device_input_format_, visible_size_, visible_size_,
+ visible_size_, input_allocated_size_, kImageProcBufferCount,
base::Bind(&V4L2VideoEncodeAccelerator::ImageProcessorError,
base::Unretained(this)))) {
VLOGF(1) << "Failed initializing image processor";
@@ -234,16 +236,8 @@ bool V4L2VideoEncodeAccelerator::Initialize(VideoPixelFormat input_format,
return false;
}
- for (int i = 0; i < kImageProcBufferCount; i++) {
- std::vector<base::ScopedFD> fds =
- image_processor_->GetDmabufsForOutputBuffer(i);
- if (fds.size() == 0) {
- VLOGF(1) << "failed to get fds of image processor.";
- return false;
- }
- image_processor_output_buffer_map_.push_back(std::move(fds));
+ for (int i = 0; i < kImageProcBufferCount; i++)
free_image_processor_output_buffers_.push_back(i);
- }
}
if (!InitControls())
@@ -289,9 +283,9 @@ void V4L2VideoEncodeAccelerator::Encode(const scoped_refptr<VideoFrame>& frame,
// be no callbacks after processor destroys.
if (!image_processor_->Process(
frame, output_buffer_index, std::vector<base::ScopedFD>(),
- base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed,
- base::Unretained(this), force_keyframe,
- frame->timestamp()))) {
+ base::BindOnce(&V4L2VideoEncodeAccelerator::FrameProcessed,
+ base::Unretained(this), force_keyframe,
+ frame->timestamp(), output_buffer_index))) {
NOTIFY_ERROR(kPlatformFailureError);
}
} else {
@@ -314,9 +308,9 @@ void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer(
return;
}
- std::unique_ptr<SharedMemoryRegion> shm(
- new SharedMemoryRegion(buffer, false));
- if (!shm->Map()) {
+ auto shm = std::make_unique<UnalignedSharedMemory>(buffer.handle(),
+ buffer.size(), false);
+ if (!shm->MapAt(buffer.offset(), buffer.size())) {
NOTIFY_ERROR(kPlatformFailureError);
return;
}
@@ -350,8 +344,7 @@ void V4L2VideoEncodeAccelerator::Destroy() {
client_ptr_factory_.reset();
weak_this_ptr_factory_.InvalidateWeakPtrs();
- if (image_processor_.get())
- image_processor_.release()->Destroy();
+ image_processor_ = nullptr;
// If the encoder thread is running, destroy using posted task.
if (encoder_thread_.IsRunning()) {
@@ -410,37 +403,23 @@ V4L2VideoEncodeAccelerator::GetSupportedProfiles() {
return device->GetSupportedEncodeProfiles();
}
-void V4L2VideoEncodeAccelerator::FrameProcessed(bool force_keyframe,
- base::TimeDelta timestamp,
- int output_buffer_index) {
+void V4L2VideoEncodeAccelerator::FrameProcessed(
+ bool force_keyframe,
+ base::TimeDelta timestamp,
+ int output_buffer_index,
+ scoped_refptr<VideoFrame> frame) {
DCHECK(child_task_runner_->BelongsToCurrentThread());
DVLOGF(4) << "force_keyframe=" << force_keyframe
<< ", output_buffer_index=" << output_buffer_index;
DCHECK_GE(output_buffer_index, 0);
- DCHECK_LT(static_cast<size_t>(output_buffer_index),
- image_processor_output_buffer_map_.size());
- std::vector<base::ScopedFD>& scoped_fds =
- image_processor_output_buffer_map_[output_buffer_index];
- std::vector<int> fds;
- for (auto& fd : scoped_fds) {
- fds.push_back(fd.get());
- }
- scoped_refptr<VideoFrame> output_frame = VideoFrame::WrapExternalDmabufs(
- device_input_format_, image_processor_->output_allocated_size(),
- gfx::Rect(visible_size_), visible_size_, fds, timestamp);
- if (!output_frame) {
- NOTIFY_ERROR(kPlatformFailureError);
- return;
- }
- output_frame->AddDestructionObserver(BindToCurrentLoop(
+ frame->AddDestructionObserver(BindToCurrentLoop(
base::Bind(&V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer,
weak_this_, output_buffer_index)));
encoder_thread_.task_runner()->PostTask(
- FROM_HERE,
- base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
- base::Unretained(this), output_frame, force_keyframe));
+ FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
+ base::Unretained(this), frame, force_keyframe));
}
void V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer(
@@ -758,12 +737,14 @@ void V4L2VideoEncodeAccelerator::Dequeue() {
<< ", size=" << output_data_size << ", key_frame=" << key_frame;
child_task_runner_->PostTask(
- FROM_HERE, base::Bind(&Client::BitstreamBufferReady, client_,
- bitstream_buffer_id, output_data_size, key_frame,
- base::TimeDelta::FromMicroseconds(
- dqbuf.timestamp.tv_usec +
- dqbuf.timestamp.tv_sec *
- base::Time::kMicrosecondsPerSecond)));
+ FROM_HERE,
+ base::Bind(&Client::BitstreamBufferReady, client_, bitstream_buffer_id,
+ BitstreamBufferMetadata(
+ output_data_size, key_frame,
+ base::TimeDelta::FromMicroseconds(
+ dqbuf.timestamp.tv_usec +
+ dqbuf.timestamp.tv_sec *
+ base::Time::kMicrosecondsPerSecond))));
if ((encoder_state_ == kFlushing) && (dqbuf.flags & V4L2_BUF_FLAG_LAST)) {
// Notify client that flush has finished successfully. The flush callback
// should be called after notifying the last buffer is ready.
@@ -821,6 +802,16 @@ bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
frame->timestamp().InSeconds() * base::Time::kMicrosecondsPerSecond;
DCHECK_EQ(device_input_format_, frame->format());
+
+ std::vector<int> fds;
+ if (input_memory_type_ == V4L2_MEMORY_DMABUF) {
+ fds = frame->DmabufFds();
+ if (fds.size() != input_planes_count_) {
+ VLOGF(1) << "Invalid number of planes in the frame";
+ return false;
+ }
+ }
+
for (size_t i = 0; i < input_planes_count_; ++i) {
qbuf.m.planes[i].bytesused = base::checked_cast<__u32>(
VideoFrame::PlaneSize(frame->format(), i, input_allocated_size_)
@@ -835,7 +826,7 @@ bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
break;
case V4L2_MEMORY_DMABUF:
- qbuf.m.planes[i].m.fd = frame->DmabufFd(i);
+ qbuf.m.planes[i].m.fd = fds[i];
DCHECK_NE(qbuf.m.planes[i].m.fd, -1);
break;
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
index 4b89ddb7fdf..d90b9f40199 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
@@ -18,9 +18,9 @@
#include "base/memory/weak_ptr.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
+#include "media/gpu/image_processor.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/v4l2/v4l2_device.h"
-#include "media/gpu/v4l2/v4l2_image_processor.h"
#include "media/video/video_encode_accelerator.h"
#include "ui/gfx/geometry/size.h"
@@ -36,7 +36,7 @@ namespace media {
// device exposed by the codec hardware driver. The threading model of this
// class is the same as in the V4L2VideoDecodeAccelerator (from which class this
// was designed).
-// This class may try to instantiate and use a V4L2ImageProcessor for input
+// This class may try to instantiate and use a ImageProcessor for input
// format conversion, if the input format requested via Initialize() is not
// accepted by the hardware codec.
class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
@@ -116,10 +116,12 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
// Callbacks for the image processor, if one is used.
//
- // Callback run by the image processor when a frame is ready for us to encode.
+ // Callback run by the image processor when a |frame| is ready for us to
+ // encode.
void FrameProcessed(bool force_keyframe,
base::TimeDelta timestamp,
- int output_buffer_index);
+ int output_buffer_index,
+ scoped_refptr<VideoFrame> frame);
// Error callback for handling image processor errors.
void ImageProcessorError();
@@ -190,7 +192,7 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
// Try to set up the device to the input format we were Initialized() with,
// or if the device doesn't support it, use one it can support, so that we
- // can later instantiate a V4L2ImageProcessor to convert to it.
+ // can later instantiate an ImageProcessor to convert to it.
bool NegotiateInputFormat(VideoPixelFormat input_format);
// Set up the device to the output format requested in Initialize().
@@ -294,14 +296,12 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
FlushCallback flush_callback_;
// Image processor, if one is in use.
- std::unique_ptr<V4L2ImageProcessor> image_processor_;
+ std::unique_ptr<ImageProcessor> image_processor_;
// Indexes of free image processor output buffers. Only accessed on child
// thread.
std::vector<int> free_image_processor_output_buffers_;
// Video frames ready to be processed. Only accessed on child thread.
base::queue<InputFrameInfo> image_processor_input_queue_;
- // Mapping of int index to fds of image processor output buffer.
- std::vector<std::vector<base::ScopedFD>> image_processor_output_buffer_map_;
// This thread services tasks posted from the VEA API entry points by the
// child thread and device service callbacks posted from the device thread.
diff --git a/chromium/media/gpu/vaapi/accelerated_video_encoder.cc b/chromium/media/gpu/vaapi/accelerated_video_encoder.cc
index 0334356e0ff..17e3236e338 100644
--- a/chromium/media/gpu/vaapi/accelerated_video_encoder.cc
+++ b/chromium/media/gpu/vaapi/accelerated_video_encoder.cc
@@ -5,6 +5,7 @@
#include "media/gpu/vaapi/accelerated_video_encoder.h"
#include "media/base/video_frame.h"
+#include "media/video/video_encode_accelerator.h"
namespace media {
@@ -26,6 +27,12 @@ VaapiEncodeJob* AcceleratedVideoEncoder::EncodeJob::AsVaapiEncodeJob() {
return nullptr;
}
+BitstreamBufferMetadata AcceleratedVideoEncoder::EncodeJob::Metadata(
+ size_t payload_size) const {
+ return BitstreamBufferMetadata(payload_size, IsKeyframeRequested(),
+ timestamp());
+}
+
void AcceleratedVideoEncoder::EncodeJob::AddSetupCallback(
base::OnceClosure cb) {
DCHECK(!cb.is_null());
diff --git a/chromium/media/gpu/vaapi/accelerated_video_encoder.h b/chromium/media/gpu/vaapi/accelerated_video_encoder.h
index 3564b248a32..c9a9a390cb0 100644
--- a/chromium/media/gpu/vaapi/accelerated_video_encoder.h
+++ b/chromium/media/gpu/vaapi/accelerated_video_encoder.h
@@ -12,12 +12,14 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/time/time.h"
+#include "media/base/video_bitrate_allocation.h"
#include "media/base/video_codecs.h"
#include "media/gpu/codec_picture.h"
#include "ui/gfx/geometry/size.h"
namespace media {
+struct BitstreamBufferMetadata;
class VaapiEncodeJob;
class VideoFrame;
@@ -83,6 +85,8 @@ class AcceleratedVideoEncoder {
// Returns the timestamp associated with this job.
base::TimeDelta timestamp() const { return timestamp_; }
+ virtual BitstreamBufferMetadata Metadata(size_t payload_size) const;
+
virtual VaapiEncodeJob* AsVaapiEncodeJob();
protected:
@@ -122,8 +126,9 @@ class AcceleratedVideoEncoder {
uint32_t initial_framerate) = 0;
// Updates current framerate and/or bitrate to |framerate| in FPS
- // and |bitrate| in bps.
- virtual bool UpdateRates(uint32_t bitrate, uint32_t framerate) = 0;
+ // and the specified video bitrate allocation.
+ virtual bool UpdateRates(const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) = 0;
// Returns coded size for the input buffers required to encode, in pixels;
// typically visible size adjusted to match codec alignment requirements.
diff --git a/chromium/media/gpu/vaapi/h264_encoder.cc b/chromium/media/gpu/vaapi/h264_encoder.cc
index d865ac8b31e..7dc0540985b 100644
--- a/chromium/media/gpu/vaapi/h264_encoder.cc
+++ b/chromium/media/gpu/vaapi/h264_encoder.cc
@@ -11,10 +11,10 @@
namespace media {
namespace {
-// An IDR every 2048 frames, an I frame every 256 and no B frames.
+// An IDR every 2048 frames, no I frames and no B frames.
// We choose IDR period to equal MaxFrameNum so it must be a power of 2.
constexpr int kIDRPeriod = 2048;
-constexpr int kIPeriod = 256;
+constexpr int kIPeriod = 0;
constexpr int kIPPeriod = 1;
constexpr int kDefaultQP = 26;
@@ -88,7 +88,9 @@ bool H264Encoder::Initialize(const gfx::Size& visible_size,
mb_height_ = coded_size_.height() / kH264MacroblockSizeInPixels;
profile_ = profile;
- if (!UpdateRates(initial_bitrate, initial_framerate))
+ VideoBitrateAllocation initial_bitrate_allocation;
+ initial_bitrate_allocation.SetBitrate(0, 0, initial_bitrate);
+ if (!UpdateRates(initial_bitrate_allocation, initial_framerate))
return false;
UpdateSPS();
@@ -140,11 +142,12 @@ bool H264Encoder::PrepareEncodeJob(EncodeJob* encode_job) {
encode_job->ProduceKeyframe();
}
- if (pic->frame_num % curr_params_.i_period_frames == 0)
+ if (pic->idr || (curr_params_.i_period_frames != 0 &&
+ pic->frame_num % curr_params_.i_period_frames == 0)) {
pic->type = H264SliceHeader::kISlice;
- else
+ } else {
pic->type = H264SliceHeader::kPSlice;
-
+ }
if (curr_params_.ip_period_frames != 1) {
NOTIMPLEMENTED() << "B frames not implemented";
return false;
@@ -191,9 +194,11 @@ bool H264Encoder::PrepareEncodeJob(EncodeJob* encode_job) {
return true;
}
-bool H264Encoder::UpdateRates(uint32_t bitrate, uint32_t framerate) {
+bool H264Encoder::UpdateRates(const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ uint32_t bitrate = bitrate_allocation.GetSumBps();
if (bitrate == 0 || framerate == 0)
return false;
diff --git a/chromium/media/gpu/vaapi/h264_encoder.h b/chromium/media/gpu/vaapi/h264_encoder.h
index 83e9613c618..a86ee4279af 100644
--- a/chromium/media/gpu/vaapi/h264_encoder.h
+++ b/chromium/media/gpu/vaapi/h264_encoder.h
@@ -99,7 +99,8 @@ class H264Encoder : public AcceleratedVideoEncoder {
VideoCodecProfile profile,
uint32_t initial_bitrate,
uint32_t initial_framerate) override;
- bool UpdateRates(uint32_t bitrate, uint32_t framerate) override;
+ bool UpdateRates(const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) override;
gfx::Size GetCodedSize() const override;
size_t GetBitstreamBufferSize() const override;
size_t GetMaxNumOfRefFrames() const override;
diff --git a/chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc
index 89bfbb93a23..3bcf0f347c7 100644
--- a/chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc
@@ -15,10 +15,11 @@
#from " and " #to " arrays must be of same size"); \
memcpy(to, from, sizeof(to)); \
} while (0)
-#define VLOGF(level) VLOG(level) << __func__ << "(): "
namespace media {
+using Status = H264Decoder::H264Accelerator::Status;
+
namespace {
// from ITU-T REC H.264 spec
@@ -67,7 +68,7 @@ static void InitVAPicture(VAPictureH264* va_pic) {
va_pic->flags = VA_PICTURE_H264_INVALID;
}
-bool VaapiH264Accelerator::SubmitFrameMetadata(
+Status VaapiH264Accelerator::SubmitFrameMetadata(
const H264SPS* sps,
const H264PPS* pps,
const H264DPB& dpb,
@@ -147,7 +148,7 @@ bool VaapiH264Accelerator::SubmitFrameMetadata(
if (!vaapi_wrapper_->SubmitBuffer(VAPictureParameterBufferType,
sizeof(pic_param), &pic_param))
- return false;
+ return Status::kFail;
VAIQMatrixBufferH264 iq_matrix_buf;
memset(&iq_matrix_buf, 0, sizeof(iq_matrix_buf));
@@ -179,16 +180,20 @@ bool VaapiH264Accelerator::SubmitFrameMetadata(
}
return vaapi_wrapper_->SubmitBuffer(VAIQMatrixBufferType,
- sizeof(iq_matrix_buf), &iq_matrix_buf);
+ sizeof(iq_matrix_buf), &iq_matrix_buf)
+ ? Status::kOk
+ : Status::kFail;
}
-bool VaapiH264Accelerator::SubmitSlice(const H264PPS* pps,
- const H264SliceHeader* slice_hdr,
- const H264Picture::Vector& ref_pic_list0,
- const H264Picture::Vector& ref_pic_list1,
- const scoped_refptr<H264Picture>& pic,
- const uint8_t* data,
- size_t size) {
+Status VaapiH264Accelerator::SubmitSlice(
+ const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ const scoped_refptr<H264Picture>& pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
VASliceParameterBufferH264 slice_param;
memset(&slice_param, 0, sizeof(slice_param));
@@ -280,19 +285,20 @@ bool VaapiH264Accelerator::SubmitSlice(const H264PPS* pps,
if (!vaapi_wrapper_->SubmitBuffer(VASliceParameterBufferType,
sizeof(slice_param), &slice_param))
- return false;
+ return Status::kFail;
- // Can't help it, blame libva...
- void* non_const_ptr = const_cast<uint8_t*>(data);
- return vaapi_wrapper_->SubmitBuffer(VASliceDataBufferType, size,
- non_const_ptr);
+ return vaapi_wrapper_->SubmitBuffer(VASliceDataBufferType, size, data)
+ ? Status::kOk
+ : Status::kFail;
}
-bool VaapiH264Accelerator::SubmitDecode(const scoped_refptr<H264Picture>& pic) {
- VLOGF(4) << "Decoding POC " << pic->pic_order_cnt;
+Status VaapiH264Accelerator::SubmitDecode(
+ const scoped_refptr<H264Picture>& pic) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- return vaapi_dec_->DecodeVASurface(pic->AsVaapiH264Picture()->va_surface());
+ return vaapi_dec_->DecodeVASurface(pic->AsVaapiH264Picture()->va_surface())
+ ? Status::kOk
+ : Status::kFail;
}
bool VaapiH264Accelerator::OutputPicture(
diff --git a/chromium/media/gpu/vaapi/vaapi_h264_accelerator.h b/chromium/media/gpu/vaapi/vaapi_h264_accelerator.h
index 470656117eb..41b41c94365 100644
--- a/chromium/media/gpu/vaapi/vaapi_h264_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_h264_accelerator.h
@@ -25,21 +25,22 @@ class VaapiH264Accelerator : public H264Decoder::H264Accelerator {
// H264Decoder::H264Accelerator implementation.
scoped_refptr<H264Picture> CreateH264Picture() override;
- bool SubmitFrameMetadata(const H264SPS* sps,
- const H264PPS* pps,
- const H264DPB& dpb,
- const H264Picture::Vector& ref_pic_listp0,
- const H264Picture::Vector& ref_pic_listb0,
- const H264Picture::Vector& ref_pic_listb1,
- const scoped_refptr<H264Picture>& pic) override;
- bool SubmitSlice(const H264PPS* pps,
- const H264SliceHeader* slice_hdr,
- const H264Picture::Vector& ref_pic_list0,
- const H264Picture::Vector& ref_pic_list1,
- const scoped_refptr<H264Picture>& pic,
- const uint8_t* data,
- size_t size) override;
- bool SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
+ Status SubmitFrameMetadata(const H264SPS* sps,
+ const H264PPS* pps,
+ const H264DPB& dpb,
+ const H264Picture::Vector& ref_pic_listp0,
+ const H264Picture::Vector& ref_pic_listb0,
+ const H264Picture::Vector& ref_pic_listb1,
+ const scoped_refptr<H264Picture>& pic) override;
+ Status SubmitSlice(const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ const scoped_refptr<H264Picture>& pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) override;
+ Status SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
bool OutputPicture(const scoped_refptr<H264Picture>& pic) override;
void Reset() override;
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.cc
index 017c08af4eb..e12f72dad9c 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.cc
@@ -16,9 +16,9 @@
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "gpu/ipc/service/gpu_channel.h"
+#include "media/base/unaligned_shared_memory.h"
#include "media/base/video_frame.h"
#include "media/filters/jpeg_parser.h"
-#include "media/gpu/shared_memory_region.h"
#include "media/gpu/vaapi/vaapi_picture.h"
#include "third_party/libyuv/include/libyuv.h"
@@ -80,15 +80,21 @@ static unsigned int VaSurfaceFormatForJpeg(
} // namespace
-VaapiJpegDecodeAccelerator::DecodeRequest::DecodeRequest(
- int32_t bitstream_buffer_id,
- std::unique_ptr<SharedMemoryRegion> shm,
- const scoped_refptr<VideoFrame>& video_frame)
- : bitstream_buffer_id(bitstream_buffer_id),
- shm(std::move(shm)),
- video_frame(video_frame) {}
-
-VaapiJpegDecodeAccelerator::DecodeRequest::~DecodeRequest() {}
+// An input buffer and the corresponding output video frame awaiting
+// consumption, provided by the client.
+struct VaapiJpegDecodeAccelerator::DecodeRequest {
+ DecodeRequest(int32_t bitstream_buffer_id,
+ std::unique_ptr<UnalignedSharedMemory> shm,
+ const scoped_refptr<VideoFrame>& video_frame)
+ : bitstream_buffer_id(bitstream_buffer_id),
+ shm(std::move(shm)),
+ video_frame(video_frame) {}
+ ~DecodeRequest() = default;
+
+ int32_t bitstream_buffer_id;
+ std::unique_ptr<UnalignedSharedMemory> shm;
+ scoped_refptr<VideoFrame> video_frame;
+};
void VaapiJpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id,
Error error) {
@@ -102,9 +108,10 @@ void VaapiJpegDecodeAccelerator::NotifyErrorFromDecoderThread(
int32_t bitstream_buffer_id,
Error error) {
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
- task_runner_->PostTask(FROM_HERE,
- base::Bind(&VaapiJpegDecodeAccelerator::NotifyError,
- weak_this_, bitstream_buffer_id, error));
+ task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VaapiJpegDecodeAccelerator::NotifyError,
+ weak_this_factory_.GetWeakPtr(),
+ bitstream_buffer_id, error));
}
void VaapiJpegDecodeAccelerator::VideoFrameReady(int32_t bitstream_buffer_id) {
@@ -118,9 +125,7 @@ VaapiJpegDecodeAccelerator::VaapiJpegDecodeAccelerator(
io_task_runner_(io_task_runner),
decoder_thread_("VaapiJpegDecoderThread"),
va_surface_id_(VA_INVALID_SURFACE),
- weak_this_factory_(this) {
- weak_this_ = weak_this_factory_.GetWeakPtr();
-}
+ weak_this_factory_(this) {}
VaapiJpegDecodeAccelerator::~VaapiJpegDecodeAccelerator() {
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -167,12 +172,9 @@ bool VaapiJpegDecodeAccelerator::OutputPicture(
<< " into video_frame associated with input buffer id "
<< input_buffer_id;
- VAImage image;
- VAImageFormat format;
- const uint32_t kI420Fourcc = VA_FOURCC('I', '4', '2', '0');
- memset(&image, 0, sizeof(image));
- memset(&format, 0, sizeof(format));
- format.fourcc = kI420Fourcc;
+ VAImage image = {};
+ VAImageFormat format = {};
+ format.fourcc = VA_FOURCC_I420;
format.byte_order = VA_LSB_FIRST;
format.bits_per_pixel = 12; // 12 for I420
@@ -216,14 +218,15 @@ bool VaapiJpegDecodeAccelerator::OutputPicture(
vaapi_wrapper_->ReturnVaImage(&image);
task_runner_->PostTask(
- FROM_HERE, base::Bind(&VaapiJpegDecodeAccelerator::VideoFrameReady,
- weak_this_, input_buffer_id));
+ FROM_HERE,
+ base::BindOnce(&VaapiJpegDecodeAccelerator::VideoFrameReady,
+ weak_this_factory_.GetWeakPtr(), input_buffer_id));
return true;
}
void VaapiJpegDecodeAccelerator::DecodeTask(
- const std::unique_ptr<DecodeRequest>& request) {
+ std::unique_ptr<DecodeRequest> request) {
DVLOGF(4);
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("jpeg", "DecodeTask");
@@ -294,9 +297,9 @@ void VaapiJpegDecodeAccelerator::Decode(
DVLOGF(4) << "Mapping new input buffer id: " << bitstream_buffer.id()
<< " size: " << bitstream_buffer.size();
- // SharedMemoryRegion will take over the |bitstream_buffer.handle()|.
- std::unique_ptr<SharedMemoryRegion> shm(
- new SharedMemoryRegion(bitstream_buffer, true));
+ // UnalignedSharedMemory will take over the |bitstream_buffer.handle()|.
+ auto shm = std::make_unique<UnalignedSharedMemory>(
+ bitstream_buffer.handle(), bitstream_buffer.size(), true);
if (bitstream_buffer.id() < 0) {
VLOGF(1) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id();
@@ -304,7 +307,7 @@ void VaapiJpegDecodeAccelerator::Decode(
return;
}
- if (!shm->Map()) {
+ if (!shm->MapAt(bitstream_buffer.offset(), bitstream_buffer.size())) {
VLOGF(1) << "Failed to map input buffer";
NotifyErrorFromDecoderThread(bitstream_buffer.id(), UNREADABLE_INPUT);
return;
@@ -314,8 +317,9 @@ void VaapiJpegDecodeAccelerator::Decode(
new DecodeRequest(bitstream_buffer.id(), std::move(shm), video_frame));
decoder_task_runner_->PostTask(
- FROM_HERE, base::Bind(&VaapiJpegDecodeAccelerator::DecodeTask,
- base::Unretained(this), base::Passed(&request)));
+ FROM_HERE,
+ base::BindOnce(&VaapiJpegDecodeAccelerator::DecodeTask,
+ base::Unretained(this), base::Passed(std::move(request))));
}
bool VaapiJpegDecodeAccelerator::IsSupported() {
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.h
index 3a6b86038b9..30500a170a3 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator.h
@@ -16,8 +16,8 @@
#include "base/synchronization/lock.h"
#include "base/threading/thread.h"
#include "media/base/bitstream_buffer.h"
+#include "media/base/unaligned_shared_memory.h"
#include "media/gpu/media_gpu_export.h"
-#include "media/gpu/shared_memory_region.h"
#include "media/gpu/vaapi/vaapi_jpeg_decoder.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/video/jpeg_decode_accelerator.h"
@@ -46,18 +46,7 @@ class MEDIA_GPU_EXPORT VaapiJpegDecodeAccelerator
bool IsSupported() override;
private:
- // An input buffer and the corresponding output video frame awaiting
- // consumption, provided by the client.
- struct DecodeRequest {
- DecodeRequest(int32_t bitstream_buffer_id,
- std::unique_ptr<SharedMemoryRegion> shm,
- const scoped_refptr<VideoFrame>& video_frame);
- ~DecodeRequest();
-
- int32_t bitstream_buffer_id;
- std::unique_ptr<SharedMemoryRegion> shm;
- scoped_refptr<VideoFrame> video_frame;
- };
+ struct DecodeRequest;
// Notifies the client that an error has occurred and decoding cannot
// continue.
@@ -66,7 +55,7 @@ class MEDIA_GPU_EXPORT VaapiJpegDecodeAccelerator
void VideoFrameReady(int32_t bitstream_buffer_id);
// Processes one decode |request|.
- void DecodeTask(const std::unique_ptr<DecodeRequest>& request);
+ void DecodeTask(std::unique_ptr<DecodeRequest> request);
// Puts contents of |va_surface| into given |video_frame|, releases the
// surface and passes the |input_buffer_id| of the resulting picture to
@@ -76,22 +65,14 @@ class MEDIA_GPU_EXPORT VaapiJpegDecodeAccelerator
const scoped_refptr<VideoFrame>& video_frame);
// ChildThread's task runner.
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
// GPU IO task runner.
- scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
+ const scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
// The client of this class.
Client* client_;
- // WeakPtr<> pointing to |this| for use in posting tasks from the decoder
- // thread back to the ChildThread. Because the decoder thread is a member of
- // this class, any task running on the decoder thread is guaranteed that this
- // object is still alive. As a result, tasks posted from ChildThread to
- // decoder thread should use base::Unretained(this), and tasks posted from the
- // decoder thread to the ChildThread should use |weak_this_|.
- base::WeakPtr<VaapiJpegDecodeAccelerator> weak_this_;
-
scoped_refptr<VaapiWrapper> vaapi_wrapper_;
// Comes after vaapi_wrapper_ to ensure its destructor is executed before
@@ -110,7 +91,11 @@ class MEDIA_GPU_EXPORT VaapiJpegDecodeAccelerator
// The VA RT format associated with |va_surface_id_|.
unsigned int va_rt_format_;
- // The WeakPtrFactory for |weak_this_|.
+ // WeakPtr factory for use in posting tasks from |decoder_task_runner_| back
+ // to |task_runner_|. Since |decoder_thread_| is a fully owned member of
+ // this class, tasks posted to it may use base::Unretained(this), and tasks
+ // posted from the |decoder_task_runner_| to |task_runner_| should use a
+ // WeakPtr (obtained via weak_this_factory_.GetWeakPtr()).
base::WeakPtrFactory<VaapiJpegDecodeAccelerator> weak_this_factory_;
DISALLOW_COPY_AND_ASSIGN(VaapiJpegDecodeAccelerator);
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc
index 45b62d693bf..b417d7363f6 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc
@@ -189,41 +189,43 @@ bool VaapiJpegDecoder::Decode(VaapiWrapper* vaapi_wrapper,
VAPictureParameterBufferJPEGBaseline pic_param;
FillPictureParameters(parse_result.frame_header, &pic_param);
if (!vaapi_wrapper->SubmitBuffer(VAPictureParameterBufferType,
- sizeof(pic_param), &pic_param))
+ sizeof(pic_param), &pic_param)) {
return false;
+ }
// Set quantization table.
VAIQMatrixBufferJPEGBaseline iq_matrix;
FillIQMatrix(parse_result.q_table, &iq_matrix);
if (!vaapi_wrapper->SubmitBuffer(VAIQMatrixBufferType, sizeof(iq_matrix),
- &iq_matrix))
+ &iq_matrix)) {
return false;
+ }
// Set huffman table.
VAHuffmanTableBufferJPEGBaseline huffman_table;
FillHuffmanTable(parse_result.dc_table, parse_result.ac_table,
&huffman_table);
if (!vaapi_wrapper->SubmitBuffer(VAHuffmanTableBufferType,
- sizeof(huffman_table), &huffman_table))
+ sizeof(huffman_table), &huffman_table)) {
return false;
+ }
// Set slice parameters.
VASliceParameterBufferJPEGBaseline slice_param;
FillSliceParameters(parse_result, &slice_param);
if (!vaapi_wrapper->SubmitBuffer(VASliceParameterBufferType,
- sizeof(slice_param), &slice_param))
+ sizeof(slice_param), &slice_param)) {
return false;
+ }
// Set scan data.
if (!vaapi_wrapper->SubmitBuffer(VASliceDataBufferType,
parse_result.data_size,
- const_cast<char*>(parse_result.data)))
- return false;
-
- if (!vaapi_wrapper->ExecuteAndDestroyPendingBuffers(va_surface))
+ const_cast<char*>(parse_result.data))) {
return false;
+ }
- return true;
+ return vaapi_wrapper->ExecuteAndDestroyPendingBuffers(va_surface);
}
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
index caf6ff832a6..bf25a3be674 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
@@ -50,8 +50,8 @@ static void ReportToUMA(VAJEAEncoderResult result) {
VaapiJpegEncodeAccelerator::EncodeRequest::EncodeRequest(
int32_t buffer_id,
scoped_refptr<media::VideoFrame> video_frame,
- std::unique_ptr<SharedMemoryRegion> exif_shm,
- std::unique_ptr<SharedMemoryRegion> output_shm,
+ std::unique_ptr<UnalignedSharedMemory> exif_shm,
+ std::unique_ptr<UnalignedSharedMemory> output_shm,
int quality)
: buffer_id(buffer_id),
video_frame(std::move(video_frame)),
@@ -202,7 +202,9 @@ VaapiJpegEncodeAccelerator::~VaapiJpegEncodeAccelerator() {
VLOGF(2) << "Destroying VaapiJpegEncodeAccelerator";
weak_this_factory_.InvalidateWeakPtrs();
- encoder_task_runner_->DeleteSoon(FROM_HERE, std::move(encoder_));
+ if (encoder_task_runner_) {
+ encoder_task_runner_->DeleteSoon(FROM_HERE, std::move(encoder_));
+ }
}
void VaapiJpegEncodeAccelerator::NotifyError(int32_t buffer_id, Status status) {
@@ -284,11 +286,12 @@ void VaapiJpegEncodeAccelerator::Encode(
return;
}
- std::unique_ptr<SharedMemoryRegion> exif_shm;
+ std::unique_ptr<UnalignedSharedMemory> exif_shm;
if (exif_buffer) {
// |exif_shm| will take ownership of the |exif_buffer->handle()|.
- exif_shm = std::make_unique<SharedMemoryRegion>(*exif_buffer, true);
- if (!exif_shm->Map()) {
+ exif_shm = std::make_unique<UnalignedSharedMemory>(
+ exif_buffer->handle(), exif_buffer->size(), true);
+ if (!exif_shm->MapAt(exif_buffer->offset(), exif_buffer->size())) {
VLOGF(1) << "Failed to map exif buffer";
task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VaapiJpegEncodeAccelerator::NotifyError,
@@ -305,8 +308,9 @@ void VaapiJpegEncodeAccelerator::Encode(
}
// |output_shm| will take ownership of the |output_buffer.handle()|.
- auto output_shm = std::make_unique<SharedMemoryRegion>(output_buffer, false);
- if (!output_shm->Map()) {
+ auto output_shm = std::make_unique<UnalignedSharedMemory>(
+ output_buffer.handle(), output_buffer.size(), false);
+ if (!output_shm->MapAt(output_buffer.offset(), output_buffer.size())) {
VLOGF(1) << "Failed to map output buffer";
task_runner_->PostTask(
FROM_HERE,
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h
index c883b31fa3f..5a7f22cc1b3 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h
@@ -11,8 +11,8 @@
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "media/base/bitstream_buffer.h"
+#include "media/base/unaligned_shared_memory.h"
#include "media/gpu/media_gpu_export.h"
-#include "media/gpu/shared_memory_region.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/video/jpeg_encode_accelerator.h"
@@ -50,15 +50,15 @@ class MEDIA_GPU_EXPORT VaapiJpegEncodeAccelerator
struct EncodeRequest {
EncodeRequest(int32_t buffer_id,
scoped_refptr<media::VideoFrame> video_frame,
- std::unique_ptr<SharedMemoryRegion> exif_shm,
- std::unique_ptr<SharedMemoryRegion> output_shm,
+ std::unique_ptr<UnalignedSharedMemory> exif_shm,
+ std::unique_ptr<UnalignedSharedMemory> output_shm,
int quality);
~EncodeRequest();
int32_t buffer_id;
scoped_refptr<media::VideoFrame> video_frame;
- std::unique_ptr<SharedMemoryRegion> exif_shm;
- std::unique_ptr<SharedMemoryRegion> output_shm;
+ std::unique_ptr<UnalignedSharedMemory> exif_shm;
+ std::unique_ptr<UnalignedSharedMemory> output_shm;
int quality;
DISALLOW_COPY_AND_ASSIGN(EncodeRequest);
diff --git a/chromium/media/gpu/vaapi/vaapi_picture.h b/chromium/media/gpu/vaapi/vaapi_picture.h
index dd6268ada9b..5ee61a9cb3f 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture.h
@@ -45,8 +45,7 @@ class MEDIA_GPU_EXPORT VaapiPicture {
virtual bool AllowOverlay() const;
- // Downloads the |va_surface| into the picture, potentially scaling
- // it if needed.
+ // Downloads |va_surface| into the picture, potentially scaling it if needed.
virtual bool DownloadFromSurface(
const scoped_refptr<VASurface>& va_surface) = 0;
@@ -60,7 +59,7 @@ class MEDIA_GPU_EXPORT VaapiPicture {
uint32_t client_texture_id,
uint32_t texture_target);
- scoped_refptr<VaapiWrapper> vaapi_wrapper_;
+ const scoped_refptr<VaapiWrapper> vaapi_wrapper_;
const MakeGLContextCurrentCallback make_context_current_cb_;
const BindGLImageCallback bind_image_cb_;
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_factory.cc b/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
index 7985ccd72b2..5c4d1ac3bc5 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
@@ -5,6 +5,7 @@
#include "media/gpu/vaapi/vaapi_picture_factory.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
+#include "media/video/picture.h"
#include "ui/gl/gl_bindings.h"
#if defined(USE_X11)
@@ -43,12 +44,20 @@ std::unique_ptr<VaapiPicture> VaapiPictureFactory::Create(
const scoped_refptr<VaapiWrapper>& vaapi_wrapper,
const MakeGLContextCurrentCallback& make_context_current_cb,
const BindGLImageCallback& bind_image_cb,
- int32_t picture_buffer_id,
- const gfx::Size& size,
- uint32_t texture_id,
- uint32_t client_texture_id,
- uint32_t texture_target) {
- DCHECK_EQ(texture_target, GetGLTextureTarget());
+ const PictureBuffer& picture_buffer) {
+ // ARC++ sends |picture_buffer| with no texture_target().
+ DCHECK(picture_buffer.texture_target() == GetGLTextureTarget() ||
+ picture_buffer.texture_target() == 0u);
+
+ // |client_texture_ids| and |service_texture_ids| are empty from ARC++.
+ const uint32_t client_texture_id =
+ !picture_buffer.client_texture_ids().empty()
+ ? picture_buffer.client_texture_ids()[0]
+ : 0;
+ const uint32_t service_texture_id =
+ !picture_buffer.service_texture_ids().empty()
+ ? picture_buffer.service_texture_ids()[0]
+ : 0;
std::unique_ptr<VaapiPicture> picture;
@@ -61,25 +70,24 @@ std::unique_ptr<VaapiPicture> VaapiPictureFactory::Create(
case kVaapiImplementationDrm:
picture.reset(new VaapiPictureNativePixmapOzone(
vaapi_wrapper, make_context_current_cb, bind_image_cb,
- picture_buffer_id, size, texture_id, client_texture_id,
- texture_target));
+ picture_buffer.id(), picture_buffer.size(), service_texture_id,
+ client_texture_id, picture_buffer.texture_target()));
break;
#elif defined(USE_EGL)
case kVaapiImplementationDrm:
picture.reset(new VaapiPictureNativePixmapEgl(
vaapi_wrapper, make_context_current_cb, bind_image_cb,
- picture_buffer_id, size, texture_id, client_texture_id,
- texture_target));
+ picture_buffer.id(), picture_buffer.size(), service_texture_id,
+ client_texture_id, picture_buffer.texture_target()));
break;
#endif
#if defined(USE_X11)
case kVaapiImplementationX11:
- picture.reset(new VaapiTFPPicture(vaapi_wrapper, make_context_current_cb,
- bind_image_cb, picture_buffer_id, size,
- texture_id, client_texture_id,
- texture_target));
-
+ picture.reset(new VaapiTFPPicture(
+ vaapi_wrapper, make_context_current_cb, bind_image_cb,
+ picture_buffer.id(), picture_buffer.size(), service_texture_id,
+ client_texture_id, picture_buffer.texture_target()));
break;
#endif // USE_X11
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_factory.h b/chromium/media/gpu/vaapi/vaapi_picture_factory.h
index b5184d930fd..984e236eb1c 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_factory.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_factory.h
@@ -15,6 +15,7 @@
namespace media {
+class PictureBuffer;
class VaapiWrapper;
// Factory of platform dependent VaapiPictures.
@@ -29,18 +30,13 @@ class MEDIA_GPU_EXPORT VaapiPictureFactory {
VaapiPictureFactory();
virtual ~VaapiPictureFactory();
- // Creates a VaapiPicture of |size| associated with |picture_buffer_id|. If
- // provided, bind it to |texture_id|, as well as to |client_texture_id| using
- // |bind_image_cb|.
+ // Creates a VaapiPicture of picture_buffer.size() associated with
+ // picture_buffer.id().
virtual std::unique_ptr<VaapiPicture> Create(
const scoped_refptr<VaapiWrapper>& vaapi_wrapper,
const MakeGLContextCurrentCallback& make_context_current_cb,
const BindGLImageCallback& bind_image_cb,
- int32_t picture_buffer_id,
- const gfx::Size& size,
- uint32_t texture_id,
- uint32_t client_texture_id,
- uint32_t texture_target);
+ const PictureBuffer& picture_buffer);
// Return the type of the VaapiPicture implementation for the given GL
// implementation.
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.cc b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.cc
index f7d49814362..312d998f2c1 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_egl.cc
@@ -33,6 +33,8 @@ VaapiPictureNativePixmapEgl::VaapiPictureNativePixmapEgl(
client_texture_id,
texture_target) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(texture_id);
+ DCHECK(client_texture_id);
}
VaapiPictureNativePixmapEgl::~VaapiPictureNativePixmapEgl() {
@@ -58,14 +60,12 @@ bool VaapiPictureNativePixmapEgl::Initialize() {
// because the dmabuf fds have been made from it.
DCHECK(pixmap_->AreDmaBufFdsValid());
- if (client_texture_id_ != 0 && !bind_image_cb_.is_null()) {
- if (!bind_image_cb_.Run(client_texture_id_, texture_target_, gl_image_,
- true)) {
- LOG(ERROR) << "Failed to bind client_texture_id";
- return false;
- }
+ if (bind_image_cb_ &&
+ !bind_image_cb_.Run(client_texture_id_, texture_target_, gl_image_,
+ true /* can_bind_to_sampler */)) {
+ LOG(ERROR) << "Failed to bind client_texture_id";
+ return false;
}
-
return true;
}
@@ -73,52 +73,45 @@ bool VaapiPictureNativePixmapEgl::Allocate(gfx::BufferFormat format) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// Export the gl texture as dmabuf.
- if (texture_id_ != 0 && !make_context_current_cb_.is_null()) {
- if (!make_context_current_cb_.Run())
- return false;
-
- scoped_refptr<gl::GLImageNativePixmap> image(new gl::GLImageNativePixmap(
- size_, BufferFormatToInternalFormat(format)));
-
- // Create an EGLImage from a gl texture
- if (!image->InitializeFromTexture(texture_id_)) {
- DLOG(ERROR) << "Failed to initialize eglimage from texture id: "
- << texture_id_;
- return false;
- }
-
- // Export the EGLImage as dmabuf.
- gfx::NativePixmapHandle native_pixmap_handle = image->ExportHandle();
- if (!native_pixmap_handle.planes.size()) {
- DLOG(ERROR) << "Failed to export EGLImage as dmabuf fds";
- return false;
- }
-
- // Convert NativePixmapHandle to NativePixmapDmaBuf.
- scoped_refptr<gfx::NativePixmap> native_pixmap_dmabuf(
- new gfx::NativePixmapDmaBuf(size_, format, native_pixmap_handle));
- if (!native_pixmap_dmabuf->AreDmaBufFdsValid()) {
- DLOG(ERROR) << "Invalid dmabuf fds";
- return false;
- }
-
- if (!image->BindTexImage(texture_target_)) {
- DLOG(ERROR) << "Failed to bind texture to GLImage";
- return false;
- }
-
- // The |pixmap_| takes ownership of the dmabuf fds. So the only reason
- // to keep a reference on the image is because the GPU service needs to
- // track this image as it will be attached to a client texture.
- pixmap_ = native_pixmap_dmabuf;
- gl_image_ = image;
+ if (make_context_current_cb_ && !make_context_current_cb_.Run())
+ return false;
+
+ scoped_refptr<gl::GLImageNativePixmap> image(
+ new gl::GLImageNativePixmap(size_, BufferFormatToInternalFormat(format)));
+
+ // Create an EGLImage from a gl texture
+ if (!image->InitializeFromTexture(texture_id_)) {
+ DLOG(ERROR) << "Failed to initialize eglimage from texture id: "
+ << texture_id_;
+ return false;
}
- if (!pixmap_) {
- DVLOG(1) << "Failed allocating a pixmap";
+ // Export the EGLImage as dmabuf.
+ gfx::NativePixmapHandle native_pixmap_handle = image->ExportHandle();
+ if (!native_pixmap_handle.planes.size()) {
+ DLOG(ERROR) << "Failed to export EGLImage as dmabuf fds";
return false;
}
+ // Convert NativePixmapHandle to NativePixmapDmaBuf.
+ scoped_refptr<gfx::NativePixmap> native_pixmap_dmabuf(
+ new gfx::NativePixmapDmaBuf(size_, format, native_pixmap_handle));
+ if (!native_pixmap_dmabuf->AreDmaBufFdsValid()) {
+ DLOG(ERROR) << "Invalid dmabuf fds";
+ return false;
+ }
+
+ if (!image->BindTexImage(texture_target_)) {
+ DLOG(ERROR) << "Failed to bind texture to GLImage";
+ return false;
+ }
+
+ // The |pixmap_| takes ownership of the dmabuf fds. So the only reason
+ // to keep a reference on the image is because the GPU service needs to
+ // track this image as it will be attached to a client texture.
+ pixmap_ = native_pixmap_dmabuf;
+ gl_image_ = image;
+
return Initialize();
}
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.cc b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.cc
index f07235bed0a..9cd691f5f92 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_ozone.cc
@@ -35,6 +35,9 @@ VaapiPictureNativePixmapOzone::VaapiPictureNativePixmapOzone(
client_texture_id,
texture_target) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ // Either |texture_id| and |client_texture_id| are both zero, or not.
+ DCHECK((texture_id == 0 && client_texture_id == 0) ||
+ (texture_id != 0 && client_texture_id != 0));
}
VaapiPictureNativePixmapOzone::~VaapiPictureNativePixmapOzone() {
@@ -56,34 +59,35 @@ bool VaapiPictureNativePixmapOzone::Initialize() {
return false;
}
+ // ARC++ has no texture ids.
+ if (texture_id_ == 0 && client_texture_id_ == 0)
+ return true;
+
// Import dmabuf fds into the output gl texture through EGLImage.
- if (texture_id_ != 0 && !make_context_current_cb_.is_null()) {
- if (!make_context_current_cb_.Run())
- return false;
-
- gl::ScopedTextureBinder texture_binder(texture_target_, texture_id_);
-
- gfx::BufferFormat format = pixmap_->GetBufferFormat();
-
- scoped_refptr<gl::GLImageNativePixmap> image(new gl::GLImageNativePixmap(
- size_, BufferFormatToInternalFormat(format)));
- if (!image->Initialize(pixmap_.get(), format)) {
- LOG(ERROR) << "Failed to create GLImage";
- return false;
- }
- gl_image_ = image;
- if (!gl_image_->BindTexImage(texture_target_)) {
- LOG(ERROR) << "Failed to bind texture to GLImage";
- return false;
- }
+ if (make_context_current_cb_ && !make_context_current_cb_.Run())
+ return false;
+
+ gl::ScopedTextureBinder texture_binder(texture_target_, texture_id_);
+
+ const gfx::BufferFormat format = pixmap_->GetBufferFormat();
+
+ scoped_refptr<gl::GLImageNativePixmap> image(
+ new gl::GLImageNativePixmap(size_, BufferFormatToInternalFormat(format)));
+ if (!image->Initialize(pixmap_.get(), format)) {
+ LOG(ERROR) << "Failed to create GLImage";
+ return false;
+ }
+ gl_image_ = image;
+ if (!gl_image_->BindTexImage(texture_target_)) {
+ LOG(ERROR) << "Failed to bind texture to GLImage";
+ return false;
}
- if (client_texture_id_ != 0 && !bind_image_cb_.is_null()) {
- if (!bind_image_cb_.Run(client_texture_id_, texture_target_, gl_image_,
- true)) {
- LOG(ERROR) << "Failed to bind client_texture_id";
- return false;
- }
+ if (bind_image_cb_ &&
+ !bind_image_cb_.Run(client_texture_id_, texture_target_, gl_image_,
+ true /* can_bind_to_sampler */)) {
+ LOG(ERROR) << "Failed to bind client_texture_id";
+ return false;
}
return true;
@@ -98,7 +102,7 @@ bool VaapiPictureNativePixmapOzone::Allocate(gfx::BufferFormat format) {
factory->CreateNativePixmap(gfx::kNullAcceleratedWidget, size_, format,
gfx::BufferUsage::SCANOUT_VDA_WRITE);
if (!pixmap_) {
- DVLOG(1) << "Failed allocating a pixmap";
+ LOG(ERROR) << "Failed allocating a pixmap";
return false;
}
@@ -118,7 +122,7 @@ bool VaapiPictureNativePixmapOzone::ImportGpuMemoryBufferHandle(
gpu_memory_buffer_handle.native_pixmap_handle);
if (!pixmap_) {
- DVLOG(1) << "Failed creating a pixmap from a native handle";
+ LOG(ERROR) << "Failed creating a pixmap from a native handle";
return false;
}
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc b/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
index 30d6f0d9712..dca5b5c46d7 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
@@ -33,6 +33,8 @@ VaapiTFPPicture::VaapiTFPPicture(
x_display_(gfx::GetXDisplay()),
x_pixmap_(0) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(texture_id);
+ DCHECK(client_texture_id);
}
VaapiTFPPicture::~VaapiTFPPicture() {
@@ -50,22 +52,20 @@ bool VaapiTFPPicture::Initialize() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(x_pixmap_);
- if (texture_id_ != 0 && !make_context_current_cb_.is_null()) {
- if (!make_context_current_cb_.Run())
- return false;
-
- glx_image_ = new gl::GLImageGLX(size_, GL_RGB);
- if (!glx_image_->Initialize(x_pixmap_)) {
- // x_pixmap_ will be freed in the destructor.
- DLOG(ERROR) << "Failed creating a GLX Pixmap for TFP";
- return false;
- }
-
- gl::ScopedTextureBinder texture_binder(texture_target_, texture_id_);
- if (!glx_image_->BindTexImage(texture_target_)) {
- DLOG(ERROR) << "Failed to bind texture to glx image";
- return false;
- }
+ if (make_context_current_cb_ && !make_context_current_cb_.Run())
+ return false;
+
+ glx_image_ = new gl::GLImageGLX(size_, GL_RGB);
+ if (!glx_image_->Initialize(x_pixmap_)) {
+ // x_pixmap_ will be freed in the destructor.
+ DLOG(ERROR) << "Failed creating a GLX Pixmap for TFP";
+ return false;
+ }
+
+ gl::ScopedTextureBinder texture_binder(texture_target_, texture_id_);
+ if (!glx_image_->BindTexImage(texture_target_)) {
+ DLOG(ERROR) << "Failed to bind texture to glx image";
+ return false;
}
return true;
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
index cac05e1dd1c..b3473260d97 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
@@ -22,6 +22,7 @@
#include "base/trace_event/trace_event.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/unaligned_shared_memory.h"
#include "media/gpu/accelerated_video_decoder.h"
#include "media/gpu/format_utils.h"
#include "media/gpu/h264_decoder.h"
@@ -85,11 +86,11 @@ class VaapiVideoDecodeAccelerator::InputBuffer {
public:
InputBuffer() = default;
InputBuffer(uint32_t id,
- std::unique_ptr<SharedMemoryRegion> shm,
+ std::unique_ptr<UnalignedSharedMemory> shm,
base::OnceCallback<void(int32_t id)> release_cb)
: id_(id), shm_(std::move(shm)), release_cb_(std::move(release_cb)) {}
~InputBuffer() {
- VLOGF(4) << "id = " << id_;
+ DVLOGF(4) << "id = " << id_;
if (release_cb_)
std::move(release_cb_).Run(id_);
}
@@ -97,11 +98,11 @@ class VaapiVideoDecodeAccelerator::InputBuffer {
// Indicates this is a dummy buffer for flush request.
bool IsFlushRequest() const { return shm_ == nullptr; }
int32_t id() const { return id_; }
- SharedMemoryRegion* shm() const { return shm_.get(); }
+ UnalignedSharedMemory* shm() const { return shm_.get(); }
private:
const int32_t id_ = -1;
- const std::unique_ptr<SharedMemoryRegion> shm_;
+ const std::unique_ptr<UnalignedSharedMemory> shm_;
base::OnceCallback<void(int32_t id)> release_cb_;
DISALLOW_COPY_AND_ASSIGN(InputBuffer);
@@ -131,7 +132,7 @@ VaapiPicture* VaapiVideoDecodeAccelerator::PictureById(
int32_t picture_buffer_id) {
Pictures::iterator it = pictures_.find(picture_buffer_id);
if (it == pictures_.end()) {
- VLOGF(4) << "Picture id " << picture_buffer_id << " does not exist";
+ DVLOGF(4) << "Picture id " << picture_buffer_id << " does not exist";
return NULL;
}
@@ -225,8 +226,8 @@ void VaapiVideoDecodeAccelerator::OutputPicture(
int32_t output_id = picture->picture_buffer_id();
- VLOGF(4) << "Outputting VASurface " << va_surface->id()
- << " into pixmap bound to picture buffer id " << output_id;
+ DVLOGF(4) << "Outputting VASurface " << va_surface->id()
+ << " into pixmap bound to picture buffer id " << output_id;
{
TRACE_EVENT2("media,gpu", "VAVDA::DownloadFromSurface", "input_id",
input_id, "output_id", output_id);
@@ -237,9 +238,9 @@ void VaapiVideoDecodeAccelerator::OutputPicture(
// Notify the client a picture is ready to be displayed.
++num_frames_at_client_;
TRACE_COUNTER1("media,gpu", "Vaapi frames at client", num_frames_at_client_);
- VLOGF(4) << "Notifying output picture id " << output_id << " for input "
- << input_id
- << " is ready. visible rect: " << visible_rect.ToString();
+ DVLOGF(4) << "Notifying output picture id " << output_id << " for input "
+ << input_id
+ << " is ready. visible rect: " << visible_rect.ToString();
if (client_) {
// TODO(hubbe): Use the correct color space. http://crbug.com/647725
client_->PictureReady(Picture(output_id, input_id, visible_rect,
@@ -272,8 +273,8 @@ void VaapiVideoDecodeAccelerator::TryOutputSurface() {
void VaapiVideoDecodeAccelerator::QueueInputBuffer(
const BitstreamBuffer& bitstream_buffer) {
- VLOGF(4) << "Queueing new input buffer id: " << bitstream_buffer.id()
- << " size: " << (int)bitstream_buffer.size();
+ DVLOGF(4) << "Queueing new input buffer id: " << bitstream_buffer.id()
+ << " size: " << (int)bitstream_buffer.size();
DCHECK(task_runner_->BelongsToCurrentThread());
TRACE_EVENT1("media,gpu", "QueueInputBuffer", "input_id",
bitstream_buffer.id());
@@ -286,10 +287,11 @@ void VaapiVideoDecodeAccelerator::QueueInputBuffer(
DCHECK(flush_buffer->IsFlushRequest());
input_buffers_.push(std::move(flush_buffer));
} else {
- std::unique_ptr<SharedMemoryRegion> shm(
- new SharedMemoryRegion(bitstream_buffer, true));
- RETURN_AND_NOTIFY_ON_FAILURE(shm->Map(), "Failed to map input buffer",
- UNREADABLE_INPUT, );
+ auto shm = std::make_unique<UnalignedSharedMemory>(
+ bitstream_buffer.handle(), bitstream_buffer.size(), true);
+ RETURN_AND_NOTIFY_ON_FAILURE(
+ shm->MapAt(bitstream_buffer.offset(), bitstream_buffer.size()),
+ "Failed to map input buffer", UNREADABLE_INPUT, );
auto input_buffer = std::make_unique<InputBuffer>(
bitstream_buffer.id(), std::move(shm),
@@ -350,12 +352,12 @@ bool VaapiVideoDecodeAccelerator::GetCurrInputBuffer_Locked() {
TRACE_COUNTER1("media,gpu", "Input buffers", input_buffers_.size());
if (curr_input_buffer_->IsFlushRequest()) {
- VLOGF(4) << "New flush buffer";
+ DVLOGF(4) << "New flush buffer";
return true;
}
- VLOGF(4) << "New |curr_input_buffer_|, id: " << curr_input_buffer_->id()
- << " size: " << curr_input_buffer_->shm()->size() << "B";
+ DVLOGF(4) << "New |curr_input_buffer_|, id: " << curr_input_buffer_->id()
+ << " size: " << curr_input_buffer_->shm()->size() << "B";
decoder_->SetStream(
curr_input_buffer_->id(),
static_cast<uint8_t*>(curr_input_buffer_->shm()->memory()),
@@ -391,7 +393,7 @@ void VaapiVideoDecodeAccelerator::DecodeTask() {
if (state_ != kDecoding)
return;
- VLOGF(4) << "Decode task";
+ DVLOGF(4) << "Decode task";
// Try to decode what stream data is (still) in the decoder until we run out
// of it.
@@ -449,7 +451,7 @@ void VaapiVideoDecodeAccelerator::DecodeTask() {
PLATFORM_FAILURE, );
return;
- case AcceleratedVideoDecoder::kNoKey:
+ case AcceleratedVideoDecoder::kTryAgain:
NOTREACHED() << "Should not reach here unless this class accepts "
"encrypted streams.";
RETURN_AND_NOTIFY_ON_FAILURE(false, "Error decoding stream",
@@ -586,22 +588,12 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
DCHECK_EQ(va_surface_ids.size(), buffers.size());
for (size_t i = 0; i < buffers.size(); ++i) {
- uint32_t client_id = !buffers[i].client_texture_ids().empty()
- ? buffers[i].client_texture_ids()[0]
- : 0;
- uint32_t service_id = !buffers[i].service_texture_ids().empty()
- ? buffers[i].service_texture_ids()[0]
- : 0;
-
- DCHECK_EQ(buffers[i].texture_target(),
- vaapi_picture_factory_->GetGLTextureTarget());
+ DCHECK(requested_pic_size_ == buffers[i].size());
std::unique_ptr<VaapiPicture> picture(vaapi_picture_factory_->Create(
- vaapi_wrapper_, make_context_current_cb_, bind_image_cb_,
- buffers[i].id(), requested_pic_size_, service_id, client_id,
- buffers[i].texture_target()));
- RETURN_AND_NOTIFY_ON_FAILURE(
- picture.get(), "Failed creating a VaapiPicture", PLATFORM_FAILURE, );
+ vaapi_wrapper_, make_context_current_cb_, bind_image_cb_, buffers[i]));
+ RETURN_AND_NOTIFY_ON_FAILURE(picture, "Failed creating a VaapiPicture",
+ PLATFORM_FAILURE, );
if (output_mode_ == Config::OutputMode::ALLOCATE) {
RETURN_AND_NOTIFY_ON_FAILURE(
@@ -649,8 +641,8 @@ void VaapiVideoDecodeAccelerator::ImportBufferForPicture(
// picture, but it has not yet executed when this ImportBufferForPicture
// was posted to us by the client. In that case just ignore this (we've
// already dismissed it and accounted for that).
- VLOGF(3) << "got picture id=" << picture_buffer_id
- << " not in use (anymore?).";
+ DVLOGF(3) << "got picture id=" << picture_buffer_id
+ << " not in use (anymore?).";
return;
}
@@ -670,7 +662,7 @@ void VaapiVideoDecodeAccelerator::ImportBufferForPicture(
void VaapiVideoDecodeAccelerator::ReusePictureBuffer(
int32_t picture_buffer_id) {
- VLOGF(4) << "picture id=" << picture_buffer_id;
+ DVLOGF(4) << "picture id=" << picture_buffer_id;
DCHECK(task_runner_->BelongsToCurrentThread());
TRACE_EVENT1("media,gpu", "VAVDA::ReusePictureBuffer", "Picture id",
picture_buffer_id);
@@ -680,8 +672,8 @@ void VaapiVideoDecodeAccelerator::ReusePictureBuffer(
// picture, but it has not yet executed when this ReusePictureBuffer
// was posted to us by the client. In that case just ignore this (we've
// already dismissed it and accounted for that).
- VLOGF(3) << "got picture id=" << picture_buffer_id
- << " not in use (anymore?).";
+ DVLOGF(3) << "got picture id=" << picture_buffer_id
+ << " not in use (anymore?).";
return;
}
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
index 5308c735134..271bcc00d67 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
@@ -28,7 +28,6 @@
#include "media/base/bitstream_buffer.h"
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
#include "media/gpu/media_gpu_export.h"
-#include "media/gpu/shared_memory_region.h"
#include "media/gpu/vaapi/vaapi_picture_factory.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/video/picture.h"
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
index a2c88a3a305..76907a60138 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
@@ -116,15 +116,14 @@ class MockVaapiPictureFactory : public VaapiPictureFactory {
const scoped_refptr<VaapiWrapper>& vaapi_wrapper,
const MakeGLContextCurrentCallback& make_context_current_cb,
const BindGLImageCallback& bind_image_cb,
- int32_t picture_buffer_id,
- const gfx::Size& size,
- uint32_t texture_id,
- uint32_t client_texture_id,
- uint32_t texture_target) override {
- MockCreateVaapiPicture(vaapi_wrapper.get(), size);
+ const PictureBuffer& picture_buffer) override {
+ const uint32_t service_texture_id = picture_buffer.service_texture_ids()[0];
+ const uint32_t client_texture_id = picture_buffer.client_texture_ids()[0];
+ MockCreateVaapiPicture(vaapi_wrapper.get(), picture_buffer.size());
return std::make_unique<MockVaapiPicture>(
vaapi_wrapper, make_context_current_cb, bind_image_cb,
- picture_buffer_id, size, texture_id, client_texture_id, texture_target);
+ picture_buffer.id(), picture_buffer.size(), service_texture_id,
+ client_texture_id, picture_buffer.texture_target());
}
};
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
index a1ebebe4012..5c9a84ebf51 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
@@ -25,8 +25,9 @@
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/unaligned_shared_memory.h"
+#include "media/base/video_bitrate_allocation.h"
#include "media/gpu/h264_dpb.h"
-#include "media/gpu/shared_memory_region.h"
#include "media/gpu/vaapi/h264_encoder.h"
#include "media/gpu/vaapi/vaapi_common.h"
#include "media/gpu/vaapi/vp8_encoder.h"
@@ -123,10 +124,15 @@ struct VaapiVideoEncodeAccelerator::InputFrameRef {
};
struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
- BitstreamBufferRef(int32_t id, std::unique_ptr<SharedMemoryRegion> shm)
- : id(id), shm(std::move(shm)) {}
+ BitstreamBufferRef(int32_t id, const BitstreamBuffer& buffer)
+ : id(id),
+ shm(std::make_unique<UnalignedSharedMemory>(buffer.handle(),
+ buffer.size(),
+ false)),
+ offset(buffer.offset()) {}
const int32_t id;
- const std::unique_ptr<SharedMemoryRegion> shm;
+ const std::unique_ptr<UnalignedSharedMemory> shm;
+ const off_t offset;
};
VideoEncodeAccelerator::SupportedProfiles
@@ -376,11 +382,8 @@ void VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer(
void VaapiVideoEncodeAccelerator::SubmitH264BitstreamBuffer(
scoped_refptr<H264BitstreamBuffer> buffer) {
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
- // TODO(crbug.com/844303): use vaMapBuffer in VaapiWrapper::SubmitBuffer()
- // instead to avoid this.
- void* non_const_ptr = const_cast<uint8_t*>(buffer->data());
if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
- buffer->BytesInBuffer(), non_const_ptr)) {
+ buffer->BytesInBuffer(), buffer->data())) {
NOTIFY_ERROR(kPlatformFailureError, "Failed submitting a bitstream buffer");
}
}
@@ -432,10 +435,8 @@ void VaapiVideoEncodeAccelerator::ReturnBitstreamBuffer(
<< " id: " << buffer->id << " size: " << data_size;
child_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&Client::BitstreamBufferReady, client_, buffer->id,
- data_size, encode_job->IsKeyframeRequested(),
- encode_job->timestamp()));
+ FROM_HERE, base::BindOnce(&Client::BitstreamBufferReady, client_,
+ buffer->id, encode_job->Metadata(data_size)));
}
void VaapiVideoEncodeAccelerator::Encode(const scoped_refptr<VideoFrame>& frame,
@@ -542,8 +543,7 @@ void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
return;
}
- auto buffer_ref = std::make_unique<BitstreamBufferRef>(
- buffer.id(), std::make_unique<SharedMemoryRegion>(buffer, false));
+ auto buffer_ref = std::make_unique<BitstreamBufferRef>(buffer.id(), buffer);
encoder_thread_task_runner_->PostTask(
FROM_HERE,
@@ -556,7 +556,7 @@ void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
DCHECK_NE(state_, kUninitialized);
- if (!buffer_ref->shm->Map()) {
+ if (!buffer_ref->shm->MapAt(buffer_ref->offset, buffer_ref->shm->size())) {
NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
return;
}
@@ -571,22 +571,41 @@ void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
VLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
DCHECK(child_task_runner_->BelongsToCurrentThread());
+ VideoBitrateAllocation allocation;
+ allocation.SetBitrate(0, 0, bitrate);
+ encoder_thread_task_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(
+ &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
+ base::Unretained(this), allocation, framerate));
+}
+
+void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
+ const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) {
+ VLOGF(2) << "bitrate: " << bitrate_allocation.GetSumBps()
+ << " framerate: " << framerate;
+ DCHECK(child_task_runner_->BelongsToCurrentThread());
+
encoder_thread_task_runner_->PostTask(
FROM_HERE,
base::Bind(
&VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
- base::Unretained(this), bitrate, framerate));
+ base::Unretained(this), bitrate_allocation, framerate));
}
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
- uint32_t bitrate,
+ VideoBitrateAllocation bitrate_allocation,
uint32_t framerate) {
- VLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
+ VLOGF(2) << "bitrate: " << bitrate_allocation.GetSumBps()
+ << " framerate: " << framerate;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
DCHECK_NE(state_, kUninitialized);
- if (!encoder_->UpdateRates(bitrate, framerate))
- VLOGF(1) << "Failed to update rates to " << bitrate << " " << framerate;
+ if (!encoder_->UpdateRates(bitrate_allocation, framerate)) {
+ VLOGF(1) << "Failed to update rates to " << bitrate_allocation.GetSumBps()
+ << " " << framerate;
+ }
}
void VaapiVideoEncodeAccelerator::Flush(FlushCallback flush_callback) {
@@ -938,7 +957,7 @@ bool VaapiVideoEncodeAccelerator::VP8Accelerator::SubmitFrameParameters(
seq_param.frame_width_scale = frame_header->horizontal_scale;
seq_param.frame_height_scale = frame_header->vertical_scale;
seq_param.error_resilient = 1;
- seq_param.bits_per_second = encode_params.bitrate_bps;
+ seq_param.bits_per_second = encode_params.bitrate_allocation.GetSumBps();
seq_param.intra_period = encode_params.kf_period_frames;
VAEncPictureParameterBufferVP8 pic_param = {};
@@ -1035,7 +1054,8 @@ bool VaapiVideoEncodeAccelerator::VP8Accelerator::SubmitFrameParameters(
frame_header->quantization_hdr.uv_ac_delta;
VAEncMiscParameterRateControl rate_control_param = {};
- rate_control_param.bits_per_second = encode_params.bitrate_bps;
+ rate_control_param.bits_per_second =
+ encode_params.bitrate_allocation.GetSumBps();
rate_control_param.target_percentage = kTargetBitratePercentage;
rate_control_param.window_size = encode_params.cpb_window_size_ms;
rate_control_param.initial_qp = encode_params.initial_qp;
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
index bcd4fcc8d8f..f2e8ee82dab 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
@@ -44,6 +44,9 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
void UseOutputBitstreamBuffer(const BitstreamBuffer& buffer) override;
void RequestEncodingParametersChange(uint32_t bitrate,
uint32_t framerate) override;
+ void RequestEncodingParametersChange(
+ const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) override;
void Destroy() override;
void Flush(FlushCallback flush_callback) override;
@@ -80,8 +83,10 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
void UseOutputBitstreamBufferTask(
std::unique_ptr<BitstreamBufferRef> buffer_ref);
- void RequestEncodingParametersChangeTask(uint32_t bitrate,
- uint32_t framerate);
+ void RequestEncodingParametersChangeTask(
+ VideoBitrateAllocation bitrate_allocation,
+ uint32_t framerate);
+
void DestroyTask();
void FlushTask();
diff --git a/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc
index 2e763ebdd19..638537b11b4 100644
--- a/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc
@@ -216,9 +216,8 @@ bool VaapiVP8Accelerator::SubmitDecode(
&slice_param))
return false;
- void* non_const_ptr = const_cast<uint8_t*>(frame_hdr->data);
if (!vaapi_wrapper_->SubmitBuffer(VASliceDataBufferType,
- frame_hdr->frame_size, non_const_ptr))
+ frame_hdr->frame_size, frame_hdr->data))
return false;
return vaapi_dec_->DecodeVASurface(pic->AsVaapiVP8Picture()->va_surface());
diff --git a/chromium/media/gpu/vaapi/vaapi_vp9_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_vp9_accelerator.cc
index 5a6d73504a4..53ebb7beb06 100644
--- a/chromium/media/gpu/vaapi/vaapi_vp9_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_vp9_accelerator.cc
@@ -154,9 +154,8 @@ bool VaapiVP9Accelerator::SubmitDecode(
sizeof(slice_param), &slice_param))
return false;
- void* non_const_ptr = const_cast<uint8_t*>(frame_hdr->data);
if (!vaapi_wrapper_->SubmitBuffer(VASliceDataBufferType,
- frame_hdr->frame_size, non_const_ptr))
+ frame_hdr->frame_size, frame_hdr->data))
return false;
return vaapi_dec_->DecodeVASurface(pic->AsVaapiVP9Picture()->va_surface());
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.cc b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
index 4068c9f69f8..98e22cc96a8 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
@@ -909,14 +909,30 @@ scoped_refptr<VASurface> VaapiWrapper::CreateVASurfaceForPixmap(
bool VaapiWrapper::SubmitBuffer(VABufferType va_buffer_type,
size_t size,
- void* buffer) {
+ const void* buffer) {
base::AutoLock auto_lock(*va_lock_);
VABufferID buffer_id;
VAStatus va_res = vaCreateBuffer(va_display_, va_context_id_, va_buffer_type,
- size, 1, buffer, &buffer_id);
+ size, 1, nullptr, &buffer_id);
VA_SUCCESS_OR_RETURN(va_res, "Failed to create a VA buffer", false);
+ void* va_buffer_data = nullptr;
+ va_res = vaMapBuffer(va_display_, buffer_id, &va_buffer_data);
+ VA_LOG_ON_ERROR(va_res, "vaMapBuffer failed");
+ if (va_res != VA_STATUS_SUCCESS) {
+ vaDestroyBuffer(va_display_, buffer_id);
+ return false;
+ }
+
+ DCHECK(va_buffer_data);
+ // TODO(selcott): Investigate potentially faster alternatives to memcpy here
+ // such as libyuv::CopyX and family.
+ memcpy(va_buffer_data, buffer, size);
+
+ va_res = vaUnmapBuffer(va_display_, buffer_id);
+ VA_LOG_ON_ERROR(va_res, "vaUnmapBuffer failed");
+
switch (va_buffer_type) {
case VASliceParameterBufferType:
case VASliceDataBufferType:
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.h b/chromium/media/gpu/vaapi/vaapi_wrapper.h
index 1dd6a3cf29e..b892b46aa8e 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.h
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.h
@@ -122,7 +122,9 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// Data submitted via this method awaits in the HW codec until
// ExecuteAndDestroyPendingBuffers() is called to execute or
// DestroyPendingBuffers() is used to cancel a pending job.
- bool SubmitBuffer(VABufferType va_buffer_type, size_t size, void* buffer);
+ bool SubmitBuffer(VABufferType va_buffer_type,
+ size_t size,
+ const void* buffer);
// Submit a VAEncMiscParameterBuffer of given |misc_param_type|, copying its
// data from |buffer| of size |size|, into HW codec. The data in |buffer| is
diff --git a/chromium/media/gpu/vaapi/vp8_encoder.cc b/chromium/media/gpu/vaapi/vp8_encoder.cc
index a7b8cd413f5..2463f23dc0b 100644
--- a/chromium/media/gpu/vaapi/vp8_encoder.cc
+++ b/chromium/media/gpu/vaapi/vp8_encoder.cc
@@ -25,7 +25,6 @@ const int kDefaultQP = (3 * kMinQP + kMaxQP) / 4;
VP8Encoder::EncodeParams::EncodeParams()
: kf_period_frames(kKFPeriod),
- bitrate_bps(0),
framerate(0),
cpb_window_size_ms(kCPBWindowSizeMs),
cpb_size_bits(0),
@@ -67,7 +66,9 @@ bool VP8Encoder::Initialize(const gfx::Size& visible_size,
Reset();
- return UpdateRates(initial_bitrate, initial_framerate);
+ VideoBitrateAllocation initial_bitrate_allocation;
+ initial_bitrate_allocation.SetBitrate(0, 0, initial_bitrate);
+ return UpdateRates(initial_bitrate_allocation, initial_framerate);
}
gfx::Size VP8Encoder::GetCodedSize() const {
@@ -118,22 +119,24 @@ bool VP8Encoder::PrepareEncodeJob(EncodeJob* encode_job) {
return true;
}
-bool VP8Encoder::UpdateRates(uint32_t bitrate, uint32_t framerate) {
+bool VP8Encoder::UpdateRates(const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- if (bitrate == 0 || framerate == 0)
+ if (bitrate_allocation.GetSumBps() == 0 || framerate == 0)
return false;
- if (current_params_.bitrate_bps == bitrate &&
+ if (current_params_.bitrate_allocation == bitrate_allocation &&
current_params_.framerate == framerate) {
return true;
}
- current_params_.bitrate_bps = bitrate;
+ current_params_.bitrate_allocation = bitrate_allocation;
current_params_.framerate = framerate;
current_params_.cpb_size_bits =
- current_params_.bitrate_bps * current_params_.cpb_window_size_ms / 1000;
+ current_params_.bitrate_allocation.GetSumBps() *
+ current_params_.cpb_window_size_ms / 1000;
return true;
}
diff --git a/chromium/media/gpu/vaapi/vp8_encoder.h b/chromium/media/gpu/vaapi/vp8_encoder.h
index 66abde9ce8d..a5f9e5e4e87 100644
--- a/chromium/media/gpu/vaapi/vp8_encoder.h
+++ b/chromium/media/gpu/vaapi/vp8_encoder.h
@@ -10,6 +10,7 @@
#include "base/macros.h"
#include "base/sequence_checker.h"
+#include "media/base/video_bitrate_allocation.h"
#include "media/filters/vp8_parser.h"
#include "media/gpu/vaapi/accelerated_video_encoder.h"
#include "media/gpu/vp8_picture.h"
@@ -25,8 +26,8 @@ class VP8Encoder : public AcceleratedVideoEncoder {
// Produce a keyframe at least once per this many frames.
size_t kf_period_frames;
- // Bitrate in bps.
- uint32_t bitrate_bps;
+ // Bitrate allocation in bps.
+ VideoBitrateAllocation bitrate_allocation;
// Framerate in FPS.
uint32_t framerate;
@@ -75,7 +76,8 @@ class VP8Encoder : public AcceleratedVideoEncoder {
VideoCodecProfile profile,
uint32_t initial_bitrate,
uint32_t initial_framerate) override;
- bool UpdateRates(uint32_t bitrate, uint32_t framerate) override;
+ bool UpdateRates(const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) override;
gfx::Size GetCodedSize() const override;
size_t GetBitstreamBufferSize() const override;
size_t GetMaxNumOfRefFrames() const override;
diff --git a/chromium/media/gpu/video_decode_accelerator_unittest.cc b/chromium/media/gpu/video_decode_accelerator_unittest.cc
index 871c0378c5a..eeaf91577f6 100644
--- a/chromium/media/gpu/video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/video_decode_accelerator_unittest.cc
@@ -56,8 +56,8 @@
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/test_data_util.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/fake_video_decode_accelerator.h"
@@ -67,6 +67,7 @@
#include "media/gpu/test/video_accelerator_unittest_helpers.h"
#include "media/gpu/test/video_decode_accelerator_unittest_helpers.h"
#include "media/video/h264_parser.h"
+#include "mojo/core/embedder/embedder.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/codec/png_codec.h"
#include "ui/gl/gl_image.h"
@@ -258,8 +259,8 @@ class GLRenderingVDAClient
// will start delaying the call to ReusePictureBuffer() for kReuseDelay.
// |decode_calls_per_second| is the number of VDA::Decode calls per second.
// If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
- // |render_as_thumbnails| indicates if the decoded picture will be rendered
- // as thumbnails at the end of tests.
+ // |num_frames| is the number of frames that must be verified to be decoded
+ // during the test.
struct Config {
size_t window_id = 0;
size_t num_in_flight_decodes = 1;
@@ -275,7 +276,7 @@ class GLRenderingVDAClient
bool fake_decoder = false;
size_t delay_reuse_after_frame_num = std::numeric_limits<size_t>::max();
size_t decode_calls_per_second = 0;
- bool render_as_thumbnails = false;
+ size_t num_frames = 0;
};
// Doesn't take ownership of |rendering_helper| or |note|, which must outlive
@@ -323,6 +324,9 @@ class GLRenderingVDAClient
void SetState(ClientState new_state);
void FinishInitialization();
void ReturnPicture(int32_t picture_buffer_id);
+ bool IsLastPlayThrough() {
+ return config_.num_play_throughs - completed_play_throughs_ == 1;
+ }
// Delete the associated decoder helper.
void DeleteDecoder();
@@ -343,7 +347,7 @@ class GLRenderingVDAClient
std::unique_ptr<base::WeakPtrFactory<VideoDecodeAccelerator>>
weak_vda_ptr_factory_;
std::unique_ptr<GpuVideoDecodeAcceleratorFactory> vda_factory_;
- size_t remaining_play_throughs_;
+ size_t completed_play_throughs_;
ResetPoint reset_point_;
ClientState state_;
size_t num_queued_fragments_;
@@ -398,7 +402,7 @@ GLRenderingVDAClient::GLRenderingVDAClient(
outstanding_decodes_(0),
next_bitstream_buffer_id_(0),
note_(note),
- remaining_play_throughs_(config_.num_play_throughs),
+ completed_play_throughs_(0),
reset_point_(config_.reset_point),
state_(CS_CREATED),
num_queued_fragments_(0),
@@ -525,14 +529,15 @@ void GLRenderingVDAClient::ProvidePictureBuffers(
LOG_ASSERT(texture_ref);
int32_t picture_buffer_id = next_picture_buffer_id_++;
+ int irrelevant_id = picture_buffer_id;
LOG_ASSERT(
active_textures_.insert(std::make_pair(picture_buffer_id, texture_ref))
.second);
PictureBuffer::TextureIds texture_ids(1, texture_id);
buffers.push_back(PictureBuffer(picture_buffer_id, dimensions,
- PictureBuffer::TextureIds(), texture_ids,
- texture_target, pixel_format));
+ PictureBuffer::TextureIds{irrelevant_id++},
+ texture_ids, texture_target, pixel_format));
}
decoder_->AssignPictureBuffers(buffers);
@@ -554,11 +559,10 @@ void GLRenderingVDAClient::DismissPictureBuffer(int32_t picture_buffer_id) {
}
void GLRenderingVDAClient::PictureReady(const Picture& picture) {
- // We shouldn't be getting pictures delivered after Reset has completed.
- LOG_ASSERT(state_ < CS_RESET);
-
if (decoder_deleted())
return;
+ // We shouldn't be getting pictures delivered after Reset has completed.
+ LOG_ASSERT(state_ < CS_RESET);
gfx::Rect visible_rect = picture.visible_rect();
if (!visible_rect.IsEmpty())
@@ -580,7 +584,7 @@ void GLRenderingVDAClient::PictureReady(const Picture& picture) {
// Mid-stream reset applies only to the last play-through per constructor
// comment.
- if (remaining_play_throughs_ == 1 && reset_point_ == MID_STREAM_RESET &&
+ if (IsLastPlayThrough() && reset_point_ == MID_STREAM_RESET &&
config_.reset_after_frame_num == num_decoded_frames_) {
decoder_->Reset();
// Re-start decoding from the beginning of the stream to avoid needing to
@@ -597,19 +601,22 @@ void GLRenderingVDAClient::PictureReady(const Picture& picture) {
base::Bind(&GLRenderingVDAClient::ReturnPicture, AsWeakPtr(),
picture.picture_buffer_id()));
ASSERT_TRUE(pending_textures_.insert(*texture_it).second);
-
- if (config_.render_as_thumbnails) {
- rendering_helper_->RenderThumbnail(video_frame->texture_target(),
- video_frame->texture_id());
- } else {
- rendering_helper_->QueueVideoFrame(config_.window_id, video_frame);
- }
+ rendering_helper_->ConsumeVideoFrame(config_.window_id,
+ std::move(video_frame));
}
void GLRenderingVDAClient::ReturnPicture(int32_t picture_buffer_id) {
+ // Remove TextureRef from pending_textures_ regardless whether decoder is
+ // deleted.
+ LOG_ASSERT(1U == pending_textures_.erase(picture_buffer_id));
if (decoder_deleted())
return;
- LOG_ASSERT(1U == pending_textures_.erase(picture_buffer_id));
+
+ if (active_textures_.find(picture_buffer_id) == active_textures_.end()) {
+ // The picture associated with picture_buffer_id is dismissed.
+ // Do not execute ReusePictureBuffer().
+ return;
+ }
if (pending_textures_.empty() && state_ == CS_RESETTING) {
SetState(CS_RESET);
@@ -628,8 +635,6 @@ void GLRenderingVDAClient::ReturnPicture(int32_t picture_buffer_id) {
}
void GLRenderingVDAClient::ResetDecoderAfterFlush() {
- DCHECK_GE(remaining_play_throughs_, 1u);
- --remaining_play_throughs_;
// SetState(CS_RESETTING) should be called before decoder_->Reset(), because
// VDA can call NotifyFlushDone() from Reset().
// TODO(johnylin): call SetState() before all decoder Flush() and Reset().
@@ -681,6 +686,11 @@ void GLRenderingVDAClient::NotifyFlushDone() {
return;
}
+ // Check all the Decode()-ed frames are returned by PictureReady() in
+ // END_OF_STREAM_RESET case.
+ if (config_.reset_point == END_OF_STREAM_RESET)
+ EXPECT_EQ(num_decoded_frames_, config_.num_frames);
+
SetState(CS_FLUSHED);
ResetDecoderAfterFlush();
}
@@ -693,9 +703,14 @@ void GLRenderingVDAClient::NotifyResetDone() {
case DONE_RESET_AFTER_FIRST_CONFIG_INFO:
case MID_STREAM_RESET:
reset_point_ = END_OF_STREAM_RESET;
- DecodeNextFragment();
+ // Because VDA::Decode() is executed if |reset_point_| is
+ // MID_STREAM_RESET or RESET_AFTER_FIRST_CONFIG_INFO,
+ // NotifyEndOfBitstreamBuffer() will be invoked. Next VDA::Decode() is
+ // triggered from NotifyEndOfBitstreamBuffer().
return;
case START_OF_STREAM_RESET:
+ EXPECT_EQ(num_decoded_frames_, 0u);
+ EXPECT_EQ(encoded_data_helper_->AtHeadOfStream(), true);
reset_point_ = END_OF_STREAM_RESET;
for (size_t i = 0; i < config_.num_in_flight_decodes; ++i)
DecodeNextFragment();
@@ -708,12 +723,16 @@ void GLRenderingVDAClient::NotifyResetDone() {
break;
}
- if (remaining_play_throughs_) {
+ completed_play_throughs_++;
+ DCHECK_GE(config_.num_play_throughs, completed_play_throughs_);
+
+ if (completed_play_throughs_ < config_.num_play_throughs) {
encoded_data_helper_->Rewind();
FinishInitialization();
return;
}
+ // completed_play_throughs == config.num_play_throughs.
rendering_helper_->Flush(config_.window_id);
if (pending_textures_.empty()) {
@@ -742,7 +761,11 @@ void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
void GLRenderingVDAClient::SetState(ClientState new_state) {
note_->Notify(new_state);
state_ = new_state;
- if (!remaining_play_throughs_ && new_state == config_.delete_decoder_state) {
+ if (IsLastPlayThrough() && new_state == config_.delete_decoder_state) {
+ // If config_.delete_decoder_state is CS_RESET, IsLastPlayThrough() is
+ // false. But it does not matter, because DeleteDecoder() is executed after
+ // SetState(CS_RESET) in NotifyResetDone().
+ ASSERT_NE(config_.delete_decoder_state, CS_RESET);
LOG_ASSERT(!decoder_deleted());
DeleteDecoder();
}
@@ -751,6 +774,10 @@ void GLRenderingVDAClient::SetState(ClientState new_state) {
void GLRenderingVDAClient::FinishInitialization() {
SetState(CS_INITIALIZED);
initialize_done_ticks_ = base::TimeTicks::Now();
+ EXPECT_EQ(encoded_data_helper_->AtHeadOfStream(), true);
+ num_decoded_frames_ = 0;
+ if (decoder_deleted())
+ return;
if (reset_point_ == START_OF_STREAM_RESET) {
decoder_->Reset();
@@ -759,7 +786,7 @@ void GLRenderingVDAClient::FinishInitialization() {
for (size_t i = 0; i < config_.num_in_flight_decodes; ++i)
DecodeNextFragment();
- DCHECK_EQ(outstanding_decodes_, config_.num_in_flight_decodes);
+ EXPECT_EQ(outstanding_decodes_, config_.num_in_flight_decodes);
}
void GLRenderingVDAClient::DeleteDecoder() {
@@ -817,7 +844,7 @@ void GLRenderingVDAClient::DecodeNextFragment() {
next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
decoder_->Decode(bitstream_buffer);
++outstanding_decodes_;
- if (!remaining_play_throughs_ &&
+ if (IsLastPlayThrough() &&
-config_.delete_decoder_state == next_bitstream_buffer_id_) {
DeleteDecoder();
}
@@ -833,6 +860,12 @@ void GLRenderingVDAClient::DecodeNextFragment() {
FROM_HERE,
base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
base::TimeDelta::FromSeconds(1) / config_.decode_calls_per_second);
+ } else {
+ // Unless DecodeNextFragment() is posted from the above PostDelayedTask(),
+ // all the DecodeNextFragment() will be executed from
+ // NotifyEndOfBitstreamBuffer(). The number of Decode()s in flight must be
+ // less than or equal to the specified times.
+ EXPECT_LE(outstanding_decodes_, config_.num_in_flight_decodes);
}
}
@@ -1092,9 +1125,15 @@ static void AssertWaitForStateOrDeleted(
<< ", instead of " << expected_state;
}
+// Fails on Win only. crbug.com/849368
+#if defined(OS_WIN)
+#define MAYBE_TestSimpleDecode DISABLED_TestSimpleDecode
+#else
+#define MAYBE_TestSimpleDecode TestSimpleDecode
+#endif
// Test the most straightforward case possible: data is decoded from a single
// chunk and rendered to the screen.
-TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
+TEST_P(VideoDecodeAcceleratorParamTest, MAYBE_TestSimpleDecode) {
size_t num_concurrent_decoders = std::get<0>(GetParam());
const size_t num_in_flight_decodes = std::get<1>(GetParam());
size_t num_play_throughs = std::get<2>(GetParam());
@@ -1147,7 +1186,7 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
config.profile = video_file->profile;
config.fake_decoder = g_fake_decoder;
config.delay_reuse_after_frame_num = delay_reuse_after_frame_num;
- config.render_as_thumbnails = render_as_thumbnails;
+ config.num_frames = video_file->num_frames;
clients_[index] = std::make_unique<GLRenderingVDAClient>(
std::move(config), video_file->data_str, &rendering_helper_,
@@ -1176,7 +1215,9 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
for (size_t i = 0; i < num_concurrent_decoders; ++i) {
ClientStateNotification<ClientState>* note = notes_[i].get();
ClientState state = note->Wait();
- if (state != CS_INITIALIZED) {
+ EXPECT_TRUE(delete_decoder_state != CS_DECODER_SET ||
+ state == CS_DESTROYED);
+ if (delete_decoder_state != CS_DECODER_SET && state != CS_INITIALIZED) {
skip_performance_and_correctness_checks = true;
// We expect initialization to fail only when more than the supported
// number of decoders is instantiated. Assert here that something else
@@ -1406,7 +1447,6 @@ INSTANTIATE_TEST_CASE_P(
CS_RESETTING,
false,
false),
- std::make_tuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
std::make_tuple(1,
1,
1,
@@ -1494,6 +1534,7 @@ TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
config.profile = video_file->profile;
config.fake_decoder = g_fake_decoder;
config.decode_calls_per_second = kWebRtcDecodeCallsPerSecond;
+ config.num_frames = video_file->num_frames;
clients_.push_back(std::make_unique<GLRenderingVDAClient>(
std::move(config), video_file->data_str, &rendering_helper_,
@@ -1527,6 +1568,7 @@ TEST_F(VideoDecodeAcceleratorTest, NoCrash) {
config.frame_size = gfx::Size(video_file->width, video_file->height);
config.profile = video_file->profile;
config.fake_decoder = g_fake_decoder;
+ config.num_frames = video_file->num_frames;
clients_.push_back(std::make_unique<GLRenderingVDAClient>(
std::move(config), video_file->data_str, &rendering_helper_,
@@ -1550,6 +1592,8 @@ class VDATestSuite : public base::TestSuite {
int Run() {
#if defined(OS_WIN) || defined(OS_CHROMEOS)
+ mojo::core::Init(); // Required only for Win7 tests.
+
// For windows the decoding thread initializes the media foundation decoder
// which uses COM. We need the thread to be a UI thread.
// On Ozone, the backend initializes the event system using a UI
diff --git a/chromium/media/gpu/video_encode_accelerator_unittest.cc b/chromium/media/gpu/video_encode_accelerator_unittest.cc
index d76b06d482f..25958a8df19 100644
--- a/chromium/media/gpu/video_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/video_encode_accelerator_unittest.cc
@@ -49,6 +49,7 @@
#include "media/base/video_frame.h"
#include "media/filters/ffmpeg_video_decoder.h"
#include "media/filters/ivf_parser.h"
+#include "media/filters/vp8_parser.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/gpu_video_encode_accelerator_factory.h"
#include "media/gpu/h264_decoder.h"
@@ -559,9 +560,10 @@ enum ClientState {
class StreamValidator {
public:
// To be called when a complete frame is found while processing a stream
- // buffer, passing true if the frame is a keyframe. Returns false if we
- // are not interested in more frames and further processing should be aborted.
- typedef base::Callback<bool(bool)> FrameFoundCallback;
+ // buffer, passing true if the frame is a keyframe and the visible size.
+ // Returns false if we are not interested in more frames and further
+ // processing should be aborted.
+ typedef base::Callback<bool(bool, const gfx::Size&)> FrameFoundCallback;
virtual ~StreamValidator() {}
@@ -578,6 +580,7 @@ class StreamValidator {
: frame_cb_(frame_cb) {}
FrameFoundCallback frame_cb_;
+ gfx::Size visible_size_;
};
class H264Validator : public StreamValidator {
@@ -635,7 +638,7 @@ void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
ASSERT_EQ(H264Parser::kOk,
h264_parser_.ParseSliceHeader(nalu, &slice_hdr));
if (IsNewPicture(slice_hdr)) {
- if (!frame_cb_.Run(keyframe))
+ if (!frame_cb_.Run(keyframe, visible_size_))
return;
ASSERT_TRUE(UpdateCurrentPicture(slice_hdr));
}
@@ -645,6 +648,11 @@ void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
case H264NALU::kSPS: {
int sps_id;
ASSERT_EQ(H264Parser::kOk, h264_parser_.ParseSPS(&sps_id));
+ // Check the visible size.
+ gfx::Rect visible_size =
+ h264_parser_.GetSPS(sps_id)->GetVisibleRect().value_or(gfx::Rect());
+ ASSERT_FALSE(visible_size.IsEmpty());
+ visible_size_ = visible_size.size();
seen_sps_ = true;
break;
}
@@ -707,16 +715,20 @@ class VP8Validator : public StreamValidator {
};
void VP8Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
- bool keyframe = !(stream[0] & 0x01);
- if (keyframe)
- seen_keyframe_ = true;
-
- EXPECT_TRUE(seen_keyframe_);
-
- frame_cb_.Run(keyframe);
// TODO(posciak): We could be getting more frames in the buffer, but there is
// no simple way to detect this. We'd need to parse the frames and go through
// partition numbers/sizes. For now assume one frame per buffer.
+ Vp8Parser parser;
+ Vp8FrameHeader header;
+ EXPECT_TRUE(parser.ParseFrame(stream, size, &header));
+ if (header.IsKeyframe()) {
+ seen_keyframe_ = true;
+ visible_size_.SetSize(header.width, header.height);
+ }
+
+ EXPECT_TRUE(seen_keyframe_);
+ ASSERT_FALSE(visible_size_.IsEmpty());
+ frame_cb_.Run(header.IsKeyframe(), visible_size_);
}
// static
@@ -1186,10 +1198,9 @@ class VEAClient : public VEAClientBase {
void RequireBitstreamBuffers(unsigned int input_count,
const gfx::Size& input_coded_size,
size_t output_buffer_size) override;
- void BitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) override;
+ void BitstreamBufferReady(
+ int32_t bitstream_buffer_id,
+ const media::BitstreamBufferMetadata& metadata) override;
private:
// Return the number of encoded frames per second.
@@ -1210,9 +1221,10 @@ class VEAClient : public VEAClientBase {
void FeedEncoderWithOutput(base::SharedMemory* shm);
// Called on finding a complete frame (with |keyframe| set to true for
- // keyframes) in the stream, to perform codec-independent, per-frame checks
- // and accounting. Returns false once we have collected all frames we needed.
- bool HandleEncodedFrame(bool keyframe);
+ // keyframes, |visible_size| for the visible size of encoded frame) in the
+ // stream, to perform codec-independent, per-frame checks and accounting.
+ // Returns false once we have collected all frames we needed.
+ bool HandleEncodedFrame(bool keyframe, const gfx::Size& visible_size);
// Ask the encoder to flush the frame.
void FlushEncoder();
@@ -1641,12 +1653,11 @@ void VEAClient::VerifyOutputTimestamp(base::TimeDelta timestamp) {
}
}
-void VEAClient::BitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) {
+void VEAClient::BitstreamBufferReady(
+ int32_t bitstream_buffer_id,
+ const media::BitstreamBufferMetadata& metadata) {
DCHECK(thread_checker_.CalledOnValidThread());
- ASSERT_LE(payload_size, output_buffer_size_);
+ ASSERT_LE(metadata.payload_size_bytes, output_buffer_size_);
IdToSHM::iterator it = output_buffers_at_client_.find(bitstream_buffer_id);
ASSERT_NE(it, output_buffers_at_client_.end());
@@ -1658,24 +1669,27 @@ void VEAClient::BitstreamBufferReady(int32_t bitstream_buffer_id,
// When flush is completed, VEA may return an extra empty buffer. Skip
// checking the buffer.
- if (verify_output_timestamp_ && payload_size > 0) {
- VerifyOutputTimestamp(timestamp);
+ if (verify_output_timestamp_ && metadata.payload_size_bytes > 0) {
+ VerifyOutputTimestamp(metadata.timestamp);
}
- encoded_stream_size_since_last_check_ += payload_size;
+ encoded_stream_size_since_last_check_ += metadata.payload_size_bytes;
const uint8_t* stream_ptr = static_cast<const uint8_t*>(shm->memory());
- if (payload_size > 0) {
+ if (metadata.payload_size_bytes > 0) {
if (stream_validator_) {
- stream_validator_->ProcessStreamBuffer(stream_ptr, payload_size);
+ stream_validator_->ProcessStreamBuffer(stream_ptr,
+ metadata.payload_size_bytes);
} else {
- HandleEncodedFrame(key_frame);
+ // We don't know the visible size of without stream validator, just
+ // send the expected value to pass the check.
+ HandleEncodedFrame(metadata.key_frame, test_stream_->visible_size);
}
if (quality_validator_) {
scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CopyFrom(
reinterpret_cast<const uint8_t*>(shm->memory()),
- static_cast<int>(payload_size)));
+ static_cast<int>(metadata.payload_size_bytes)));
quality_validator_->AddDecodeBuffer(buffer);
}
// If flush is disabled, pretend flush is done when all frames are received.
@@ -1685,16 +1699,17 @@ void VEAClient::BitstreamBufferReady(int32_t bitstream_buffer_id,
if (save_to_file_) {
if (IsVP8(test_stream_->requested_profile))
- WriteIvfFrameHeader(num_encoded_frames_ - 1, payload_size);
+ WriteIvfFrameHeader(num_encoded_frames_ - 1,
+ metadata.payload_size_bytes);
EXPECT_TRUE(base::AppendToFile(
base::FilePath::FromUTF8Unsafe(test_stream_->out_filename),
static_cast<char*>(shm->memory()),
- base::checked_cast<int>(payload_size)));
+ base::checked_cast<int>(metadata.payload_size_bytes)));
}
}
- EXPECT_EQ(key_frame, seen_keyframe_in_this_buffer_);
+ EXPECT_EQ(metadata.key_frame, seen_keyframe_in_this_buffer_);
seen_keyframe_in_this_buffer_ = false;
FeedEncoderWithOutput(shm);
@@ -1855,7 +1870,8 @@ void VEAClient::FeedEncoderWithOutput(base::SharedMemory* shm) {
encoder_->UseOutputBitstreamBuffer(bitstream_buffer);
}
-bool VEAClient::HandleEncodedFrame(bool keyframe) {
+bool VEAClient::HandleEncodedFrame(bool keyframe,
+ const gfx::Size& visible_size) {
DCHECK(thread_checker_.CalledOnValidThread());
// This would be a bug in the test, which should not ignore false
// return value from this method.
@@ -1890,6 +1906,7 @@ bool VEAClient::HandleEncodedFrame(bool keyframe) {
}
seen_keyframe_in_this_buffer_ = true;
}
+ EXPECT_EQ(test_stream_->visible_size, visible_size);
if (num_keyframes_requested_ > 0)
EXPECT_LE(num_encoded_frames_, next_keyframe_at_ + kMaxKeyframeDelay);
@@ -2173,15 +2190,14 @@ class VEANoInputClient : public SimpleVEAClientBase {
void RequireBitstreamBuffers(unsigned int input_count,
const gfx::Size& input_coded_size,
size_t output_buffer_size) override;
- void BitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) override;
+ void BitstreamBufferReady(
+ int32_t bitstream_buffer_id,
+ const media::BitstreamBufferMetadata& metadata) override;
private:
// The timer used to monitor the encoder doesn't return an output buffer in
// a period of time.
- std::unique_ptr<base::Timer> timer_;
+ std::unique_ptr<base::OneShotTimer> timer_;
};
VEANoInputClient::VEANoInputClient(ClientStateNotification<ClientState>* note)
@@ -2202,18 +2218,15 @@ void VEANoInputClient::RequireBitstreamBuffers(
output_size);
// Timer is used to make sure there is no output frame in 100ms.
- timer_.reset(
- new base::Timer(FROM_HERE, base::TimeDelta::FromMilliseconds(100),
- base::BindRepeating(&VEANoInputClient::SetState,
- base::Unretained(this), CS_FINISHED),
- false));
- timer_->Reset();
-}
-
-void VEANoInputClient::BitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) {
+ timer_.reset(new base::OneShotTimer());
+ timer_->Start(FROM_HERE, base::TimeDelta::FromMilliseconds(100),
+ base::Bind(&VEANoInputClient::SetState, base::Unretained(this),
+ CS_FINISHED));
+}
+
+void VEANoInputClient::BitstreamBufferReady(
+ int32_t bitstream_buffer_id,
+ const media::BitstreamBufferMetadata& metadata) {
DCHECK(thread_checker_.CalledOnValidThread());
SetState(CS_ERROR);
}
@@ -2231,10 +2244,9 @@ class VEACacheLineUnalignedInputClient : public SimpleVEAClientBase {
void RequireBitstreamBuffers(unsigned int input_count,
const gfx::Size& input_coded_size,
size_t output_buffer_size) override;
- void BitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) override;
+ void BitstreamBufferReady(
+ int32_t bitstream_buffer_id,
+ const media::BitstreamBufferMetadata& metadata) override;
private:
// Feed the encoder with one input frame.
@@ -2259,9 +2271,7 @@ void VEACacheLineUnalignedInputClient::RequireBitstreamBuffers(
void VEACacheLineUnalignedInputClient::BitstreamBufferReady(
int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) {
+ const media::BitstreamBufferMetadata& metadata) {
DCHECK(thread_checker_.CalledOnValidThread());
// It's enough to encode just one frame. If plane size is not aligned,
// VideoEncodeAccelerator::Encode will fail.
diff --git a/chromium/media/gpu/vp8_decoder.cc b/chromium/media/gpu/vp8_decoder.cc
index 73800ce1ecc..33a181d0d4e 100644
--- a/chromium/media/gpu/vp8_decoder.cc
+++ b/chromium/media/gpu/vp8_decoder.cc
@@ -71,27 +71,44 @@ VP8Decoder::DecodeResult VP8Decoder::Decode() {
}
}
+ // The |stream_id_|s are expected to be monotonically increasing, and we've
+ // lost (at least) a frame if this condition doesn't uphold.
+ const bool have_skipped_frame = last_decoded_stream_id_ + 1 != stream_id_ &&
+ last_decoded_stream_id_ != kInvalidId;
if (curr_frame_hdr_->IsKeyframe()) {
- gfx::Size new_pic_size(curr_frame_hdr_->width, curr_frame_hdr_->height);
- if (new_pic_size.IsEmpty())
+ const gfx::Size new_picture_size(curr_frame_hdr_->width,
+ curr_frame_hdr_->height);
+ if (new_picture_size.IsEmpty())
return kDecodeError;
- if (new_pic_size != pic_size_) {
- DVLOG(2) << "New resolution: " << new_pic_size.ToString();
- pic_size_ = new_pic_size;
+ if (new_picture_size != pic_size_) {
+ DVLOG(2) << "New resolution: " << new_picture_size.ToString();
+ pic_size_ = new_picture_size;
ref_frames_.Clear();
+ last_decoded_stream_id_ = stream_id_;
+ size_change_failure_counter_ = 0;
return kAllocateNewSurfaces;
}
state_ = kDecoding;
- } else {
- if (state_ != kDecoding) {
- // Need a resume point.
- curr_frame_hdr_ = nullptr;
- return kRanOutOfStreamData;
+ } else if (state_ != kDecoding || have_skipped_frame) {
+ // Only trust the next frame. Otherwise, new keyframe might be missed, so
+ // |pic_size_| might be stale.
+ // TODO(dshwang): if rtc decoder can know the size of inter frame, change
+ // this condition to check if new keyframe is missed.
+ // https://crbug.com/832545
+ DVLOG(4) << "Drop the frame because the size maybe stale.";
+ if (have_skipped_frame &&
+ ++size_change_failure_counter_ > kVPxMaxNumOfSizeChangeFailures) {
+ state_ = kError;
+ return kDecodeError;
}
+
+ // Need a resume point.
+ curr_frame_hdr_ = nullptr;
+ return kRanOutOfStreamData;
}
scoped_refptr<VP8Picture> pic = accelerator_->CreateVP8Picture();
@@ -103,6 +120,8 @@ VP8Decoder::DecodeResult VP8Decoder::Decode() {
return kDecodeError;
}
+ last_decoded_stream_id_ = stream_id_;
+ size_change_failure_counter_ = 0;
return kRanOutOfStreamData;
}
diff --git a/chromium/media/gpu/vp8_decoder.h b/chromium/media/gpu/vp8_decoder.h
index fd3de24040e..f08a8126cf1 100644
--- a/chromium/media/gpu/vp8_decoder.h
+++ b/chromium/media/gpu/vp8_decoder.h
@@ -91,7 +91,10 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
Vp8ReferenceFrameVector ref_frames_;
// Current stream buffer id; to be assigned to pictures decoded from it.
- int32_t stream_id_ = -1;
+ static constexpr int32_t kInvalidId = -1;
+ int32_t stream_id_ = kInvalidId;
+ int32_t last_decoded_stream_id_ = kInvalidId;
+ size_t size_change_failure_counter_ = 0;
const uint8_t* curr_frame_start_;
size_t frame_size_;
diff --git a/chromium/media/gpu/vp8_decoder_unittest.cc b/chromium/media/gpu/vp8_decoder_unittest.cc
new file mode 100644
index 00000000000..deee372b6fb
--- /dev/null
+++ b/chromium/media/gpu/vp8_decoder_unittest.cc
@@ -0,0 +1,248 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdint.h>
+
+#include <string>
+
+#include "base/command_line.h"
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "media/base/test_data_util.h"
+#include "media/gpu/vp8_decoder.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::InSequence;
+using ::testing::Invoke;
+using ::testing::Mock;
+using ::testing::Return;
+
+namespace media {
+namespace {
+
+const std::string kNullFrame = "";
+const std::string kIFrame = "vp8-I-frame-320x240";
+const std::string kPFrame = "vp8-P-frame-320x240";
+const std::string kCorruptFrame = "vp8-corrupt-I-frame";
+constexpr gfx::Size kVideoSize(320, 240);
+constexpr size_t kRequiredNumOfPictures = 9u;
+
+class MockVP8Accelerator : public VP8Decoder::VP8Accelerator {
+ public:
+ MockVP8Accelerator() = default;
+
+ MOCK_METHOD0(CreateVP8Picture, scoped_refptr<VP8Picture>());
+ MOCK_METHOD2(SubmitDecode,
+ bool(scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& reference_frames));
+ MOCK_METHOD1(OutputPicture, bool(const scoped_refptr<VP8Picture>& pic));
+};
+
+// Test VP8Decoder by feeding different VP8 frame sequences and making sure it
+// behaves as expected.
+class VP8DecoderTest : public ::testing::Test {
+ public:
+ VP8DecoderTest() = default;
+
+ void SetUp() override;
+
+ AcceleratedVideoDecoder::DecodeResult Decode(std::string input_frame_file);
+
+ protected:
+ void SkipFrame() { bitstream_id_++; }
+ void CompleteToDecodeFirstIFrame();
+
+ std::unique_ptr<VP8Decoder> decoder_;
+ MockVP8Accelerator* accelerator_ = nullptr;
+
+ private:
+ void DecodeFirstIFrame();
+
+ int32_t bitstream_id_ = 0;
+};
+
+void VP8DecoderTest::SetUp() {
+ auto mock_accelerator = std::make_unique<MockVP8Accelerator>();
+ accelerator_ = mock_accelerator.get();
+ decoder_.reset(new VP8Decoder(std::move(mock_accelerator)));
+
+ // Sets default behaviors for mock methods for convenience.
+ ON_CALL(*accelerator_, CreateVP8Picture())
+ .WillByDefault(Return(new VP8Picture()));
+ ON_CALL(*accelerator_, SubmitDecode(_, _)).WillByDefault(Return(true));
+ ON_CALL(*accelerator_, OutputPicture(_)).WillByDefault(Return(true));
+
+ DecodeFirstIFrame();
+}
+
+void VP8DecoderTest::DecodeFirstIFrame() {
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kNullFrame));
+ ASSERT_EQ(AcceleratedVideoDecoder::kAllocateNewSurfaces, Decode(kIFrame));
+ EXPECT_EQ(kVideoSize, decoder_->GetPicSize());
+ EXPECT_LE(kRequiredNumOfPictures, decoder_->GetRequiredNumOfPictures());
+}
+
+// DecodeFirstIFrame() allocates new surfaces so VP8Decoder::Decode() must be
+// called again to complete to decode the first frame.
+void VP8DecoderTest::CompleteToDecodeFirstIFrame() {
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateVP8Picture());
+ EXPECT_CALL(*accelerator_, SubmitDecode(_, _));
+ EXPECT_CALL(*accelerator_, OutputPicture(_));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kNullFrame));
+}
+
+AcceleratedVideoDecoder::DecodeResult VP8DecoderTest::Decode(
+ std::string input_frame_file) {
+ std::string bitstream;
+ if (!input_frame_file.empty()) {
+ auto input_file = GetTestDataFilePath(input_frame_file);
+ EXPECT_TRUE(base::ReadFileToString(input_file, &bitstream));
+ decoder_->SetStream(bitstream_id_++,
+ reinterpret_cast<const uint8_t*>(bitstream.data()),
+ bitstream.size());
+ }
+
+ return decoder_->Decode();
+}
+
+// Test Cases
+
+TEST_F(VP8DecoderTest, DecodeSingleFrame) {
+ CompleteToDecodeFirstIFrame();
+ ASSERT_TRUE(Mock::VerifyAndClearExpectations(accelerator_));
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+TEST_F(VP8DecoderTest, FailCreatePicture) {
+ EXPECT_CALL(*accelerator_, CreateVP8Picture()).WillOnce(Return(nullptr));
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfSurfaces, Decode(kNullFrame));
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+TEST_F(VP8DecoderTest, DecodeCorruptFrame) {
+ CompleteToDecodeFirstIFrame();
+ ASSERT_EQ(AcceleratedVideoDecoder::kDecodeError, Decode(kCorruptFrame));
+ ASSERT_TRUE(Mock::VerifyAndClearExpectations(accelerator_));
+
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+TEST_F(VP8DecoderTest, DecodeIAndPFrames) {
+ CompleteToDecodeFirstIFrame();
+
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateVP8Picture());
+ EXPECT_CALL(*accelerator_, SubmitDecode(_, _));
+ EXPECT_CALL(*accelerator_, OutputPicture(_));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kPFrame));
+ ASSERT_TRUE(Mock::VerifyAndClearExpectations(accelerator_));
+
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+TEST_F(VP8DecoderTest, DecodeIandMultiplePFrames) {
+ CompleteToDecodeFirstIFrame();
+
+ for (size_t i = 0; i < 5; i++) {
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateVP8Picture());
+ EXPECT_CALL(*accelerator_, SubmitDecode(_, _));
+ EXPECT_CALL(*accelerator_, OutputPicture(_));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kPFrame));
+ }
+
+ ASSERT_TRUE(Mock::VerifyAndClearExpectations(accelerator_));
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+TEST_F(VP8DecoderTest, DecodeMultipleIAndPFrames) {
+ CompleteToDecodeFirstIFrame();
+
+ for (size_t i = 0; i < 10; i++) {
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateVP8Picture());
+ EXPECT_CALL(*accelerator_, SubmitDecode(_, _));
+ EXPECT_CALL(*accelerator_, OutputPicture(_));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData,
+ Decode((i % 3) == 0 ? kIFrame : kPFrame));
+ }
+
+ ASSERT_TRUE(Mock::VerifyAndClearExpectations(accelerator_));
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+TEST_F(VP8DecoderTest, HaveSkippedFrames) {
+ CompleteToDecodeFirstIFrame();
+
+ SkipFrame();
+ for (size_t i = 0; i < 5; i++) {
+ // VP8Decoder::Decode() gives up decoding it and returns early.
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kPFrame));
+ }
+
+ ASSERT_TRUE(Mock::VerifyAndClearExpectations(accelerator_));
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+// Verify that |decoder_| returns kDecodeError if too many kPFrames are received
+// while expecting a kIFrame.
+TEST_F(VP8DecoderTest, HaveSkippedFramesAtMaxNumOfSizeChangeFailures) {
+ CompleteToDecodeFirstIFrame();
+
+ SkipFrame();
+ for (size_t i = 0;
+ i < AcceleratedVideoDecoder::kVPxMaxNumOfSizeChangeFailures; i++) {
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kPFrame));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kDecodeError, Decode(kPFrame));
+
+ ASSERT_TRUE(Mock::VerifyAndClearExpectations(accelerator_));
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+// Verify that new kIFrame recovers |decoder_| to decode the frame when the
+// previous I frame is missing.
+TEST_F(VP8DecoderTest, RecoverFromSkippedFrames) {
+ CompleteToDecodeFirstIFrame();
+
+ SkipFrame();
+ for (size_t i = 0; i < 5; i++)
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kPFrame));
+
+ // The new I frame recovers to decode it correctly.
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateVP8Picture());
+ EXPECT_CALL(*accelerator_, SubmitDecode(_, _));
+ EXPECT_CALL(*accelerator_, OutputPicture(_));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kIFrame));
+
+ for (size_t i = 0; i < 5; i++) {
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateVP8Picture());
+ EXPECT_CALL(*accelerator_, SubmitDecode(_, _));
+ EXPECT_CALL(*accelerator_, OutputPicture(_));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kPFrame));
+ }
+
+ ASSERT_TRUE(Mock::VerifyAndClearExpectations(accelerator_));
+ ASSERT_TRUE(decoder_->Flush());
+}
+
+} // namespace
+} // namespace media
diff --git a/chromium/media/gpu/vp9_decoder.cc b/chromium/media/gpu/vp9_decoder.cc
index 83a6041cbeb..8909156d98b 100644
--- a/chromium/media/gpu/vp9_decoder.cc
+++ b/chromium/media/gpu/vp9_decoder.cc
@@ -127,9 +127,15 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
if (!curr_frame_hdr_->IsKeyframe()) {
// TODO(posciak): This is doable, but requires a few modifications to
// VDA implementations to allow multiple picture buffer sets in flight.
+ // http://crbug.com/832264
DVLOG(1) << "Resolution change currently supported for keyframes only";
- SetError();
- return kDecodeError;
+ if (++size_change_failure_counter_ > kVPxMaxNumOfSizeChangeFailures) {
+ SetError();
+ return kDecodeError;
+ }
+
+ curr_frame_hdr_.reset();
+ return kRanOutOfStreamData;
}
// TODO(posciak): This requires us to be on a keyframe (see above) and is
@@ -140,6 +146,7 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
ref_frame = nullptr;
pic_size_ = new_pic_size;
+ size_change_failure_counter_ = 0;
return kAllocateNewSurfaces;
}
@@ -160,7 +167,7 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
pic->set_visible_rect(new_render_rect);
pic->set_bitstream_id(stream_id_);
- pic->frame_hdr.reset(curr_frame_hdr_.release());
+ pic->frame_hdr = std::move(curr_frame_hdr_);
if (!DecodeAndOutputPicture(pic)) {
SetError();
diff --git a/chromium/media/gpu/vp9_decoder.h b/chromium/media/gpu/vp9_decoder.h
index 5190435a347..de746baa166 100644
--- a/chromium/media/gpu/vp9_decoder.h
+++ b/chromium/media/gpu/vp9_decoder.h
@@ -146,6 +146,8 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
// Current coded resolution.
gfx::Size pic_size_;
+ size_t size_change_failure_counter_ = 0;
+
const std::unique_ptr<VP9Accelerator> accelerator_;
Vp9Parser parser_;
diff --git a/chromium/media/gpu/vt_video_decode_accelerator_mac.cc b/chromium/media/gpu/vt_video_decode_accelerator_mac.cc
index db101834837..68dbfafff52 100644
--- a/chromium/media/gpu/vt_video_decode_accelerator_mac.cc
+++ b/chromium/media/gpu/vt_video_decode_accelerator_mac.cc
@@ -29,7 +29,6 @@
#include "base/trace_event/process_memory_dump.h"
#include "base/version.h"
#include "media/base/limits.h"
-#include "media/gpu/shared_memory_region.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/gl_image_io_surface.h"
diff --git a/chromium/media/gpu/vt_video_encode_accelerator_mac.cc b/chromium/media/gpu/vt_video_encode_accelerator_mac.cc
index 29f733e3ba9..171c923bd8f 100644
--- a/chromium/media/gpu/vt_video_encode_accelerator_mac.cc
+++ b/chromium/media/gpu/vt_video_encode_accelerator_mac.cc
@@ -440,8 +440,9 @@ void VTVideoEncodeAccelerator::ReturnBitstreamBuffer(
DVLOG(2) << " frame dropped";
client_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id, 0,
- false, encode_output->capture_timestamp));
+ base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id,
+ BitstreamBufferMetadata(0, false,
+ encode_output->capture_timestamp)));
return;
}
@@ -465,7 +466,8 @@ void VTVideoEncodeAccelerator::ReturnBitstreamBuffer(
client_task_runner_->PostTask(
FROM_HERE,
base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id,
- used_buffer_size, keyframe, encode_output->capture_timestamp));
+ BitstreamBufferMetadata(used_buffer_size, keyframe,
+ encode_output->capture_timestamp)));
}
bool VTVideoEncodeAccelerator::ResetCompressionSession() {
diff --git a/chromium/media/gpu/windows/OWNERS b/chromium/media/gpu/windows/OWNERS
new file mode 100644
index 00000000000..badaf4dea92
--- /dev/null
+++ b/chromium/media/gpu/windows/OWNERS
@@ -0,0 +1,2 @@
+# For encoder files.
+per-file *encode*=emircan@chromium.org
diff --git a/chromium/media/gpu/windows/d3d11_cdm_proxy.cc b/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
index bf92dd0e9b2..8b84e5af15c 100644
--- a/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
+++ b/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
@@ -9,6 +9,10 @@
#include "base/bind.h"
#include "base/logging.h"
+#include "base/power_monitor/power_monitor.h"
+#include "base/power_monitor/power_observer.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/win/object_watcher.h"
#include "media/base/callback_registry.h"
#include "media/base/cdm_context.h"
#include "media/base/cdm_proxy_context.h"
@@ -18,6 +22,39 @@ namespace media {
namespace {
+// Checks whether there is a hardware protected key exhange method.
+// https://msdn.microsoft.com/en-us/library/windows/desktop/dn894125(v=vs.85).aspx
+// The key exhange capabilities are checked using these.
+// https://msdn.microsoft.com/en-us/library/windows/desktop/hh447640%28v=vs.85%29.aspx?f=255&MSPPError=-2147217396
+// https://msdn.microsoft.com/en-us/library/windows/desktop/hh447782(v=vs.85).aspx
+bool CanDoHardwareProtectedKeyExchange(
+ Microsoft::WRL::ComPtr<ID3D11VideoDevice> video_device,
+ const GUID& crypto_type) {
+ D3D11_VIDEO_CONTENT_PROTECTION_CAPS caps = {};
+ HRESULT hresult = video_device->GetContentProtectionCaps(
+ &crypto_type, &D3D11_DECODER_PROFILE_H264_VLD_NOFGT, &caps);
+ if (FAILED(hresult)) {
+ DVLOG(1) << "Failed to get content protection caps.";
+ return false;
+ }
+
+ for (uint32_t i = 0; i < caps.KeyExchangeTypeCount; ++i) {
+ GUID kex_guid = {};
+ hresult = video_device->CheckCryptoKeyExchange(
+ &crypto_type, &D3D11_DECODER_PROFILE_H264_VLD_NOFGT, i, &kex_guid);
+ if (FAILED(hresult)) {
+ DVLOG(1) << "Failed to get key exchange GUID";
+ return false;
+ }
+
+ if (kex_guid == D3D11_KEY_EXCHANGE_HW_PROTECTION)
+ return true;
+ }
+
+ DVLOG(1) << "Hardware key exchange is not supported.";
+ return false;
+}
+
class D3D11CdmProxyContext : public CdmProxyContext {
public:
explicit D3D11CdmProxyContext(const GUID& key_info_guid)
@@ -41,6 +78,9 @@ class D3D11CdmProxyContext : public CdmProxyContext {
key_info_map_.erase(key_id_str);
}
+ // Removes all keys from the context.
+ void RemoveAllKeys() { key_info_map_.clear(); }
+
// CdmProxyContext implementation.
base::Optional<D3D11DecryptContext> GetD3D11DecryptContext(
const std::string& key_id) override {
@@ -85,6 +125,62 @@ class D3D11CdmProxyContext : public CdmProxyContext {
} // namespace
+// Watches for any content protection teardown events.
+// If the instance has been started for watching, the destructor will
+// automatically stop watching.
+class D3D11CdmProxy::HardwareEventWatcher
+ : public base::win::ObjectWatcher::Delegate,
+ public base::PowerObserver {
+ public:
+ ~HardwareEventWatcher() override;
+
+ // |teardown_callback| is called on the current sequence.
+ // Returns an instance if it starts watching for events, otherwise returns
+ // nullptr.
+ static std::unique_ptr<HardwareEventWatcher> Create(
+ ComPtr<ID3D11Device> device,
+ base::RepeatingClosure teardown_callback);
+
+ private:
+ HardwareEventWatcher(ComPtr<ID3D11Device> device,
+ base::RepeatingClosure teardown_callback);
+
+ // Start watching for events.
+ bool StartWatching();
+
+ // Registers for hardware content protection teardown events.
+ // Return true on success.
+ bool RegisterHardwareContentProtectionTeardown(ComPtr<ID3D11Device> device);
+
+ // Regiesters for power events, specifically power suspend event.
+ // Returns true on success.
+ bool RegisterPowerEvents();
+
+ // base::win::ObjectWatcher::Delegate implementation.
+ void OnObjectSignaled(HANDLE object) override;
+
+ // base::PowerObserver implementation. Other power events are not relevant to
+ // this class.
+ void OnSuspend() override;
+
+ // Stops watching for events. Good for clean up.
+ void StopWatching();
+
+ // IDXGIAdapter3::RegisterHardwareContentProtectionTeardownStatusEvent
+ // allows watching for teardown events. It is queried thru the following
+ // Devices.
+ ComPtr<ID3D11Device> device_;
+ ComPtr<IDXGIDevice2> dxgi_device_;
+ ComPtr<IDXGIAdapter3> dxgi_adapter_;
+
+ // Cookie, event, and watcher used for watching events from
+ // RegisterHardwareContentProtectionTeardownStatusEvent.
+ DWORD teardown_event_cookie_ = 0u;
+ base::WaitableEvent content_protection_teardown_event_;
+ base::RepeatingClosure teardown_callback_;
+ base::win::ObjectWatcher teardown_status_watcher_;
+};
+
class D3D11CdmContext : public CdmContext {
public:
explicit D3D11CdmContext(const GUID& key_info_guid)
@@ -103,6 +199,9 @@ class D3D11CdmContext : public CdmContext {
cdm_proxy_context_.RemoveKey(crypto_session, key_id);
}
+ // Removes all keys from the context.
+ void RemoveAllKeys() { cdm_proxy_context_.RemoveAllKeys(); }
+
base::WeakPtr<D3D11CdmContext> GetWeakPtr() {
return weak_factory_.GetWeakPtr();
}
@@ -140,7 +239,8 @@ D3D11CdmProxy::D3D11CdmProxy(const GUID& crypto_type,
protocol_(protocol),
function_id_map_(function_id_map),
cdm_context_(std::make_unique<D3D11CdmContext>(crypto_type)),
- create_device_func_(base::BindRepeating(D3D11CreateDevice)) {}
+ create_device_func_(base::BindRepeating(D3D11CreateDevice)),
+ weak_factory_(this) {}
D3D11CdmProxy::~D3D11CdmProxy() {}
@@ -176,6 +276,19 @@ void D3D11CdmProxy::Initialize(Client* client, InitializeCB init_cb) {
return;
}
+ // TODO(rkuroiwa): This should be registered iff
+ // D3D11_CONTENT_PROTECTION_CAPS_HARDWARE_TEARDOWN is set in the capabilties.
+ hardware_event_watcher_ = HardwareEventWatcher::Create(
+ device_, base::BindRepeating(
+ &D3D11CdmProxy::NotifyHardwareContentProtectionTeardown,
+ weak_factory_.GetWeakPtr()));
+ if (!hardware_event_watcher_) {
+ DLOG(ERROR)
+ << "Failed to start waching for content protection teardown events.";
+ failed();
+ return;
+ }
+
hresult = device_.CopyTo(video_device_.GetAddressOf());
if (FAILED(hresult)) {
DLOG(ERROR) << "Failed to get ID3D11VideoDevice: " << hresult;
@@ -183,6 +296,12 @@ void D3D11CdmProxy::Initialize(Client* client, InitializeCB init_cb) {
return;
}
+ if (!CanDoHardwareProtectedKeyExchange(video_device_, crypto_type_)) {
+ DLOG(ERROR) << "Cannot do hardware proteted key exhange.";
+ failed();
+ return;
+ }
+
hresult = device_context_.CopyTo(video_context_.GetAddressOf());
if (FAILED(hresult)) {
DLOG(ERROR) << "Failed to get ID3D11VideoContext: " << hresult;
@@ -204,7 +323,7 @@ void D3D11CdmProxy::Initialize(Client* client, InitializeCB init_cb) {
return;
}
- Microsoft::WRL::ComPtr<ID3D11CryptoSession> csme_crypto_session;
+ ComPtr<ID3D11CryptoSession> csme_crypto_session;
hresult = video_device_->CreateCryptoSession(
&crypto_type_, &D3D11_DECODER_PROFILE_H264_VLD_NOFGT,
&D3D11_KEY_EXCHANGE_HW_PROTECTION, csme_crypto_session.GetAddressOf());
@@ -259,8 +378,7 @@ void D3D11CdmProxy::Process(Function function,
return;
}
- Microsoft::WRL::ComPtr<ID3D11CryptoSession>& crypto_session =
- crypto_session_it->second;
+ ComPtr<ID3D11CryptoSession>& crypto_session = crypto_session_it->second;
D3D11_KEY_EXCHANGE_HW_PROTECTION_DATA key_exchange_data = {};
key_exchange_data.HWProtectionFunctionID = function_id_it->second;
@@ -302,10 +420,10 @@ void D3D11CdmProxy::Process(Function function,
return;
}
- std::vector<uint8_t> output_vec;
- output_vec.reserve(expected_output_data_size);
- memcpy(output_vec.data(), output_data->pbOutput, expected_output_data_size);
- std::move(process_cb).Run(Status::kOk, output_vec);
+ std::move(process_cb)
+ .Run(Status::kOk, std::vector<uint8_t>(
+ output_data->pbOutput,
+ output_data->pbOutput + expected_output_data_size));
return;
}
@@ -324,7 +442,7 @@ void D3D11CdmProxy::CreateMediaCryptoSession(
return;
}
- Microsoft::WRL::ComPtr<ID3D11CryptoSession> media_crypto_session;
+ ComPtr<ID3D11CryptoSession> media_crypto_session;
HRESULT hresult = video_device_->CreateCryptoSession(
&crypto_type_, &D3D11_DECODER_PROFILE_H264_VLD_NOFGT, &crypto_type_,
media_crypto_session.GetAddressOf());
@@ -389,8 +507,112 @@ void D3D11CdmProxy::RemoveKey(uint32_t crypto_session_id,
cdm_context_->RemoveKey(crypto_session_it->second.Get(), key_id);
}
-void D3D11CdmProxy::SetCreateDeviceCallbackForTesting(CreateDeviceCB callback) {
+void D3D11CdmProxy::SetCreateDeviceCallbackForTesting(
+ D3D11CreateDeviceCB callback) {
create_device_func_ = std::move(callback);
}
+void D3D11CdmProxy::NotifyHardwareContentProtectionTeardown() {
+ cdm_context_->RemoveAllKeys();
+ if (client_)
+ client_->NotifyHardwareReset();
+}
+
+D3D11CdmProxy::HardwareEventWatcher::~HardwareEventWatcher() {
+ StopWatching();
+}
+
+std::unique_ptr<D3D11CdmProxy::HardwareEventWatcher>
+D3D11CdmProxy::HardwareEventWatcher::Create(
+ Microsoft::WRL::ComPtr<ID3D11Device> device,
+ base::RepeatingClosure teardown_callback) {
+ std::unique_ptr<HardwareEventWatcher> event_watcher = base::WrapUnique(
+ new HardwareEventWatcher(device, std::move(teardown_callback)));
+ if (!event_watcher->StartWatching())
+ return nullptr;
+ return event_watcher;
+}
+
+D3D11CdmProxy::HardwareEventWatcher::HardwareEventWatcher(
+ Microsoft::WRL::ComPtr<ID3D11Device> device,
+ base::RepeatingClosure teardown_callback)
+ : device_(device), teardown_callback_(std::move(teardown_callback)) {}
+
+bool D3D11CdmProxy::HardwareEventWatcher::StartWatching() {
+ if (!RegisterPowerEvents() ||
+ !RegisterHardwareContentProtectionTeardown(device_)) {
+ StopWatching();
+ return false;
+ }
+
+ return true;
+}
+
+bool D3D11CdmProxy::HardwareEventWatcher::
+ RegisterHardwareContentProtectionTeardown(ComPtr<ID3D11Device> device) {
+ device_ = device;
+ HRESULT hresult = device_.CopyTo(dxgi_device_.ReleaseAndGetAddressOf());
+ if (FAILED(hresult)) {
+ DVLOG(1) << "Failed to get dxgi device from device: "
+ << logging::SystemErrorCodeToString(hresult);
+ return false;
+ }
+
+ hresult = dxgi_device_->GetParent(
+ IID_PPV_ARGS(dxgi_adapter_.ReleaseAndGetAddressOf()));
+ if (FAILED(hresult)) {
+ DVLOG(1) << "Failed to get dxgi adapter from dxgi device: "
+ << logging::SystemErrorCodeToString(hresult);
+ return false;
+ }
+
+ if (!teardown_status_watcher_.StartWatchingOnce(
+ content_protection_teardown_event_.handle(), this)) {
+ DVLOG(1) << "Failed to watch tear down event.";
+ return false;
+ }
+
+ hresult = dxgi_adapter_->RegisterHardwareContentProtectionTeardownStatusEvent(
+ content_protection_teardown_event_.handle(), &teardown_event_cookie_);
+ if (FAILED(hresult)) {
+ DVLOG(1)
+ << "Failed to register for HardwareContentProtectionTeardownStatus: "
+ << logging::SystemErrorCodeToString(hresult);
+ return false;
+ }
+
+ return true;
+}
+
+bool D3D11CdmProxy::HardwareEventWatcher::RegisterPowerEvents() {
+ base::PowerMonitor* power_monitor = base::PowerMonitor::Get();
+ if (!power_monitor) {
+ DVLOG(1) << "Power monitor not available.";
+ return false;
+ }
+
+ power_monitor->AddObserver(this);
+ return true;
+}
+
+void D3D11CdmProxy::HardwareEventWatcher::OnObjectSignaled(HANDLE object) {
+ DCHECK_EQ(object, content_protection_teardown_event_.handle());
+ teardown_callback_.Run();
+}
+
+void D3D11CdmProxy::HardwareEventWatcher::OnSuspend() {
+ teardown_callback_.Run();
+}
+
+void D3D11CdmProxy::HardwareEventWatcher::StopWatching() {
+ if (dxgi_adapter_) {
+ dxgi_adapter_->UnregisterHardwareContentProtectionTeardownStatus(
+ teardown_event_cookie_);
+ }
+ teardown_status_watcher_.StopWatching();
+ base::PowerMonitor* power_monitor = base::PowerMonitor::Get();
+ if (power_monitor)
+ power_monitor->RemoveObserver(this);
+}
+
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_cdm_proxy.h b/chromium/media/gpu/windows/d3d11_cdm_proxy.h
index ce444ed7820..eb24b14d759 100644
--- a/chromium/media/gpu/windows/d3d11_cdm_proxy.h
+++ b/chromium/media/gpu/windows/d3d11_cdm_proxy.h
@@ -8,13 +8,16 @@
#include "media/cdm/cdm_proxy.h"
#include <d3d11_1.h>
+#include <dxgi1_4.h>
#include <wrl/client.h>
#include <map>
#include <vector>
#include "base/callback.h"
+#include "base/memory/weak_ptr.h"
#include "media/gpu/media_gpu_export.h"
+#include "media/gpu/windows/d3d11_create_device_cb.h"
namespace media {
@@ -24,20 +27,6 @@ class D3D11CdmContext;
class MEDIA_GPU_EXPORT D3D11CdmProxy : public CdmProxy {
public:
using FunctionIdMap = std::map<Function, uint32_t>;
- // The signature matches D3D11CreateDevice(). decltype(D3D11CreateDevice) does
- // not work because __attribute__((stdcall)) gets appended, and the template
- // instantiation fails.
- using CreateDeviceCB =
- base::RepeatingCallback<HRESULT(IDXGIAdapter*,
- D3D_DRIVER_TYPE,
- HMODULE,
- UINT,
- const D3D_FEATURE_LEVEL*,
- UINT,
- UINT,
- ID3D11Device**,
- D3D_FEATURE_LEVEL*,
- ID3D11DeviceContext**)>;
// |crypto_type| is the ID that is used to do crypto session operations. This
// includes creating a crypto session with
@@ -73,12 +62,16 @@ class MEDIA_GPU_EXPORT D3D11CdmProxy : public CdmProxy {
void RemoveKey(uint32_t crypto_session_id,
const std::vector<uint8_t>& key_id) override;
- void SetCreateDeviceCallbackForTesting(CreateDeviceCB callback);
+ void SetCreateDeviceCallbackForTesting(D3D11CreateDeviceCB callback);
private:
template <typename T>
using ComPtr = Microsoft::WRL::ComPtr<T>;
+ class HardwareEventWatcher;
+
+ void NotifyHardwareContentProtectionTeardown();
+
const GUID crypto_type_;
const CdmProxy::Protocol protocol_;
const FunctionIdMap function_id_map_;
@@ -89,7 +82,7 @@ class MEDIA_GPU_EXPORT D3D11CdmProxy : public CdmProxy {
// order to inject D3D11CreateDevice() function for testing, this member is
// required. The test will replace this with a function that returns a mock
// devices.
- CreateDeviceCB create_device_func_;
+ D3D11CreateDeviceCB create_device_func_;
// Counter for assigning IDs to crypto sessions.
uint32_t next_crypto_session_id_ = 1;
@@ -108,6 +101,8 @@ class MEDIA_GPU_EXPORT D3D11CdmProxy : public CdmProxy {
ComPtr<ID3D11VideoContext> video_context_;
ComPtr<ID3D11VideoContext1> video_context1_;
+ std::unique_ptr<HardwareEventWatcher> hardware_event_watcher_;
+
// Crypto session ID -> actual crypto session.
std::map<uint32_t, ComPtr<ID3D11CryptoSession>> crypto_session_map_;
@@ -116,6 +111,8 @@ class MEDIA_GPU_EXPORT D3D11CdmProxy : public CdmProxy {
UINT private_input_size_ = 0;
UINT private_output_size_ = 0;
+ base::WeakPtrFactory<D3D11CdmProxy> weak_factory_;
+
DISALLOW_COPY_AND_ASSIGN(D3D11CdmProxy);
};
diff --git a/chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc b/chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc
index 4595e696545..aa51929f056 100644
--- a/chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc
@@ -9,37 +9,51 @@
#include <initguid.h>
#include "base/bind.h"
+#include "base/power_monitor/power_monitor.h"
+#include "base/power_monitor/power_monitor_source.h"
+#include "base/run_loop.h"
+#include "base/test/scoped_task_environment.h"
#include "media/base/cdm_proxy_context.h"
#include "media/gpu/windows/d3d11_mocks.h"
#include "testing/gtest/include/gtest/gtest.h"
+using ::testing::_;
using ::testing::AllOf;
using ::testing::AtLeast;
+using ::testing::DoAll;
using ::testing::Invoke;
+using ::testing::Lt;
using ::testing::Ne;
using ::testing::Pointee;
using ::testing::Return;
using ::testing::SaveArg;
using ::testing::SetArgPointee;
using ::testing::WithArgs;
-using ::testing::_;
namespace media {
namespace {
-// Use this function to create a mock so that they are ref-counted correctly.
-template <typename Interface>
-Microsoft::WRL::ComPtr<Interface> CreateMock() {
- Interface* mock = new Interface();
- mock->AddRef();
- return mock;
-}
+// TODO(rkuroiwa): Although inheriting from different classes, there are several
+// mock CdmProxy clients already. They all have NotifyHardwareReset(), so share
+// a single mock class that inherits from all the CdmProxy client classes.
+class MockProxyClient : public CdmProxy::Client {
+ public:
+ MOCK_METHOD0(NotifyHardwareReset, void());
+};
+
+class MockPowerMonitorSource : public base::PowerMonitorSource {
+ public:
+ // Use this method to send a power suspend event.
+ void Suspend() { ProcessPowerEvent(SUSPEND_EVENT); }
+
+ MOCK_METHOD0(Shutdown, void());
+ MOCK_METHOD0(IsOnBatteryPowerImpl, bool());
+};
// The values doesn't matter as long as this is consistently used thruout the
// test.
-const CdmProxy::Protocol kTestProtocol =
- CdmProxy::Protocol::kIntelConvergedSecurityAndManageabilityEngine;
+const CdmProxy::Protocol kTestProtocol = CdmProxy::Protocol::kIntel;
const CdmProxy::Function kTestFunction =
CdmProxy::Function::kIntelNegotiateCryptoSessionKeyExchange;
const uint32_t kTestFunctionId = 123;
@@ -51,12 +65,6 @@ DEFINE_GUID(CRYPTO_TYPE_GUID,
} // namespace
-// Class for mocking D3D11CreateDevice() function.
-class D3D11CreateDeviceMock {
- public:
- MOCK_METHOD10(Create, D3D11CdmProxy::CreateDeviceCB::RunType);
-};
-
// Class for mocking the callbacks that get passed to the proxy methods.
class CallbackMock {
public:
@@ -72,58 +80,169 @@ class D3D11CdmProxyTest : public ::testing::Test {
std::map<CdmProxy::Function, uint32_t> function_id_map;
function_id_map[kTestFunction] = kTestFunctionId;
+ auto mock_power_monitor_source = std::make_unique<MockPowerMonitorSource>();
+ mock_power_monitor_source_ = mock_power_monitor_source.get();
+ power_monitor_ = std::make_unique<base::PowerMonitor>(
+ std::move(mock_power_monitor_source));
+
proxy_ = std::make_unique<D3D11CdmProxy>(CRYPTO_TYPE_GUID, kTestProtocol,
function_id_map);
- device_mock_ = CreateMock<D3D11DeviceMock>();
- video_device_mock_ = CreateMock<D3D11VideoDeviceMock>();
- video_device1_mock_ = CreateMock<D3D11VideoDevice1Mock>();
- crypto_session_mock_ = CreateMock<D3D11CryptoSessionMock>();
- device_context_mock_ = CreateMock<D3D11DeviceContextMock>();
- video_context_mock_ = CreateMock<D3D11VideoContextMock>();
- video_context1_mock_ = CreateMock<D3D11VideoContext1Mock>();
+ device_mock_ = CreateD3D11Mock<D3D11DeviceMock>();
+ video_device_mock_ = CreateD3D11Mock<D3D11VideoDeviceMock>();
+ video_device1_mock_ = CreateD3D11Mock<D3D11VideoDevice1Mock>();
+ crypto_session_mock_ = CreateD3D11Mock<D3D11CryptoSessionMock>();
+ device_context_mock_ = CreateD3D11Mock<D3D11DeviceContextMock>();
+ video_context_mock_ = CreateD3D11Mock<D3D11VideoContextMock>();
+ video_context1_mock_ = CreateD3D11Mock<D3D11VideoContext1Mock>();
+ dxgi_device_ = CreateD3D11Mock<DXGIDevice2Mock>();
+ dxgi_adapter_ = CreateD3D11Mock<DXGIAdapter3Mock>();
+
+ // These flags are a reasonable subset of flags to get HARDWARE protected
+ // playback.
+ content_protection_caps_.Caps =
+ D3D11_CONTENT_PROTECTION_CAPS_HARDWARE |
+ D3D11_CONTENT_PROTECTION_CAPS_HARDWARE_PROTECT_UNCOMPRESSED |
+ D3D11_CONTENT_PROTECTION_CAPS_HARDWARE_PROTECTED_MEMORY_PAGEABLE |
+ D3D11_CONTENT_PROTECTION_CAPS_HARDWARE_TEARDOWN |
+ D3D11_CONTENT_PROTECTION_CAPS_HARDWARE_DRM_COMMUNICATION;
+ // 1 for the mock behavior below for CheckCryptoKeyExchange().
+ content_protection_caps_.KeyExchangeTypeCount = 1;
+ // This is arbitrary but 1 is reasonable, meaning doesn't need to be
+ // aligned.
+ content_protection_caps_.BlockAlignmentSize = 1;
+ // This value is arbitrary.
+ content_protection_caps_.ProtectedMemorySize = 10000000;
+
+ OnCallsForInitialize();
proxy_->SetCreateDeviceCallbackForTesting(
base::BindRepeating(&D3D11CreateDeviceMock::Create,
base::Unretained(&create_device_mock_)));
}
+
+ // Sets up ON_CALLs for the mock objects. These can be overriden with
+ // EXPECT_CALLs.
+ // |content_protection_caps_| should be set.
+ void OnCallsForInitialize() {
+ ON_CALL(create_device_mock_,
+ Create(_, D3D_DRIVER_TYPE_HARDWARE, _, _, _, _, _, _, _, _))
+ .WillByDefault(
+ DoAll(AddRefAndSetArgPointee<7>(device_mock_.Get()),
+ AddRefAndSetArgPointee<9>(device_context_mock_.Get()),
+ Return(S_OK)));
+
+ ON_CALL(*device_mock_.Get(), QueryInterface(IID_ID3D11VideoDevice, _))
+ .WillByDefault(DoAll(
+ AddRefAndSetArgPointee<1>(video_device_mock_.Get()), Return(S_OK)));
+
+ ON_CALL(*device_mock_.Get(), QueryInterface(IID_ID3D11VideoDevice1, _))
+ .WillByDefault(
+ DoAll(AddRefAndSetArgPointee<1>(video_device1_mock_.Get()),
+ Return(S_OK)));
+
+ ON_CALL(*device_mock_.Get(), QueryInterface(IID_IDXGIDevice2, _))
+ .WillByDefault(
+ DoAll(AddRefAndSetArgPointee<1>(dxgi_device_.Get()), Return(S_OK)));
+
+ ON_CALL(*dxgi_device_.Get(), GetParent(IID_IDXGIAdapter3, _))
+ .WillByDefault(DoAll(AddRefAndSetArgPointee<1>(dxgi_adapter_.Get()),
+ Return(S_OK)));
+
+ ON_CALL(*dxgi_adapter_.Get(),
+ RegisterHardwareContentProtectionTeardownStatusEvent(_, _))
+ .WillByDefault(DoAll(SaveArg<0>(&teardown_event_), Return(S_OK)));
+
+ ON_CALL(*device_context_mock_.Get(),
+ QueryInterface(IID_ID3D11VideoContext, _))
+ .WillByDefault(
+ DoAll(AddRefAndSetArgPointee<1>(video_context_mock_.Get()),
+ Return(S_OK)));
+
+ ON_CALL(*device_context_mock_.Get(),
+ QueryInterface(IID_ID3D11VideoContext1, _))
+ .WillByDefault(
+ DoAll(AddRefAndSetArgPointee<1>(video_context1_mock_.Get()),
+ Return(S_OK)));
+
+ ON_CALL(*video_device_mock_.Get(),
+ CreateCryptoSession(Pointee(CRYPTO_TYPE_GUID), _,
+ Pointee(D3D11_KEY_EXCHANGE_HW_PROTECTION), _))
+ .WillByDefault(
+ DoAll(AddRefAndSetArgPointee<3>(crypto_session_mock_.Get()),
+ Return(S_OK)));
+
+ ON_CALL(
+ *video_device1_mock_.Get(),
+ GetCryptoSessionPrivateDataSize(Pointee(CRYPTO_TYPE_GUID), _, _, _, _))
+ .WillByDefault(DoAll(SetArgPointee<3>(kPrivateInputSize),
+ SetArgPointee<4>(kPrivateOutputSize),
+ Return(S_OK)));
+
+ ON_CALL(*video_device_mock_.Get(), GetContentProtectionCaps(_, _, _))
+ .WillByDefault(
+ DoAll(SetArgPointee<2>(content_protection_caps_), Return(S_OK)));
+
+ ON_CALL(*video_device_mock_.Get(), CheckCryptoKeyExchange(_, _, Lt(1u), _))
+ .WillByDefault(DoAll(SetArgPointee<3>(D3D11_KEY_EXCHANGE_HW_PROTECTION),
+ Return(S_OK)));
+ }
+
// Helper method to do Initialize(). Only useful if the test doesn't require
// access to the mocks later.
- void Initialize(CdmProxy::InitializeCB callback) {
+ void Initialize(CdmProxy::Client* client, CdmProxy::InitializeCB callback) {
EXPECT_CALL(create_device_mock_,
Create(_, D3D_DRIVER_TYPE_HARDWARE, _, _, _, _, _, _, _, _))
- .WillOnce(DoAll(SetArgPointee<7>(device_mock_.Get()),
- SetArgPointee<9>(device_context_mock_.Get()),
+ .WillOnce(DoAll(AddRefAndSetArgPointee<7>(device_mock_.Get()),
+ AddRefAndSetArgPointee<9>(device_context_mock_.Get()),
Return(S_OK)));
EXPECT_CALL(*device_mock_.Get(), QueryInterface(IID_ID3D11VideoDevice, _))
.Times(AtLeast(1))
+ .WillRepeatedly(DoAll(
+ AddRefAndSetArgPointee<1>(video_device_mock_.Get()), Return(S_OK)));
+
+ EXPECT_CALL(*device_mock_.Get(), QueryInterface(IID_IDXGIDevice2, _))
+ .Times(AtLeast(1))
.WillRepeatedly(
- DoAll(SetArgPointee<1>(video_device_mock_.Get()), Return(S_OK)));
+ DoAll(AddRefAndSetArgPointee<1>(dxgi_device_.Get()), Return(S_OK)));
+
+ EXPECT_CALL(*dxgi_device_.Get(), GetParent(IID_IDXGIAdapter3, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(DoAll(AddRefAndSetArgPointee<1>(dxgi_adapter_.Get()),
+ Return(S_OK)));
+
+ EXPECT_CALL(*dxgi_adapter_.Get(),
+ RegisterHardwareContentProtectionTeardownStatusEvent(_, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(DoAll(SaveArg<0>(&teardown_event_), Return(S_OK)));
EXPECT_CALL(*device_mock_.Get(), QueryInterface(IID_ID3D11VideoDevice1, _))
.Times(AtLeast(1))
.WillRepeatedly(
- DoAll(SetArgPointee<1>(video_device1_mock_.Get()), Return(S_OK)));
+ DoAll(AddRefAndSetArgPointee<1>(video_device1_mock_.Get()),
+ Return(S_OK)));
EXPECT_CALL(*device_context_mock_.Get(),
QueryInterface(IID_ID3D11VideoContext, _))
.Times(AtLeast(1))
.WillRepeatedly(
- DoAll(SetArgPointee<1>(video_context_mock_.Get()), Return(S_OK)));
+ DoAll(AddRefAndSetArgPointee<1>(video_context_mock_.Get()),
+ Return(S_OK)));
EXPECT_CALL(*device_context_mock_.Get(),
QueryInterface(IID_ID3D11VideoContext1, _))
.Times(AtLeast(1))
.WillRepeatedly(
- DoAll(SetArgPointee<1>(video_context1_mock_.Get()), Return(S_OK)));
+ DoAll(AddRefAndSetArgPointee<1>(video_context1_mock_.Get()),
+ Return(S_OK)));
EXPECT_CALL(
*video_device_mock_.Get(),
CreateCryptoSession(Pointee(CRYPTO_TYPE_GUID), _,
Pointee(D3D11_KEY_EXCHANGE_HW_PROTECTION), _))
- .WillOnce(
- DoAll(SetArgPointee<3>(crypto_session_mock_.Get()), Return(S_OK)));
+ .WillOnce(DoAll(AddRefAndSetArgPointee<3>(crypto_session_mock_.Get()),
+ Return(S_OK)));
EXPECT_CALL(
*video_device1_mock_.Get(),
@@ -131,7 +250,7 @@ class D3D11CdmProxyTest : public ::testing::Test {
.WillOnce(DoAll(SetArgPointee<3>(kPrivateInputSize),
SetArgPointee<4>(kPrivateOutputSize), Return(S_OK)));
- proxy_->Initialize(nullptr, std::move(callback));
+ proxy_->Initialize(client, std::move(callback));
::testing::Mock::VerifyAndClearExpectations(device_mock_.Get());
::testing::Mock::VerifyAndClearExpectations(video_device_mock_.Get());
::testing::Mock::VerifyAndClearExpectations(video_device1_mock_.Get());
@@ -142,6 +261,10 @@ class D3D11CdmProxyTest : public ::testing::Test {
}
std::unique_ptr<D3D11CdmProxy> proxy_;
+ std::unique_ptr<base::PowerMonitor> power_monitor_;
+ // Owned by power_monitor_. Use this to simulate a power-suspend.
+ MockPowerMonitorSource* mock_power_monitor_source_;
+
D3D11CreateDeviceMock create_device_mock_;
CallbackMock callback_mock_;
@@ -152,11 +275,22 @@ class D3D11CdmProxyTest : public ::testing::Test {
Microsoft::WRL::ComPtr<D3D11DeviceContextMock> device_context_mock_;
Microsoft::WRL::ComPtr<D3D11VideoContextMock> video_context_mock_;
Microsoft::WRL::ComPtr<D3D11VideoContext1Mock> video_context1_mock_;
+ Microsoft::WRL::ComPtr<DXGIDevice2Mock> dxgi_device_;
+ Microsoft::WRL::ComPtr<DXGIAdapter3Mock> dxgi_adapter_;
+
+ D3D11_VIDEO_CONTENT_PROTECTION_CAPS content_protection_caps_ = {};
+
+ // Event captured in Initialize(). Used in tests to notify hardware content
+ // protection teardown.
+ HANDLE teardown_event_;
// These size values are arbitrary. Used for mocking
// GetCryptoSessionPrivateDataSize().
const UINT kPrivateInputSize = 10;
const UINT kPrivateOutputSize = 40;
+
+ // ObjectWatcher uses SequencedTaskRunnerHandle.
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
};
// Verifies that if device creation fails, then the call fails.
@@ -173,8 +307,85 @@ TEST_F(D3D11CdmProxyTest, FailedToCreateDevice) {
// Initialize() success case.
TEST_F(D3D11CdmProxyTest, Initialize) {
EXPECT_CALL(callback_mock_, InitializeCallback(CdmProxy::Status::kOk, _, _));
- ASSERT_NO_FATAL_FAILURE(Initialize(base::BindOnce(
- &CallbackMock::InitializeCallback, base::Unretained(&callback_mock_))));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(nullptr, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
+}
+
+// Hardware content protection teardown is notified to the proxy.
+// Verify that the client is notified.
+TEST_F(D3D11CdmProxyTest, HardwareContentProtectionTeardown) {
+ base::RunLoop run_loop;
+ MockProxyClient client;
+ EXPECT_CALL(client, NotifyHardwareReset()).WillOnce(Invoke([&run_loop]() {
+ run_loop.Quit();
+ }));
+
+ EXPECT_CALL(callback_mock_, InitializeCallback(CdmProxy::Status::kOk, _, _));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(&client, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
+ SetEvent(teardown_event_);
+ run_loop.Run();
+}
+
+// Verify that failing to register to hardware content protection teardown
+// status event results in initializaion failure.
+TEST_F(D3D11CdmProxyTest, FailedToRegisterForContentProtectionTeardown) {
+ EXPECT_CALL(callback_mock_,
+ InitializeCallback(CdmProxy::Status::kFail, _, _));
+
+ EXPECT_CALL(*dxgi_adapter_.Get(),
+ RegisterHardwareContentProtectionTeardownStatusEvent(_, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(Return(E_FAIL));
+
+ proxy_->Initialize(nullptr,
+ base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_)));
+}
+
+// Verify that the client is notified on power suspend.
+TEST_F(D3D11CdmProxyTest, PowerSuspend) {
+ base::RunLoop run_loop;
+ MockProxyClient client;
+ EXPECT_CALL(client, NotifyHardwareReset()).WillOnce(Invoke([&run_loop]() {
+ run_loop.Quit();
+ }));
+
+ EXPECT_CALL(callback_mock_, InitializeCallback(CdmProxy::Status::kOk, _, _));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(&client, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
+ mock_power_monitor_source_->Suspend();
+ run_loop.Run();
+}
+
+// Verify that if there isn't a power monitor, initialization fails.
+TEST_F(D3D11CdmProxyTest, NoPowerMonitor) {
+ power_monitor_ = nullptr;
+ EXPECT_CALL(callback_mock_,
+ InitializeCallback(CdmProxy::Status::kFail, _, _));
+
+ proxy_->Initialize(nullptr,
+ base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_)));
+}
+
+// Initialization failure because HW key exchange is not available.
+TEST_F(D3D11CdmProxyTest, NoHwKeyExchange) {
+ EXPECT_CALL(callback_mock_,
+ InitializeCallback(CdmProxy::Status::kFail, _, _));
+ // GUID is set to non-D3D11_KEY_EXCHANGE_HW_PROTECTION, which means no HW key
+ // exchange.
+ EXPECT_CALL(*video_device_mock_.Get(),
+ CheckCryptoKeyExchange(_, _, Lt(1u), _))
+ .WillOnce(
+ DoAll(SetArgPointee<3>(D3D11_CRYPTO_TYPE_AES128_CTR), Return(S_OK)));
+
+ proxy_->Initialize(nullptr,
+ base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_)));
}
// Verifies that Process() won't work if not initialized.
@@ -194,8 +405,9 @@ TEST_F(D3D11CdmProxyTest, ProcessInvalidCryptoSessionID) {
uint32_t crypto_session_id = 0;
EXPECT_CALL(callback_mock_, InitializeCallback(CdmProxy::Status::kOk, _, _))
.WillOnce(SaveArg<2>(&crypto_session_id));
- ASSERT_NO_FATAL_FAILURE(Initialize(base::BindOnce(
- &CallbackMock::InitializeCallback, base::Unretained(&callback_mock_))));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(nullptr, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
::testing::Mock::VerifyAndClearExpectations(&callback_mock_);
// The size nor value here matter, so making non empty non zero vector.
@@ -286,8 +498,9 @@ TEST_F(D3D11CdmProxyTest, Process) {
EXPECT_CALL(callback_mock_,
InitializeCallback(CdmProxy::Status::kOk, kTestProtocol, _))
.WillOnce(SaveArg<2>(&crypto_session_id));
- ASSERT_NO_FATAL_FAILURE(Initialize(base::BindOnce(
- &CallbackMock::InitializeCallback, base::Unretained(&callback_mock_))));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(nullptr, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
::testing::Mock::VerifyAndClearExpectations(&callback_mock_);
// The size nor value here matter, so making non empty non zero vector.
@@ -328,7 +541,8 @@ TEST_F(D3D11CdmProxyTest, Process) {
// The value does not matter, so making non zero vector.
std::vector<uint8_t> test_output_data(kExpectedOutputDataSize, 0xAA);
- EXPECT_CALL(callback_mock_, ProcessCallback(CdmProxy::Status::kOk, _));
+ EXPECT_CALL(callback_mock_,
+ ProcessCallback(CdmProxy::Status::kOk, test_output_data));
auto set_test_output_data = [&test_output_data](void* output) {
D3D11_KEY_EXCHANGE_HW_PROTECTION_DATA* kex_struct =
@@ -366,20 +580,22 @@ TEST_F(D3D11CdmProxyTest, CreateMediaCryptoSessionNoExtraData) {
EXPECT_CALL(callback_mock_,
InitializeCallback(CdmProxy::Status::kOk, kTestProtocol, _))
.WillOnce(SaveArg<2>(&crypto_session_id_from_initialize));
- ASSERT_NO_FATAL_FAILURE(Initialize(base::BindOnce(
- &CallbackMock::InitializeCallback, base::Unretained(&callback_mock_))));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(nullptr, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
::testing::Mock::VerifyAndClearExpectations(&callback_mock_);
// Expect a new crypto session.
EXPECT_CALL(callback_mock_, CreateMediaCryptoSessionCallback(
CdmProxy::Status::kOk,
Ne(crypto_session_id_from_initialize), _));
- auto media_crypto_session_mock = CreateMock<D3D11CryptoSessionMock>();
+ auto media_crypto_session_mock = CreateD3D11Mock<D3D11CryptoSessionMock>();
EXPECT_CALL(*video_device_mock_.Get(),
CreateCryptoSession(Pointee(CRYPTO_TYPE_GUID), _,
Pointee(CRYPTO_TYPE_GUID), _))
- .WillOnce(DoAll(SetArgPointee<3>(media_crypto_session_mock.Get()),
- Return(S_OK)));
+ .WillOnce(
+ DoAll(AddRefAndSetArgPointee<3>(media_crypto_session_mock.Get()),
+ Return(S_OK)));
EXPECT_CALL(*video_context1_mock_.Get(), GetDataForNewHardwareKey(_, _, _, _))
.Times(0);
@@ -413,8 +629,9 @@ TEST_F(D3D11CdmProxyTest, CreateMediaCryptoSessionWithExtraData) {
EXPECT_CALL(callback_mock_,
InitializeCallback(CdmProxy::Status::kOk, kTestProtocol, _))
.WillOnce(SaveArg<2>(&crypto_session_id_from_initialize));
- ASSERT_NO_FATAL_FAILURE(Initialize(base::BindOnce(
- &CallbackMock::InitializeCallback, base::Unretained(&callback_mock_))));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(nullptr, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
::testing::Mock::VerifyAndClearExpectations(&callback_mock_);
// Expect a new crypto session.
@@ -422,12 +639,13 @@ TEST_F(D3D11CdmProxyTest, CreateMediaCryptoSessionWithExtraData) {
CdmProxy::Status::kOk,
Ne(crypto_session_id_from_initialize), _));
- auto media_crypto_session_mock = CreateMock<D3D11CryptoSessionMock>();
+ auto media_crypto_session_mock = CreateD3D11Mock<D3D11CryptoSessionMock>();
EXPECT_CALL(*video_device_mock_.Get(),
CreateCryptoSession(Pointee(CRYPTO_TYPE_GUID), _,
Pointee(CRYPTO_TYPE_GUID), _))
- .WillOnce(DoAll(SetArgPointee<3>(media_crypto_session_mock.Get()),
- Return(S_OK)));
+ .WillOnce(
+ DoAll(AddRefAndSetArgPointee<3>(media_crypto_session_mock.Get()),
+ Return(S_OK)));
// The size nor value here matter, so making non empty non zero vector.
const std::vector<uint8_t> kAnyInput(16, 0xFF);
const uint64_t kAnyOutputData = 23298u;
@@ -477,8 +695,9 @@ TEST_F(D3D11CdmProxyTest, SetKeyAndGetDecryptContext) {
EXPECT_CALL(callback_mock_,
InitializeCallback(CdmProxy::Status::kOk, kTestProtocol, _))
.WillOnce(SaveArg<2>(&crypto_session_id_from_initialize));
- ASSERT_NO_FATAL_FAILURE(Initialize(base::BindOnce(
- &CallbackMock::InitializeCallback, base::Unretained(&callback_mock_))));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(nullptr, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
::testing::Mock::VerifyAndClearExpectations(&callback_mock_);
std::vector<uint8_t> kKeyId = {
@@ -504,6 +723,46 @@ TEST_F(D3D11CdmProxyTest, SetKeyAndGetDecryptContext) {
EXPECT_EQ(CRYPTO_TYPE_GUID, decrypt_context->key_info_guid);
}
+// Verify that the keys are not accessible via CdmProxyContext, after a
+// teardown..
+TEST_F(D3D11CdmProxyTest, ClearKeysAfterHardwareContentProtectionTeardown) {
+ base::RunLoop run_loop;
+ MockProxyClient client;
+ EXPECT_CALL(client, NotifyHardwareReset()).WillOnce(Invoke([&run_loop]() {
+ run_loop.Quit();
+ }));
+
+ base::WeakPtr<CdmContext> context = proxy_->GetCdmContext();
+ ASSERT_TRUE(context);
+ CdmProxyContext* proxy_context = context->GetCdmProxyContext();
+
+ uint32_t crypto_session_id_from_initialize = 0;
+ EXPECT_CALL(callback_mock_,
+ InitializeCallback(CdmProxy::Status::kOk, kTestProtocol, _))
+ .WillOnce(SaveArg<2>(&crypto_session_id_from_initialize));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(&client, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
+ ::testing::Mock::VerifyAndClearExpectations(&callback_mock_);
+
+ std::vector<uint8_t> kKeyId = {
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+ };
+ std::vector<uint8_t> kKeyBlob = {
+ 0xab, 0x01, 0x20, 0xd3, 0xee, 0x05, 0x99, 0x87,
+ 0xff, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x7F,
+ };
+ proxy_->SetKey(crypto_session_id_from_initialize, kKeyId, kKeyBlob);
+
+ SetEvent(teardown_event_);
+ run_loop.Run();
+
+ std::string key_id_str(kKeyId.begin(), kKeyId.end());
+ auto decrypt_context = proxy_context->GetD3D11DecryptContext(key_id_str);
+ ASSERT_FALSE(decrypt_context);
+}
+
// Verify that removing a key works.
TEST_F(D3D11CdmProxyTest, RemoveKey) {
base::WeakPtr<CdmContext> context = proxy_->GetCdmContext();
@@ -514,8 +773,9 @@ TEST_F(D3D11CdmProxyTest, RemoveKey) {
EXPECT_CALL(callback_mock_,
InitializeCallback(CdmProxy::Status::kOk, kTestProtocol, _))
.WillOnce(SaveArg<2>(&crypto_session_id_from_initialize));
- ASSERT_NO_FATAL_FAILURE(Initialize(base::BindOnce(
- &CallbackMock::InitializeCallback, base::Unretained(&callback_mock_))));
+ ASSERT_NO_FATAL_FAILURE(
+ Initialize(nullptr, base::BindOnce(&CallbackMock::InitializeCallback,
+ base::Unretained(&callback_mock_))));
::testing::Mock::VerifyAndClearExpectations(&callback_mock_);
std::vector<uint8_t> kKeyId = {
diff --git a/chromium/media/gpu/windows/d3d11_create_device_cb.h b/chromium/media/gpu/windows/d3d11_create_device_cb.h
new file mode 100644
index 00000000000..5b505298bcf
--- /dev/null
+++ b/chromium/media/gpu/windows/d3d11_create_device_cb.h
@@ -0,0 +1,33 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_WINDOWS_D3D11_CREATE_DEVICE_CB_H_
+#define MEDIA_GPU_WINDOWS_D3D11_CREATE_DEVICE_CB_H_
+
+#include <d3d11_1.h>
+#include <wrl/client.h>
+
+#include "base/callback.h"
+
+namespace media {
+
+// Handy callback to inject mocks via D3D11CreateDevice.
+//
+// The signature matches D3D11CreateDevice(). decltype(D3D11CreateDevice) does
+// not work because __attribute__((stdcall)) gets appended, and the template
+// instantiation fails.
+using D3D11CreateDeviceCB =
+ base::RepeatingCallback<HRESULT(IDXGIAdapter*,
+ D3D_DRIVER_TYPE,
+ HMODULE,
+ UINT,
+ const D3D_FEATURE_LEVEL*,
+ UINT,
+ UINT,
+ ID3D11Device**,
+ D3D_FEATURE_LEVEL*,
+ ID3D11DeviceContext**)>;
+} // namespace media
+
+#endif // MEDIA_GPU_WINDOWS_D3D11_CREATE_DEVICE_CB_H_
diff --git a/chromium/media/gpu/windows/d3d11_decryptor.cc b/chromium/media/gpu/windows/d3d11_decryptor.cc
index 87b064ca314..121d947df97 100644
--- a/chromium/media/gpu/windows/d3d11_decryptor.cc
+++ b/chromium/media/gpu/windows/d3d11_decryptor.cc
@@ -4,13 +4,109 @@
#include "media/gpu/windows/d3d11_decryptor.h"
+#include "base/bind.h"
#include "base/logging.h"
+#include "base/stl_util.h"
#include "media/base/decoder_buffer.h"
namespace media {
+namespace {
+
+// "A buffer is defined as a single subresource."
+// https://msdn.microsoft.com/en-us/library/windows/desktop/ff476901(v=vs.85).aspx
+const UINT kSubresourceIndex = 0;
+const UINT kWaitIfGPUBusy = 0;
+
+// Creates ID3D11Buffer using the values. Return true on success.
+bool CreateBuffer(ID3D11Device* device,
+ D3D11_USAGE usage,
+ UINT bind_flags,
+ UINT cpu_access,
+ ID3D11Buffer** out) {
+ D3D11_BUFFER_DESC buf_desc = {};
+ // This value is somewhat arbitrary but is a multiple of 16 and 4K and is
+ // equal to D3D11_REQ_TEXTURE2D_U_OR_V_DIMENSION. Since the buffers are cast
+ // to ID3D11Texture2D, setting it as its size should make sense.
+ buf_desc.ByteWidth = 16384;
+ buf_desc.BindFlags = bind_flags;
+ buf_desc.Usage = usage;
+ buf_desc.CPUAccessFlags = cpu_access;
+
+ HRESULT hresult = device->CreateBuffer(&buf_desc, nullptr, out);
+ return SUCCEEDED(hresult);
+}
+
+// Copies |input| into |output|, the output buffer should be a staging buffer
+// that is CPU writable.
+bool CopyDataToBuffer(base::span<const uint8_t> input,
+ ID3D11DeviceContext* device_context,
+ ID3D11Buffer* output) {
+ D3D11_BUFFER_DESC output_buffer_desc = {};
+ output->GetDesc(&output_buffer_desc);
+
+ if (output_buffer_desc.ByteWidth < input.size()) {
+ DVLOG(1) << input.size() << " does not fit in "
+ << output_buffer_desc.ByteWidth;
+ return false;
+ }
+
+ D3D11_MAPPED_SUBRESOURCE map_resource = {};
+ HRESULT hresult =
+ device_context->Map(output, kSubresourceIndex, D3D11_MAP_WRITE,
+ kWaitIfGPUBusy, &map_resource);
+ if (FAILED(hresult)) {
+ DVLOG(3) << "Failed to map buffer for writing.";
+ return false;
+ }
+ memcpy(map_resource.pData, input.data(), input.size_bytes());
+ device_context->Unmap(output, kSubresourceIndex);
+ return true;
+}
+
+// Copies |input| into |output|. The input buffer is should be a staging buffer
+// that is CPU readable.
+bool CopyDataOutFromBuffer(ID3D11Buffer* input,
+ size_t input_size,
+ ID3D11DeviceContext* device_context,
+ std::vector<uint8_t>* output) {
+ D3D11_MAPPED_SUBRESOURCE map_resource = {};
+ HRESULT hresult = device_context->Map(
+ input, kSubresourceIndex, D3D11_MAP_READ, kWaitIfGPUBusy, &map_resource);
+ if (FAILED(hresult)) {
+ DVLOG(3) << "Failed to map buffer for reading.";
+ return false;
+ }
+ output->resize(input_size);
+ memcpy(output->data(), map_resource.pData, input_size);
+ device_context->Unmap(input, kSubresourceIndex);
+ return true;
+}
+
+D3D11_AES_CTR_IV StringIvToD3D11Iv(const std::string& iv) {
+ D3D11_AES_CTR_IV d3d11_iv = {};
+ DCHECK_LE(iv.size(), 16u);
+ memcpy(&d3d11_iv, iv.data(), iv.size());
+ return d3d11_iv;
+}
+
+// Returns true if the entire sample is encrypted.
+bool IsWholeSampleEncrypted(const DecryptConfig& decrypt_config,
+ size_t sample_size) {
+ const auto& subsamples = decrypt_config.subsamples();
+ if (subsamples.size() != 1)
+ return false;
+
+ return subsamples.front().clear_bytes == 0 &&
+ subsamples.front().cypher_bytes == sample_size;
+}
+
+} // namespace
+
D3D11Decryptor::D3D11Decryptor(CdmProxyContext* cdm_proxy_context)
- : cdm_proxy_context_(cdm_proxy_context), weak_factory_(this) {
+ : cdm_proxy_context_(cdm_proxy_context),
+ create_device_func_(base::BindRepeating(D3D11CreateDevice)),
+ weak_factory_(this) {
DCHECK(cdm_proxy_context_);
}
@@ -26,8 +122,58 @@ void D3D11Decryptor::RegisterNewKeyCB(StreamType stream_type,
void D3D11Decryptor::Decrypt(StreamType stream_type,
scoped_refptr<DecoderBuffer> encrypted,
const DecryptCB& decrypt_cb) {
- // TODO(rkuroiwa): Implemented this function using |cdm_proxy_context_|.
- NOTIMPLEMENTED();
+ if (encrypted->end_of_stream()) {
+ decrypt_cb.Run(kSuccess, encrypted);
+ return;
+ }
+
+ const auto* decrypt_config = encrypted->decrypt_config();
+ if (!decrypt_config) {
+ // Not encrypted, nothing to do.
+ decrypt_cb.Run(kSuccess, encrypted);
+ return;
+ }
+
+ if (decrypt_config->HasPattern()) {
+ DVLOG(3) << "Cannot handle pattern decryption.";
+ decrypt_cb.Run(kError, nullptr);
+ return;
+ }
+
+ auto context =
+ cdm_proxy_context_->GetD3D11DecryptContext(decrypt_config->key_id());
+ if (!context) {
+ decrypt_cb.Run(kNoKey, nullptr);
+ return;
+ }
+
+ if (!IsDecryptionBufferInitialized() && !InitializeDecryptionBuffer()) {
+ decrypt_cb.Run(kError, nullptr);
+ return;
+ }
+
+ std::vector<uint8_t> output;
+ if (IsWholeSampleEncrypted(*encrypted->decrypt_config(),
+ encrypted->data_size())) {
+ if (!CtrDecrypt(base::make_span(encrypted->data(), encrypted->data_size()),
+ encrypted->decrypt_config()->iv(), *context, &output)) {
+ decrypt_cb.Run(kError, nullptr);
+ return;
+ }
+ } else {
+ if (!SubsampleCtrDecrypt(encrypted, *context, &output)) {
+ decrypt_cb.Run(kError, nullptr);
+ return;
+ }
+ }
+
+ auto decoder_buffer = DecoderBuffer::CopyFrom(output.data(), output.size());
+ decoder_buffer->set_timestamp(encrypted->timestamp());
+ decoder_buffer->set_duration(encrypted->duration());
+ decoder_buffer->set_is_key_frame(encrypted->is_key_frame());
+ decoder_buffer->CopySideDataFrom(encrypted->side_data(),
+ encrypted->side_data_size());
+ decrypt_cb.Run(kSuccess, decoder_buffer);
}
void D3D11Decryptor::CancelDecrypt(StreamType stream_type) {
@@ -68,4 +214,132 @@ void D3D11Decryptor::DeinitializeDecoder(StreamType stream_type) {
// nothing to be done here.
}
+bool D3D11Decryptor::IsDecryptionBufferInitialized() {
+ // This must be the last object initialized in InitializeDecryptionBuffer().
+ return cpu_accessible_buffer_;
+}
+
+bool D3D11Decryptor::InitializeDecryptionBuffer() {
+ const D3D_FEATURE_LEVEL feature_levels[] = {D3D_FEATURE_LEVEL_11_1};
+ HRESULT hresult = create_device_func_.Run(
+ nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, 0, feature_levels,
+ base::size(feature_levels), D3D11_SDK_VERSION,
+ device_.ReleaseAndGetAddressOf(), nullptr,
+ device_context_.ReleaseAndGetAddressOf());
+ if (FAILED(hresult)) {
+ DVLOG(2) << "Failed to create D3D11 device: " << hresult;
+ return false;
+ }
+
+ hresult = device_context_.CopyTo(video_context_.ReleaseAndGetAddressOf());
+ if (FAILED(hresult)) {
+ DVLOG(2) << "Failed to get video context.";
+ return false;
+ }
+
+ // The buffer is statging so that the data can be accessed by the CPU and HW.
+ if (!CreateBuffer(device_.Get(), D3D11_USAGE_STAGING, 0, // no binding.
+ D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
+ encrypted_sample_buffer_.ReleaseAndGetAddressOf())) {
+ DVLOG(2) << "Failed to create buffer for encrypted sample.";
+ return false;
+ }
+
+ // Note that the cpu access flag is 0 because this buffer is used to write the
+ // decrypted buffer in HW.
+ if (!CreateBuffer(device_.Get(), D3D11_USAGE_DEFAULT,
+ D3D11_BIND_RENDER_TARGET,
+ 0, // no cpu access.
+ decrypted_sample_buffer_.ReleaseAndGetAddressOf())) {
+ DVLOG(2) << "Failed to create buffer for decrypted sample.";
+ return false;
+ }
+
+ if (!CreateBuffer(device_.Get(), D3D11_USAGE_STAGING, 0, // no binding.
+ D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
+ cpu_accessible_buffer_.ReleaseAndGetAddressOf())) {
+ DVLOG(2) << "Failed to create cpu accessible buffer.";
+ return false;
+ }
+
+ return true;
+}
+
+bool D3D11Decryptor::CtrDecrypt(
+ base::span<const uint8_t> input,
+ const std::string& iv,
+ const CdmProxyContext::D3D11DecryptContext& context,
+ std::vector<uint8_t>* output) {
+ output->clear();
+ if (input.empty())
+ return true;
+
+ if (!CopyDataToBuffer(input, device_context_.Get(),
+ encrypted_sample_buffer_.Get())) {
+ return false;
+ }
+
+ D3D11_AES_CTR_IV aes_ctr_iv = StringIvToD3D11Iv(iv);
+ D3D11_ENCRYPTED_BLOCK_INFO block_info = {};
+ // The field says num bytes but it should be number of 4K blocks. See more at
+ // https://crbug.com/849466.
+ block_info.NumEncryptedBytesAtBeginning = (input.size() - 1) / 4096 + 1;
+
+ // ID3D11Buffers should be used but since the interface takes ID3D11Texture2D,
+ // it is reinterpret cast. See more at https://crbug.com/849466.
+ video_context_->DecryptionBlt(
+ context.crypto_session,
+ reinterpret_cast<ID3D11Texture2D*>(encrypted_sample_buffer_.Get()),
+ reinterpret_cast<ID3D11Texture2D*>(decrypted_sample_buffer_.Get()),
+ &block_info, context.key_blob_size, context.key_blob, sizeof(aes_ctr_iv),
+ &aes_ctr_iv);
+
+ // Because DecryptionBlt() doesn't have a return value, this is a hack to
+ // check for decryption operation status. If it has been modified, then there
+ // was an error. See more at https://crbug.com/849466.
+ if (block_info.NumBytesInSkipPattern != 0) {
+ return false;
+ }
+
+ device_context_->CopyResource(cpu_accessible_buffer_.Get(),
+ decrypted_sample_buffer_.Get());
+ return CopyDataOutFromBuffer(cpu_accessible_buffer_.Get(), input.size(),
+ device_context_.Get(), output);
+}
+
+// TODO(crbug.com/845631): This is the same as DecryptCencBuffer(), so it should
+// be deduped.
+bool D3D11Decryptor::SubsampleCtrDecrypt(
+ scoped_refptr<DecoderBuffer> encrypted,
+ const CdmProxyContext::D3D11DecryptContext& context,
+ std::vector<uint8_t>* output) {
+ const auto& subsamples = encrypted->decrypt_config()->subsamples();
+ std::vector<uint8_t> encrypted_data;
+ const uint8_t* data = encrypted->data();
+ for (const auto& subsample : subsamples) {
+ data += subsample.clear_bytes;
+ encrypted_data.insert(encrypted_data.end(), data,
+ data + subsample.cypher_bytes);
+ data += subsample.cypher_bytes;
+ }
+
+ std::vector<uint8_t> decrypted_data;
+ if (!CtrDecrypt(encrypted_data, encrypted->decrypt_config()->iv(), context,
+ &decrypted_data)) {
+ return false;
+ }
+
+ data = encrypted->data();
+ const uint8_t* decrypted_data_ptr = decrypted_data.data();
+ for (const auto& subsample : subsamples) {
+ output->insert(output->end(), data, data + subsample.clear_bytes);
+ data += subsample.clear_bytes;
+ output->insert(output->end(), decrypted_data_ptr,
+ decrypted_data_ptr + subsample.cypher_bytes);
+ decrypted_data_ptr += subsample.cypher_bytes;
+ data += subsample.cypher_bytes;
+ }
+ return true;
+}
+
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_decryptor.h b/chromium/media/gpu/windows/d3d11_decryptor.h
index ef4d5ae30f6..34b48a60250 100644
--- a/chromium/media/gpu/windows/d3d11_decryptor.h
+++ b/chromium/media/gpu/windows/d3d11_decryptor.h
@@ -5,15 +5,18 @@
#ifndef MEDIA_GPU_WINDOWS_D3D11_DECRYPTOR_H_
#define MEDIA_GPU_WINDOWS_D3D11_DECRYPTOR_H_
+#include <wrl/client.h>
+
+#include "base/containers/span.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
+#include "media/base/cdm_proxy_context.h"
#include "media/base/decryptor.h"
#include "media/gpu/media_gpu_export.h"
+#include "media/gpu/windows/d3d11_create_device_cb.h"
namespace media {
-class CdmProxyContext;
-
class MEDIA_GPU_EXPORT D3D11Decryptor : public Decryptor {
public:
explicit D3D11Decryptor(CdmProxyContext* cdm_proxy_context);
@@ -37,9 +40,60 @@ class MEDIA_GPU_EXPORT D3D11Decryptor : public Decryptor {
void ResetDecoder(StreamType stream_type) final;
void DeinitializeDecoder(StreamType stream_type) final;
+ void SetCreateDeviceCallbackForTesting(D3D11CreateDeviceCB callback) {
+ create_device_func_ = callback;
+ }
+
private:
+ // Returns true if the decryption buffers have been initialized.
+ bool IsDecryptionBufferInitialized();
+
+ // Initialize the buffers for decryption.
+ bool InitializeDecryptionBuffer();
+
+ // CTR mode decrypts |encrypted| data into |output|. |output| is always
+ // cleared. Returns true on success.
+ bool CtrDecrypt(base::span<const uint8_t> input,
+ const std::string& iv,
+ const CdmProxyContext::D3D11DecryptContext& context,
+ std::vector<uint8_t>* output);
+
+ // CTR mode decryption method, aware of subsamples. |output| is always
+ // cleared. Returns true and populates |output| on success.
+ bool SubsampleCtrDecrypt(scoped_refptr<DecoderBuffer> encrypted,
+ const CdmProxyContext::D3D11DecryptContext& context,
+ std::vector<uint8_t>* output);
+
CdmProxyContext* cdm_proxy_context_;
+ template <class T>
+ using ComPtr = Microsoft::WRL::ComPtr<T>;
+ ComPtr<ID3D11Device> device_;
+ ComPtr<ID3D11DeviceContext> device_context_;
+ ComPtr<ID3D11VideoContext> video_context_;
+
+ // Due to how D3D11 resource permissons work, there are differences between
+ // CPU (user) and HW accessible buffers. And things get more complicated with
+ // what can read or write from/to it, what combinations are valid, and
+ // performance tradeoffs in giving different permissions. The most straight
+ // forward way is to use three buffers as described below.
+
+ // A buffer where encrypted data is written by the CPU and is readable by the
+ // HW.
+ ComPtr<ID3D11Buffer> encrypted_sample_buffer_;
+
+ // A buffer where the decrypted buffer is written by the HW that is not CPU
+ // accessible.
+ ComPtr<ID3D11Buffer> decrypted_sample_buffer_;
+
+ // A CPU accessible buffer where the content of |decrypted_buffer_| is copied
+ // to.
+ ComPtr<ID3D11Buffer> cpu_accessible_buffer_;
+
+ // Can be set in tests via SetCreateDeviceCallbackForTesting().
+ // Is D3D11CreateDevice() when not mocked.
+ D3D11CreateDeviceCB create_device_func_;
+
base::WeakPtrFactory<D3D11Decryptor> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(D3D11Decryptor);
diff --git a/chromium/media/gpu/windows/d3d11_decryptor_unittest.cc b/chromium/media/gpu/windows/d3d11_decryptor_unittest.cc
new file mode 100644
index 00000000000..e982472b95f
--- /dev/null
+++ b/chromium/media/gpu/windows/d3d11_decryptor_unittest.cc
@@ -0,0 +1,523 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/windows/d3d11_decryptor.h"
+
+#include <initguid.h>
+
+#include <array>
+
+#include "base/bind.h"
+#include "base/stl_util.h"
+#include "media/base/cdm_proxy_context.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/subsample_entry.h"
+#include "media/gpu/windows/d3d11_mocks.h"
+
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::DoAll;
+using ::testing::ElementsAreArray;
+using ::testing::Invoke;
+using ::testing::IsNull;
+using ::testing::Pointee;
+using ::testing::Return;
+using ::testing::SetArgPointee;
+
+template <class T>
+using ComPtr = Microsoft::WRL::ComPtr<T>;
+
+namespace media {
+
+namespace {
+// clang-format off
+// The value doesn't matter this is just a GUID.
+DEFINE_GUID(TEST_GUID,
+ 0x01020304, 0xffee, 0xefba,
+ 0x93, 0xaa, 0x47, 0x77, 0x43, 0xb1, 0x22, 0x98);
+// clang-format on
+
+// Should be non-0 so that it's different from the default TimeDelta.
+constexpr base::TimeDelta kTestTimestamp =
+ base::TimeDelta::FromMilliseconds(33);
+
+scoped_refptr<DecoderBuffer> TestDecoderBuffer(
+ const uint8_t* input,
+ size_t data_size,
+ const std::string& key_id,
+ const std::string& iv,
+ const SubsampleEntry& subsample) {
+ scoped_refptr<DecoderBuffer> encrypted_buffer =
+ DecoderBuffer::CopyFrom(input, data_size);
+
+ std::vector<SubsampleEntry> subsamples = {subsample};
+ encrypted_buffer->set_decrypt_config(
+ DecryptConfig::CreateCencConfig(key_id, iv, subsamples));
+ encrypted_buffer->set_timestamp(kTestTimestamp);
+ return encrypted_buffer;
+}
+
+class CallbackMock {
+ public:
+ MOCK_METHOD2(DecryptCallback, Decryptor::DecryptCB::RunType);
+};
+
+class CdmProxyContextMock : public CdmProxyContext {
+ public:
+ MOCK_METHOD1(GetD3D11DecryptContext,
+ base::Optional<D3D11DecryptContext>(const std::string& key_id));
+};
+
+// Checks that BUFFER_DESC has these fields match.
+// Flags are ORed values, so this only checks that the expected flags are set.
+// The other fields are ignored.
+MATCHER_P3(BufferDescHas, usage, bind_flags, cpu_access, "") {
+ const D3D11_BUFFER_DESC& buffer_desc = *arg;
+ if (buffer_desc.Usage != usage)
+ return false;
+
+ // Because the flags are enums the compiler infers that the input flags are
+ // signed ints. And the compiler rejects comparing signed int and unsigned
+ // int, so they are cast here.
+ const UINT unsigned_bind_flags = bind_flags;
+ const UINT unsigned_cpu_access_flags = cpu_access;
+
+ if ((buffer_desc.BindFlags & unsigned_bind_flags) != unsigned_bind_flags)
+ return false;
+
+ return (buffer_desc.CPUAccessFlags & unsigned_cpu_access_flags) ==
+ unsigned_cpu_access_flags;
+}
+
+MATCHER_P(NumEncryptedBytesAtBeginningEquals, value, "") {
+ const D3D11_ENCRYPTED_BLOCK_INFO& block_info = *arg;
+ return block_info.NumEncryptedBytesAtBeginning == value;
+}
+
+ACTION_P(SetBufferDescSize, size) {
+ arg0->ByteWidth = size;
+}
+
+MATCHER_P2(OutputDataEquals, data, size, "") {
+ scoped_refptr<DecoderBuffer> buffer = arg;
+ if (size != buffer->data_size()) {
+ return false;
+ }
+ if (buffer->timestamp() != kTestTimestamp) {
+ return false;
+ }
+
+ std::vector<uint8_t> expected(data, data + size);
+ std::vector<uint8_t> actual(buffer->data(),
+ buffer->data() + buffer->data_size());
+ return actual == expected;
+}
+} // namespace
+
+class D3D11DecryptorTest : public ::testing::Test {
+ protected:
+ void SetUp() override {
+ decryptor_ = std::make_unique<D3D11Decryptor>(&mock_proxy_);
+
+ device_mock_ = CreateD3D11Mock<D3D11DeviceMock>();
+ device_context_mock_ = CreateD3D11Mock<D3D11DeviceContextMock>();
+ video_context_mock_ = CreateD3D11Mock<D3D11VideoContextMock>();
+
+ ON_CALL(create_device_mock_,
+ Create(_, D3D_DRIVER_TYPE_HARDWARE, _, _, _, _, _, _, _, _))
+ .WillByDefault(
+ DoAll(AddRefAndSetArgPointee<7>(device_mock_.Get()),
+ AddRefAndSetArgPointee<9>(device_context_mock_.Get()),
+ Return(S_OK)));
+
+ decryptor_->SetCreateDeviceCallbackForTesting(
+ base::BindRepeating(&D3D11CreateDeviceMock::Create,
+ base::Unretained(&create_device_mock_)));
+ }
+
+ std::unique_ptr<D3D11Decryptor> decryptor_;
+ CdmProxyContextMock mock_proxy_;
+
+ D3D11CreateDeviceMock create_device_mock_;
+
+ ComPtr<D3D11DeviceMock> device_mock_;
+ ComPtr<D3D11DeviceContextMock> device_context_mock_;
+ ComPtr<D3D11VideoContextMock> video_context_mock_;
+};
+
+// Verify that full sample encrypted sample works.
+TEST_F(D3D11DecryptorTest, FullSampleCtrDecrypt) {
+ const uint8_t kInput[] = {
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ };
+ const SubsampleEntry kSubsample(0, base::size(kInput));
+ // This is arbitrary. Just used to check that this value is output from the
+ // method.
+ const uint8_t kFakeDecryptedData[] = {
+ 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,
+ };
+ static_assert(base::size(kFakeDecryptedData) == base::size(kInput),
+ "Fake input and output data size must match.");
+ const std::string kKeyId = "some 16 byte id.";
+ const std::string kIv = "some 16 byte iv.";
+ const uint8_t kAnyKeyBlob[] = {3, 5, 38, 19};
+
+ CdmProxyContext::D3D11DecryptContext decrypt_context = {};
+ ComPtr<D3D11CryptoSessionMock> crypto_session_mock =
+ CreateD3D11Mock<D3D11CryptoSessionMock>();
+ decrypt_context.crypto_session = crypto_session_mock.Get();
+ decrypt_context.key_blob = kAnyKeyBlob;
+ decrypt_context.key_blob_size = base::size(kAnyKeyBlob);
+ decrypt_context.key_info_guid = TEST_GUID;
+ EXPECT_CALL(mock_proxy_, GetD3D11DecryptContext(kKeyId))
+ .WillOnce(Return(decrypt_context));
+
+ EXPECT_CALL(create_device_mock_,
+ Create(_, D3D_DRIVER_TYPE_HARDWARE, _, _, _, _, _, _, _, _))
+ .WillOnce(DoAll(AddRefAndSetArgPointee<7>(device_mock_.Get()),
+ AddRefAndSetArgPointee<9>(device_context_mock_.Get()),
+ Return(S_OK)));
+ EXPECT_CALL(*device_context_mock_.Get(),
+ QueryInterface(IID_ID3D11VideoContext, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(DoAll(
+ AddRefAndSetArgPointee<1>(video_context_mock_.Get()), Return(S_OK)));
+
+ ComPtr<D3D11BufferMock> staging_buffer1 = CreateD3D11Mock<D3D11BufferMock>();
+ ComPtr<D3D11BufferMock> staging_buffer2 = CreateD3D11Mock<D3D11BufferMock>();
+ ComPtr<D3D11BufferMock> gpu_buffer = CreateD3D11Mock<D3D11BufferMock>();
+ // These return big enough size.
+ ON_CALL(*staging_buffer1.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+ ON_CALL(*staging_buffer2.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+ ON_CALL(*gpu_buffer.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+
+ EXPECT_CALL(*device_mock_.Get(),
+ CreateBuffer(
+ BufferDescHas(D3D11_USAGE_STAGING, 0u,
+ D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE),
+ nullptr, _))
+ .WillOnce(
+ DoAll(AddRefAndSetArgPointee<2>(staging_buffer1.Get()), Return(S_OK)))
+ .WillOnce(DoAll(AddRefAndSetArgPointee<2>(staging_buffer2.Get()),
+ Return(S_OK)));
+ EXPECT_CALL(*device_mock_.Get(),
+ CreateBuffer(BufferDescHas(D3D11_USAGE_DEFAULT,
+ D3D11_BIND_RENDER_TARGET, 0u),
+ nullptr, _))
+ .WillOnce(
+ DoAll(AddRefAndSetArgPointee<2>(gpu_buffer.Get()), Return(S_OK)));
+
+ D3D11_MAPPED_SUBRESOURCE staging_buffer1_subresource = {};
+ auto staging_buffer1_subresource_buffer = std::make_unique<uint8_t[]>(20000);
+ staging_buffer1_subresource.pData = staging_buffer1_subresource_buffer.get();
+ EXPECT_CALL(*device_context_mock_.Get(),
+ Map(staging_buffer1.Get(), 0, D3D11_MAP_WRITE, _, _))
+ .WillOnce(
+ DoAll(SetArgPointee<4>(staging_buffer1_subresource), Return(S_OK)));
+ EXPECT_CALL(*device_context_mock_.Get(), Unmap(staging_buffer1.Get(), 0));
+
+ EXPECT_CALL(
+ *video_context_mock_.Get(),
+ DecryptionBlt(crypto_session_mock.Get(),
+ reinterpret_cast<ID3D11Texture2D*>(staging_buffer1.Get()),
+ reinterpret_cast<ID3D11Texture2D*>(gpu_buffer.Get()),
+ NumEncryptedBytesAtBeginningEquals(1u), sizeof(kAnyKeyBlob),
+ kAnyKeyBlob, _, _));
+ EXPECT_CALL(*device_context_mock_.Get(),
+ CopyResource(staging_buffer2.Get(), gpu_buffer.Get()));
+
+ D3D11_MAPPED_SUBRESOURCE staging_buffer2_subresource = {};
+
+ // pData field is non-const void* so make a copy of kFakeDecryptedData that
+ // can be cast to void*.
+ std::unique_ptr<uint8_t[]> decrypted_data =
+ std::make_unique<uint8_t[]>(base::size(kFakeDecryptedData));
+ memcpy(decrypted_data.get(), kFakeDecryptedData,
+ base::size(kFakeDecryptedData));
+
+ staging_buffer2_subresource.pData = decrypted_data.get();
+ EXPECT_CALL(*device_context_mock_.Get(),
+ Map(staging_buffer2.Get(), 0, D3D11_MAP_READ, _, _))
+ .WillOnce(
+ DoAll(SetArgPointee<4>(staging_buffer2_subresource), Return(S_OK)));
+ EXPECT_CALL(*device_context_mock_.Get(), Unmap(staging_buffer2.Get(), 0));
+
+ CallbackMock callbacks;
+ EXPECT_CALL(callbacks, DecryptCallback(
+ Decryptor::kSuccess,
+ OutputDataEquals(kFakeDecryptedData,
+ base::size(kFakeDecryptedData))));
+
+ scoped_refptr<DecoderBuffer> encrypted_buffer =
+ TestDecoderBuffer(kInput, base::size(kInput), kKeyId, kIv, kSubsample);
+ decryptor_->Decrypt(Decryptor::kAudio, encrypted_buffer,
+ base::BindRepeating(&CallbackMock::DecryptCallback,
+ base::Unretained(&callbacks)));
+}
+
+// Verify subsample decryption works.
+TEST_F(D3D11DecryptorTest, SubsampleCtrDecrypt) {
+ // clang-format off
+ const uint8_t kInput[] = {
+ // clear 16 bytes.
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ // encrypted 16 bytes.
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ // clear 5 bytes.
+ 0, 1, 2, 3, 4,
+ // encrypted 16 bytes.
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ };
+ // Encrypted parts of the input
+ const uint8_t kInputEncrypted[] = {
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ };
+ // This is arbitrary. Just used to check that this value is output from the
+ // method.
+ const uint8_t kFakeOutputData[] = {
+ // clear 16 bytes.
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ // decrypted 16 bytes.
+ 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,
+ // clear 5 bytes.
+ 0, 1, 2, 3, 4,
+ // decrypted 16 bytes.
+ 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,
+ };
+ const uint8_t kFakeDecryptedData[] = {
+ 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,
+ 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,
+ };
+ // clang-format on
+ static_assert(base::size(kFakeOutputData) == base::size(kInput),
+ "Fake input and output data size must match.");
+ const std::vector<SubsampleEntry> subsamples = {SubsampleEntry(16, 16),
+ SubsampleEntry(5, 16)};
+ const std::string kKeyId = "some 16 byte id.";
+ const std::string kIv = "some 16 byte iv.";
+ const uint8_t kAnyKeyBlob[] = {3, 5, 38, 19};
+
+ CdmProxyContext::D3D11DecryptContext decrypt_context = {};
+ ComPtr<D3D11CryptoSessionMock> crypto_session_mock =
+ CreateD3D11Mock<D3D11CryptoSessionMock>();
+ decrypt_context.crypto_session = crypto_session_mock.Get();
+ decrypt_context.key_blob = kAnyKeyBlob;
+ decrypt_context.key_blob_size = base::size(kAnyKeyBlob);
+ decrypt_context.key_info_guid = TEST_GUID;
+ EXPECT_CALL(mock_proxy_, GetD3D11DecryptContext(kKeyId))
+ .WillOnce(Return(decrypt_context));
+
+ EXPECT_CALL(create_device_mock_,
+ Create(_, D3D_DRIVER_TYPE_HARDWARE, _, _, _, _, _, _, _, _))
+ .WillOnce(DoAll(AddRefAndSetArgPointee<7>(device_mock_.Get()),
+ AddRefAndSetArgPointee<9>(device_context_mock_.Get()),
+ Return(S_OK)));
+ EXPECT_CALL(*device_context_mock_.Get(),
+ QueryInterface(IID_ID3D11VideoContext, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(DoAll(
+ AddRefAndSetArgPointee<1>(video_context_mock_.Get()), Return(S_OK)));
+
+ ComPtr<D3D11BufferMock> staging_buffer1 = CreateD3D11Mock<D3D11BufferMock>();
+ ComPtr<D3D11BufferMock> staging_buffer2 = CreateD3D11Mock<D3D11BufferMock>();
+ ComPtr<D3D11BufferMock> gpu_buffer = CreateD3D11Mock<D3D11BufferMock>();
+ // These return big enough size.
+ ON_CALL(*staging_buffer1.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+ ON_CALL(*staging_buffer2.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+ ON_CALL(*gpu_buffer.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+
+ EXPECT_CALL(*device_mock_.Get(),
+ CreateBuffer(
+ BufferDescHas(D3D11_USAGE_STAGING, 0u,
+ D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE),
+ nullptr, _))
+ .WillOnce(
+ DoAll(AddRefAndSetArgPointee<2>(staging_buffer1.Get()), Return(S_OK)))
+ .WillOnce(DoAll(AddRefAndSetArgPointee<2>(staging_buffer2.Get()),
+ Return(S_OK)));
+ EXPECT_CALL(*device_mock_.Get(),
+ CreateBuffer(BufferDescHas(D3D11_USAGE_DEFAULT,
+ D3D11_BIND_RENDER_TARGET, 0u),
+ nullptr, _))
+ .WillOnce(
+ DoAll(AddRefAndSetArgPointee<2>(gpu_buffer.Get()), Return(S_OK)));
+
+ D3D11_MAPPED_SUBRESOURCE staging_buffer1_subresource = {};
+ auto staging_buffer1_subresource_buffer = std::make_unique<uint8_t[]>(20000);
+ staging_buffer1_subresource.pData = staging_buffer1_subresource_buffer.get();
+ EXPECT_CALL(*device_context_mock_.Get(),
+ Map(staging_buffer1.Get(), 0, D3D11_MAP_WRITE, _, _))
+ .WillOnce(
+ DoAll(SetArgPointee<4>(staging_buffer1_subresource), Return(S_OK)));
+ EXPECT_CALL(*device_context_mock_.Get(), Unmap(staging_buffer1.Get(), 0));
+
+ EXPECT_CALL(
+ *video_context_mock_.Get(),
+ DecryptionBlt(crypto_session_mock.Get(),
+ reinterpret_cast<ID3D11Texture2D*>(staging_buffer1.Get()),
+ reinterpret_cast<ID3D11Texture2D*>(gpu_buffer.Get()),
+ NumEncryptedBytesAtBeginningEquals(1u), sizeof(kAnyKeyBlob),
+ kAnyKeyBlob, _, _));
+ EXPECT_CALL(*device_context_mock_.Get(),
+ CopyResource(staging_buffer2.Get(), gpu_buffer.Get()));
+
+ D3D11_MAPPED_SUBRESOURCE staging_buffer2_subresource = {};
+
+ // pData field is non-const void* so make a oc kFakeDecryptedData that can be
+ // cast to void*.
+ std::unique_ptr<uint8_t[]> decrypted_data =
+ std::make_unique<uint8_t[]>(base::size(kFakeDecryptedData));
+ memcpy(decrypted_data.get(), kFakeDecryptedData,
+ base::size(kFakeDecryptedData));
+
+ staging_buffer2_subresource.pData = decrypted_data.get();
+ EXPECT_CALL(*device_context_mock_.Get(),
+ Map(staging_buffer2.Get(), 0, D3D11_MAP_READ, _, _))
+ .WillOnce(
+ DoAll(SetArgPointee<4>(staging_buffer2_subresource), Return(S_OK)));
+ EXPECT_CALL(*device_context_mock_.Get(), Unmap(staging_buffer2.Get(), 0));
+
+ CallbackMock callbacks;
+ EXPECT_CALL(callbacks,
+ DecryptCallback(Decryptor::kSuccess,
+ OutputDataEquals(kFakeOutputData,
+ base::size(kFakeOutputData))));
+
+ scoped_refptr<DecoderBuffer> encrypted_buffer =
+ DecoderBuffer::CopyFrom(kInput, base::size(kInput));
+ encrypted_buffer->set_decrypt_config(
+ DecryptConfig::CreateCencConfig(kKeyId, kIv, subsamples));
+ encrypted_buffer->set_timestamp(kTestTimestamp);
+ decryptor_->Decrypt(Decryptor::kAudio, encrypted_buffer,
+ base::BindRepeating(&CallbackMock::DecryptCallback,
+ base::Unretained(&callbacks)));
+ EXPECT_EQ(0, memcmp(staging_buffer1_subresource_buffer.get(), kInputEncrypted,
+ base::size(kInputEncrypted)));
+}
+
+// Verify that if the input is too big, it fails. This may be removed if the
+// implementation supports big input.
+TEST_F(D3D11DecryptorTest, DecryptInputTooBig) {
+ // Something pretty big to be an audio frame. The actual data size doesn't
+ // matter.
+ std::array<uint8_t, 1000000> kInput;
+ const SubsampleEntry kSubsample(0, base::size(kInput));
+ const std::string kKeyId = "some 16 byte id.";
+ const std::string kIv = "some 16 byte iv.";
+ const uint8_t kAnyKeyBlob[] = {3, 5, 38, 19};
+
+ CdmProxyContext::D3D11DecryptContext decrypt_context = {};
+ ComPtr<D3D11CryptoSessionMock> crypto_session_mock =
+ CreateD3D11Mock<D3D11CryptoSessionMock>();
+ decrypt_context.key_blob = kAnyKeyBlob;
+ decrypt_context.key_blob_size = base::size(kAnyKeyBlob);
+ decrypt_context.key_info_guid = TEST_GUID;
+ ON_CALL(mock_proxy_, GetD3D11DecryptContext(kKeyId))
+ .WillByDefault(Return(decrypt_context));
+
+ ComPtr<D3D11BufferMock> staging_buffer1 = CreateD3D11Mock<D3D11BufferMock>();
+ ComPtr<D3D11BufferMock> staging_buffer2 = CreateD3D11Mock<D3D11BufferMock>();
+ ComPtr<D3D11BufferMock> gpu_buffer = CreateD3D11Mock<D3D11BufferMock>();
+ // These values must be smaller than the input size. Which triggers the
+ // function to fail.
+ ON_CALL(*staging_buffer1.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+ ON_CALL(*staging_buffer2.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+ ON_CALL(*gpu_buffer.Get(), GetDesc(_))
+ .WillByDefault(SetBufferDescSize(20000));
+
+ EXPECT_CALL(*device_mock_.Get(),
+ CreateBuffer(
+ BufferDescHas(D3D11_USAGE_STAGING, 0u,
+ D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE),
+ nullptr, _))
+ .WillOnce(
+ DoAll(AddRefAndSetArgPointee<2>(staging_buffer1.Get()), Return(S_OK)))
+ .WillOnce(DoAll(AddRefAndSetArgPointee<2>(staging_buffer2.Get()),
+ Return(S_OK)));
+ EXPECT_CALL(*device_mock_.Get(),
+ CreateBuffer(BufferDescHas(D3D11_USAGE_DEFAULT,
+ D3D11_BIND_RENDER_TARGET, 0u),
+ nullptr, _))
+ .WillOnce(
+ DoAll(AddRefAndSetArgPointee<2>(gpu_buffer.Get()), Return(S_OK)));
+
+ CallbackMock callbacks;
+ EXPECT_CALL(callbacks, DecryptCallback(Decryptor::kError, IsNull()));
+
+ scoped_refptr<DecoderBuffer> encrypted_buffer = TestDecoderBuffer(
+ kInput.data(), base::size(kInput), kKeyId, kIv, kSubsample);
+ decryptor_->Decrypt(Decryptor::kAudio, encrypted_buffer,
+ base::BindRepeating(&CallbackMock::DecryptCallback,
+ base::Unretained(&callbacks)));
+}
+
+// If there is no decrypt config, it must be in the clear, so it shouldn't
+// change the output.
+TEST_F(D3D11DecryptorTest, NoDecryptConfig) {
+ const uint8_t kInput[] = {
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ };
+ scoped_refptr<DecoderBuffer> clear_buffer =
+ DecoderBuffer::CopyFrom(kInput, base::size(kInput));
+ clear_buffer->set_timestamp(kTestTimestamp);
+ CallbackMock callbacks;
+ EXPECT_CALL(callbacks,
+ DecryptCallback(Decryptor::kSuccess,
+ OutputDataEquals(kInput, base::size(kInput))));
+ decryptor_->Decrypt(Decryptor::kAudio, clear_buffer,
+ base::BindRepeating(&CallbackMock::DecryptCallback,
+ base::Unretained(&callbacks)));
+}
+
+// The current decryptor cannot deal with pattern encryption.
+TEST_F(D3D11DecryptorTest, PatternDecryption) {
+ const uint8_t kInput[] = {
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ };
+ const std::string kKeyId = "some 16 byte id.";
+ const std::string kIv = "some 16 byte iv.";
+ scoped_refptr<DecoderBuffer> encrypted_buffer =
+ DecoderBuffer::CopyFrom(kInput, base::size(kInput));
+ std::vector<SubsampleEntry> subsamples = {SubsampleEntry(0, 16)};
+ encrypted_buffer->set_decrypt_config(DecryptConfig::CreateCbcsConfig(
+ kKeyId, kIv, subsamples, EncryptionPattern(1, 9)));
+
+ CallbackMock callbacks;
+ EXPECT_CALL(callbacks, DecryptCallback(Decryptor::kError, IsNull()));
+ decryptor_->Decrypt(Decryptor::kAudio, encrypted_buffer,
+ base::BindRepeating(&CallbackMock::DecryptCallback,
+ base::Unretained(&callbacks)));
+}
+
+// If there is no decrypt context, it's missing a key.
+TEST_F(D3D11DecryptorTest, NoDecryptContext) {
+ const uint8_t kInput[] = {
+ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ };
+ const SubsampleEntry kSubsample(0, base::size(kInput));
+ const std::string kKeyId = "some 16 byte id.";
+ const std::string kIv = "some 16 byte iv.";
+ scoped_refptr<DecoderBuffer> encrypted_buffer =
+ TestDecoderBuffer(kInput, base::size(kInput), kKeyId, kIv, kSubsample);
+
+ EXPECT_CALL(mock_proxy_, GetD3D11DecryptContext(kKeyId))
+ .WillOnce(Return(base::nullopt));
+
+ CallbackMock callbacks;
+ EXPECT_CALL(callbacks, DecryptCallback(Decryptor::kNoKey, IsNull()));
+ decryptor_->Decrypt(Decryptor::kAudio, encrypted_buffer,
+ base::BindRepeating(&CallbackMock::DecryptCallback,
+ base::Unretained(&callbacks)));
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_h264_accelerator.cc b/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
index fc6640f9527..6c21797245d 100644
--- a/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
@@ -8,6 +8,7 @@
#include "base/memory/ptr_util.h"
#include "base/trace_event/trace_event.h"
+#include "media/base/cdm_proxy_context.h"
#include "media/gpu/h264_decoder.h"
#include "media/gpu/h264_dpb.h"
#include "media/gpu/windows/d3d11_picture_buffer.h"
@@ -23,6 +24,24 @@
namespace media {
+using Status = H264Decoder::H264Accelerator::Status;
+
+namespace {
+
+// Converts SubsampleEntry to D3D11_VIDEO_DECODER_SUB_SAMPLE_MAPPING_BLOCK.
+void AppendSubsamples(
+ const std::vector<SubsampleEntry>& from,
+ std::vector<D3D11_VIDEO_DECODER_SUB_SAMPLE_MAPPING_BLOCK>* to) {
+ for (const auto& from_entry : from) {
+ D3D11_VIDEO_DECODER_SUB_SAMPLE_MAPPING_BLOCK subsample = {};
+ subsample.ClearSize = from_entry.clear_bytes;
+ subsample.EncryptedSize = from_entry.cypher_bytes;
+ to->push_back(subsample);
+ }
+}
+
+} // namespace
+
class D3D11H264Picture : public H264Picture {
public:
D3D11H264Picture(D3D11PictureBuffer* picture)
@@ -43,10 +62,12 @@ D3D11H264Picture::~D3D11H264Picture() {
D3D11H264Accelerator::D3D11H264Accelerator(
D3D11VideoDecoderClient* client,
+ CdmProxyContext* cdm_proxy_context,
Microsoft::WRL::ComPtr<ID3D11VideoDecoder> video_decoder,
Microsoft::WRL::ComPtr<ID3D11VideoDevice> video_device,
- Microsoft::WRL::ComPtr<ID3D11VideoContext> video_context)
+ Microsoft::WRL::ComPtr<ID3D11VideoContext1> video_context)
: client_(client),
+ cdm_proxy_context_(cdm_proxy_context),
video_decoder_(video_decoder),
video_device_(video_device),
video_context_(video_context) {}
@@ -61,7 +82,7 @@ scoped_refptr<H264Picture> D3D11H264Accelerator::CreateH264Picture() {
return base::MakeRefCounted<D3D11H264Picture>(picture);
}
-bool D3D11H264Accelerator::SubmitFrameMetadata(
+Status D3D11H264Accelerator::SubmitFrameMetadata(
const H264SPS* sps,
const H264PPS* pps,
const H264DPB& dpb,
@@ -69,22 +90,51 @@ bool D3D11H264Accelerator::SubmitFrameMetadata(
const H264Picture::Vector& ref_pic_listb0,
const H264Picture::Vector& ref_pic_listb1,
const scoped_refptr<H264Picture>& pic) {
+ const bool is_encrypted = pic->decrypt_config();
+ if (is_encrypted && !cdm_proxy_context_) {
+ DVLOG(1) << "The input is encrypted but there is no proxy context.";
+ return Status::kFail;
+ }
+
+ std::unique_ptr<D3D11_VIDEO_DECODER_BEGIN_FRAME_CRYPTO_SESSION> content_key;
+ // This decrypt context has to be outside the if block because pKeyInfo in
+ // D3D11_VIDEO_DECODER_BEGIN_FRAME_CRYPTO_SESSION is a pointer (to a GUID).
+ base::Optional<CdmProxyContext::D3D11DecryptContext> decrypt_context;
+ if (is_encrypted) {
+ decrypt_context = cdm_proxy_context_->GetD3D11DecryptContext(
+ pic->decrypt_config()->key_id());
+ if (!decrypt_context) {
+ DVLOG(1) << "Cannot find decrypt context for the frame.";
+ return Status::kTryAgain;
+ }
+
+ content_key =
+ std::make_unique<D3D11_VIDEO_DECODER_BEGIN_FRAME_CRYPTO_SESSION>();
+ content_key->pCryptoSession = decrypt_context->crypto_session;
+ content_key->pBlob = const_cast<void*>(decrypt_context->key_blob);
+ content_key->BlobSize = decrypt_context->key_blob_size;
+ content_key->pKeyInfoId = &decrypt_context->key_info_guid;
+ frame_iv_.assign(pic->decrypt_config()->iv().begin(),
+ pic->decrypt_config()->iv().end());
+ }
+
scoped_refptr<D3D11H264Picture> our_pic(
static_cast<D3D11H264Picture*>(pic.get()));
HRESULT hr;
for (;;) {
hr = video_context_->DecoderBeginFrame(
- video_decoder_.Get(), our_pic->picture->output_view().Get(), 0,
- nullptr);
+ video_decoder_.Get(), our_pic->picture->output_view().Get(),
+ content_key ? sizeof(*content_key) : 0, content_key.get());
if (hr == E_PENDING || hr == D3DERR_WASSTILLDRAWING) {
// Hardware is busy. We should make the call again.
// TODO(liberato): For now, just busy wait.
;
} else if (!SUCCEEDED(hr)) {
- LOG(ERROR) << "DecoderBeginFrame failed";
- return false;
+ LOG(ERROR) << "DecoderBeginFrame failed: "
+ << logging::SystemErrorCodeToString(hr);
+ return Status::kFail;
} else {
break;
}
@@ -100,17 +150,14 @@ bool D3D11H264Accelerator::SubmitFrameMetadata(
used_for_reference_flags_ = 0;
non_existing_frame_flags_ = 0;
- int i = 0;
-
// TODO(liberato): this is similar to H264Accelerator. can they share code?
- for (auto it = dpb.begin(); it != dpb.end(); it++) {
+ int i = 0;
+ for (auto it = dpb.begin(); it != dpb.end(); i++, it++) {
scoped_refptr<D3D11H264Picture> our_ref_pic(
static_cast<D3D11H264Picture*>(it->get()));
- if (!our_ref_pic->ref) {
- i++;
+ if (!our_ref_pic->ref)
continue;
- }
ref_frame_list_[i].Index7Bits = our_ref_pic->level_;
ref_frame_list_[i].AssociatedFlag = our_ref_pic->long_term;
field_order_cnt_list_[i][0] = our_ref_pic->top_field_order_cnt;
@@ -118,13 +165,12 @@ bool D3D11H264Accelerator::SubmitFrameMetadata(
frame_num_list_[i] = ref_frame_list_[i].AssociatedFlag
? our_ref_pic->long_term_pic_num
: our_ref_pic->frame_num;
- int ref = 3;
+ unsigned ref = 3;
used_for_reference_flags_ |= ref << (2 * i);
non_existing_frame_flags_ |= (our_ref_pic->nonexisting) << i;
- i++;
}
slice_info_.clear();
- return RetrieveBitstreamBuffer();
+ return RetrieveBitstreamBuffer() ? Status::kOk : Status::kFail;
}
bool D3D11H264Accelerator::RetrieveBitstreamBuffer() {
@@ -147,13 +193,15 @@ bool D3D11H264Accelerator::RetrieveBitstreamBuffer() {
return true;
}
-bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
- const H264SliceHeader* slice_hdr,
- const H264Picture::Vector& ref_pic_list0,
- const H264Picture::Vector& ref_pic_list1,
- const scoped_refptr<H264Picture>& pic,
- const uint8_t* data,
- size_t size) {
+Status D3D11H264Accelerator::SubmitSlice(
+ const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ const scoped_refptr<H264Picture>& pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) {
scoped_refptr<D3D11H264Picture> our_pic(
static_cast<D3D11H264Picture*>(pic.get()));
DXVA_PicParams_H264 pic_param = {};
@@ -167,6 +215,9 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
FROM_SPS_TO_PP2(wFrameHeightInMbsMinus1, pic_height_in_map_units_minus1);
pic_param.CurrPic.Index7Bits = our_pic->level_;
pic_param.CurrPic.AssociatedFlag = slice_hdr->bottom_field_flag;
+ // The H.264 specification now calls this |max_num_ref_frames|, while
+ // DXVA_PicParams_H264 continues to use the old name, |num_ref_frames|.
+ // See DirectX Video Acceleration for H.264/MPEG-4 AVC Decoding (4.2).
FROM_SPS_TO_PP2(num_ref_frames, max_num_ref_frames);
FROM_SLICE_TO_PP(field_pic_flag);
@@ -179,12 +230,13 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
FROM_PPS_TO_PP(constrained_intra_pred_flag);
FROM_PPS_TO_PP(weighted_pred_flag);
FROM_PPS_TO_PP(weighted_bipred_idc);
+ // From "DirectX Video Acceleration Specification for H.264/AVC Decoding":
+ // "The value shall be 1 unless the restricted-mode profile in use explicitly
+ // supports the value 0."
pic_param.MbsConsecutiveFlag = 1;
FROM_SPS_TO_PP(frame_mbs_only_flag);
FROM_PPS_TO_PP(transform_8x8_mode_flag);
- // TODO(liberato): sandersd@ believes that this should only be set for level
- // >= 3.1 . verify this and fix as needed.
- pic_param.MinLumaBipredSize8x8Flag = 1;
+ pic_param.MinLumaBipredSize8x8Flag = sps_.level_idc >= 31;
pic_param.IntraPicFlag = slice_hdr->IsISlice();
FROM_SPS_TO_PP(bit_depth_luma_minus8);
FROM_SPS_TO_PP(bit_depth_chroma_minus8);
@@ -208,13 +260,15 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
FROM_PPS_TO_PP(pic_init_qs_minus26);
FROM_PPS_TO_PP(chroma_qp_index_offset);
FROM_PPS_TO_PP(second_chroma_qp_index_offset);
+ // |ContinuationFlag| indicates that we've filled in the remaining fields.
pic_param.ContinuationFlag = 1;
FROM_PPS_TO_PP(pic_init_qp_minus26);
FROM_PPS_TO_PP2(num_ref_idx_l0_active_minus1,
num_ref_idx_l0_default_active_minus1);
FROM_PPS_TO_PP2(num_ref_idx_l1_active_minus1,
num_ref_idx_l1_default_active_minus1);
- // UNUSED: Reserved8BitsA
+ // UNUSED: Reserved8BitsA. Must be zero unless bit 13 of
+ // ConfigDecoderSpecific is set.
memcpy(pic_param.FrameNumList, frame_num_list_,
sizeof pic_param.FrameNumList);
pic_param.UsedForReferenceFlags = used_for_reference_flags_;
@@ -229,15 +283,18 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
FROM_PPS_TO_PP2(pic_order_present_flag,
bottom_field_pic_order_in_frame_present_flag);
FROM_PPS_TO_PP(num_slice_groups_minus1);
- CHECK_EQ(0u, pic_param.num_slice_groups_minus1);
- // UNUSED: slice_group_map_type
+ if (pic_param.num_slice_groups_minus1) {
+ // TODO(liberato): UMA?
+ // TODO(liberato): media log?
+ LOG(ERROR) << "num_slice_groups_minus1 == "
+ << pic_param.num_slice_groups_minus1;
+ return Status::kFail;
+ }
+ // UNUSED: slice_group_map_type (undocumented)
FROM_PPS_TO_PP(deblocking_filter_control_present_flag);
FROM_PPS_TO_PP(redundant_pic_cnt_present_flag);
- // UNUSED: Reserved8BitsB
- // UNUSED: slice_group_change_rate
- //
- //
- //
+ // UNUSED: Reserved8BitsB (unused, should always be zero).
+ // UNUSED: slice_group_change_rate (undocumented)
pic_param.StatusReportFeedbackNumber = 1;
@@ -248,7 +305,7 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
&buffer_size, &buffer);
if (!SUCCEEDED(hr)) {
LOG(ERROR) << "ReleaseDecoderBuffer (PictureParams) failed";
- return false;
+ return Status::kFail;
}
memcpy(buffer, &pic_param, sizeof(pic_param));
@@ -256,7 +313,7 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
video_decoder_.Get(), D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
if (!SUCCEEDED(hr)) {
LOG(ERROR) << "ReleaseDecoderBuffer (PictureParams) failed";
- return false;
+ return Status::kFail;
}
DXVA_Qmatrix_H264 iq_matrix_buf = {};
@@ -288,7 +345,7 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
&buffer);
if (!SUCCEEDED(hr)) {
LOG(ERROR) << "GetDecoderBuffer (QuantMatrix) failed";
- return false;
+ return Status::kFail;
}
memcpy(buffer, &iq_matrix_buf, sizeof(iq_matrix_buf));
hr = video_context_->ReleaseDecoderBuffer(
@@ -296,7 +353,7 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX);
if (!SUCCEEDED(hr)) {
LOG(ERROR) << "ReleaseDecoderBuffer (QuantMatrix) failed";
- return false;
+ return Status::kFail;
}
// Ideally all slices in a frame are put in the same bitstream buffer.
@@ -308,16 +365,42 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
size_t remaining_bitstream = out_bitstream_size;
size_t start_location = 0;
+ const bool is_encrypted = pic->decrypt_config();
+
+ if (is_encrypted) {
+ // For now, the entire frame has to fit into the bitstream buffer. This way
+ // the subsample ClearSize adjustment below should work.
+ if (bitstream_buffer_size_ < remaining_bitstream) {
+ LOG(ERROR) << "Input slice NALU (" << remaining_bitstream
+ << ") too big to fit in the bistream buffer ("
+ << bitstream_buffer_size_ << ").";
+ return Status::kFail;
+ }
+
+ AppendSubsamples(subsamples, &subsamples_);
+ if (!subsamples.empty()) {
+ // 3 added to clear bytes because a start code is prepended to the slice
+ // NALU.
+ // TODO(rkuroiwa): This should be done right after the start code is
+ // written to the buffer, but currently the start code is written in the
+ // loop (which is not the right place, there's only one slice NALU passed
+ // into this function) and it's not easy to identify where the subsample
+ // starts in the buffer.
+ subsamples_[subsamples_.size() - subsamples.size()].ClearSize += 3;
+ }
+ }
+
while (remaining_bitstream > 0) {
if (bitstream_buffer_size_ < remaining_bitstream &&
slice_info_.size() > 0) {
if (!SubmitSliceData()) {
LOG(ERROR) << "SubmitSliceData failed";
- return false;
+ return Status::kFail;
}
+
if (!RetrieveBitstreamBuffer()) {
LOG(ERROR) << "RetrieveBitstreamBuffer failed";
- return false;
+ return Status::kFail;
}
}
@@ -358,7 +441,7 @@ bool D3D11H264Accelerator::SubmitSlice(const H264PPS* pps,
bitstream_buffer_bytes_ += bytes_to_copy;
}
- return true;
+ return Status::kOk;
}
bool D3D11H264Accelerator::SubmitSliceData() {
@@ -392,7 +475,7 @@ bool D3D11H264Accelerator::SubmitSliceData() {
return false;
}
- D3D11_VIDEO_DECODER_BUFFER_DESC buffers[4] = {};
+ D3D11_VIDEO_DECODER_BUFFER_DESC1 buffers[4] = {};
buffers[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
buffers[0].DataOffset = 0;
buffers[0].DataSize = sizeof(DXVA_PicParams_H264);
@@ -402,37 +485,52 @@ bool D3D11H264Accelerator::SubmitSliceData() {
buffers[1].DataSize = sizeof(DXVA_Qmatrix_H264);
buffers[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
buffers[2].DataOffset = 0;
- buffers[2].DataSize = (UINT)(sizeof(slice_info_[0]) * slice_info_.size());
+ buffers[2].DataSize = sizeof(slice_info_[0]) * slice_info_.size();
buffers[3].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
buffers[3].DataOffset = 0;
- buffers[3].DataSize = (UINT)current_offset_;
+ buffers[3].DataSize = current_offset_;
+
+ if (!frame_iv_.empty()) {
+ buffers[3].pIV = frame_iv_.data();
+ buffers[3].IVSize = frame_iv_.size();
+ // Subsmaples matter iff there is IV, for decryption.
+ if (!subsamples_.empty()) {
+ buffers[3].pSubSampleMappingBlock = subsamples_.data();
+ buffers[3].SubSampleMappingCount = subsamples_.size();
+ }
+ }
- hr = video_context_->SubmitDecoderBuffers(video_decoder_.Get(), 4, buffers);
+ hr = video_context_->SubmitDecoderBuffers1(video_decoder_.Get(),
+ base::size(buffers), buffers);
current_offset_ = 0;
slice_info_.clear();
bitstream_buffer_bytes_ = nullptr;
bitstream_buffer_size_ = 0;
+ frame_iv_.clear();
+ subsamples_.clear();
if (!SUCCEEDED(hr)) {
- LOG(ERROR) << "SubmitDecoderBuffers failed";
+ LOG(ERROR) << "SubmitDecoderBuffers failed: "
+ << logging::SystemErrorCodeToString(hr);
return false;
}
return true;
}
-bool D3D11H264Accelerator::SubmitDecode(const scoped_refptr<H264Picture>& pic) {
+Status D3D11H264Accelerator::SubmitDecode(
+ const scoped_refptr<H264Picture>& pic) {
if (!SubmitSliceData()) {
LOG(ERROR) << "SubmitSliceData failed";
- return false;
+ return Status::kFail;
}
HRESULT hr = video_context_->DecoderEndFrame(video_decoder_.Get());
if (!SUCCEEDED(hr)) {
LOG(ERROR) << "DecoderEndFrame failed";
- return false;
+ return Status::kFail;
}
- return true;
+ return Status::kOk;
}
void D3D11H264Accelerator::Reset() {
@@ -452,7 +550,8 @@ bool D3D11H264Accelerator::OutputPicture(
const scoped_refptr<H264Picture>& pic) {
scoped_refptr<D3D11H264Picture> our_pic(
static_cast<D3D11H264Picture*>(pic.get()));
- client_->OutputResult(our_pic->picture);
+
+ client_->OutputResult(our_pic->picture, pic->get_colorspace());
return true;
}
diff --git a/chromium/media/gpu/windows/d3d11_h264_accelerator.h b/chromium/media/gpu/windows/d3d11_h264_accelerator.h
index 173ff4336f2..11825d57dd4 100644
--- a/chromium/media/gpu/windows/d3d11_h264_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_h264_accelerator.h
@@ -5,7 +5,7 @@
#ifndef MEDIA_GPU_WINDOWS_D3D11_H264_ACCELERATOR_H_
#define MEDIA_GPU_WINDOWS_D3D11_H264_ACCELERATOR_H_
-#include <d3d11.h>
+#include <d3d11_1.h>
#include <d3d9.h>
#include <dxva.h>
#include <wrl/client.h>
@@ -23,41 +23,46 @@
#include "ui/gl/gl_image.h"
namespace media {
+class CdmProxyContext;
class D3D11H264Accelerator;
class D3D11PictureBuffer;
class D3D11VideoDecoderClient {
public:
virtual D3D11PictureBuffer* GetPicture() = 0;
- virtual void OutputResult(D3D11PictureBuffer* picture) = 0;
+ virtual void OutputResult(D3D11PictureBuffer* picture,
+ const VideoColorSpace& buffer_colorspace) = 0;
};
class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
public:
+ // |cdm_proxy_context| may be null for clear content.
D3D11H264Accelerator(
D3D11VideoDecoderClient* client,
+ CdmProxyContext* cdm_proxy_context,
Microsoft::WRL::ComPtr<ID3D11VideoDecoder> video_decoder,
Microsoft::WRL::ComPtr<ID3D11VideoDevice> video_device,
- Microsoft::WRL::ComPtr<ID3D11VideoContext> video_context);
+ Microsoft::WRL::ComPtr<ID3D11VideoContext1> video_context);
~D3D11H264Accelerator() override;
// H264Decoder::H264Accelerator implementation.
scoped_refptr<H264Picture> CreateH264Picture() override;
- bool SubmitFrameMetadata(const H264SPS* sps,
- const H264PPS* pps,
- const H264DPB& dpb,
- const H264Picture::Vector& ref_pic_listp0,
- const H264Picture::Vector& ref_pic_listb0,
- const H264Picture::Vector& ref_pic_listb1,
- const scoped_refptr<H264Picture>& pic) override;
- bool SubmitSlice(const H264PPS* pps,
- const H264SliceHeader* slice_hdr,
- const H264Picture::Vector& ref_pic_list0,
- const H264Picture::Vector& ref_pic_list1,
- const scoped_refptr<H264Picture>& pic,
- const uint8_t* data,
- size_t size) override;
- bool SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
+ Status SubmitFrameMetadata(const H264SPS* sps,
+ const H264PPS* pps,
+ const H264DPB& dpb,
+ const H264Picture::Vector& ref_pic_listp0,
+ const H264Picture::Vector& ref_pic_listb0,
+ const H264Picture::Vector& ref_pic_listb1,
+ const scoped_refptr<H264Picture>& pic) override;
+ Status SubmitSlice(const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ const scoped_refptr<H264Picture>& pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) override;
+ Status SubmitDecode(const scoped_refptr<H264Picture>& pic) override;
void Reset() override;
bool OutputPicture(const scoped_refptr<H264Picture>& pic) override;
@@ -66,10 +71,11 @@ class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
bool RetrieveBitstreamBuffer();
D3D11VideoDecoderClient* client_;
+ CdmProxyContext* const cdm_proxy_context_;
Microsoft::WRL::ComPtr<ID3D11VideoDecoder> video_decoder_;
Microsoft::WRL::ComPtr<ID3D11VideoDevice> video_device_;
- Microsoft::WRL::ComPtr<ID3D11VideoContext> video_context_;
+ Microsoft::WRL::ComPtr<ID3D11VideoContext1> video_context_;
// This information set at the beginning of a frame and saved for processing
// all the slices.
@@ -86,6 +92,12 @@ class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
size_t bitstream_buffer_size_ = 0;
uint8_t* bitstream_buffer_bytes_ = nullptr;
+ // This contains the subsamples (clear and encrypted) of the slice data
+ // in D3D11_VIDEO_DECODER_BUFFER_BITSTREAM buffer.
+ std::vector<D3D11_VIDEO_DECODER_SUB_SAMPLE_MAPPING_BLOCK> subsamples_;
+ // IV for the current frame.
+ std::vector<uint8_t> frame_iv_;
+
DISALLOW_COPY_AND_ASSIGN(D3D11H264Accelerator);
};
diff --git a/chromium/media/gpu/windows/d3d11_mocks.cc b/chromium/media/gpu/windows/d3d11_mocks.cc
index b2d15c1c86e..a32f2990b09 100644
--- a/chromium/media/gpu/windows/d3d11_mocks.cc
+++ b/chromium/media/gpu/windows/d3d11_mocks.cc
@@ -5,12 +5,24 @@
#include "media/gpu/windows/d3d11_mocks.h"
namespace media {
+D3D11CreateDeviceMock::D3D11CreateDeviceMock() = default;
+D3D11CreateDeviceMock::~D3D11CreateDeviceMock() = default;
+
D3D11Texture2DMock::D3D11Texture2DMock() = default;
D3D11Texture2DMock::~D3D11Texture2DMock() = default;
+D3D11BufferMock::D3D11BufferMock() = default;
+D3D11BufferMock::~D3D11BufferMock() = default;
+
D3D11DeviceMock::D3D11DeviceMock() = default;
D3D11DeviceMock::~D3D11DeviceMock() = default;
+DXGIDevice2Mock::DXGIDevice2Mock() = default;
+DXGIDevice2Mock::~DXGIDevice2Mock() = default;
+
+DXGIAdapter3Mock::DXGIAdapter3Mock() = default;
+DXGIAdapter3Mock::~DXGIAdapter3Mock() = default;
+
D3D11VideoDeviceMock::D3D11VideoDeviceMock() = default;
D3D11VideoDeviceMock::~D3D11VideoDeviceMock() = default;
diff --git a/chromium/media/gpu/windows/d3d11_mocks.h b/chromium/media/gpu/windows/d3d11_mocks.h
index 4e01f0dc4bc..522fd6f5406 100644
--- a/chromium/media/gpu/windows/d3d11_mocks.h
+++ b/chromium/media/gpu/windows/d3d11_mocks.h
@@ -6,8 +6,12 @@
#include <d3d11.h>
#include <d3d11_1.h>
+#include <dxgi1_4.h>
+#include <wrl/client.h>
+#include <wrl/implements.h>
#include "base/win/iunknown_impl.h"
+#include "media/gpu/windows/d3d11_create_device_cb.h"
#include "testing/gmock/include/gmock/gmock.h"
#define MOCK_STDCALL_METHOD0(Name, Types) \
@@ -42,6 +46,32 @@
namespace media {
+// Use this action when using SetArgPointee with COM pointers.
+// e.g.
+// EXPECT_CALL(*device_mock_.Get(), QueryInterface(IID_ID3D11VideoDevice, _))
+// .WillRepeatedly(DoAll(
+// AddRefAndSetArgPointee<1>(video_device_mock_.Get()), Return(S_OK)));
+ACTION_TEMPLATE(AddRefAndSetArgPointee,
+ HAS_1_TEMPLATE_PARAMS(int, k),
+ AND_1_VALUE_PARAMS(p)) {
+ p->AddRef();
+ *std::get<k>(args) = p;
+}
+
+// Use this function to create a mock so that they are ref-counted correctly.
+template <typename Interface>
+Microsoft::WRL::ComPtr<Interface> CreateD3D11Mock() {
+ return new Interface();
+}
+
+// Class for mocking D3D11CreateDevice() function.
+class D3D11CreateDeviceMock {
+ public:
+ D3D11CreateDeviceMock();
+ ~D3D11CreateDeviceMock();
+ MOCK_METHOD10(Create, D3D11CreateDeviceCB::RunType);
+};
+
template <class Interface>
class MockCOMInterface : public Interface, public base::win::IUnknownImpl {
public:
@@ -76,6 +106,21 @@ class D3D11Texture2DMock : public MockCOMInterface<ID3D11Texture2D> {
MOCK_STDCALL_METHOD1(GetDesc, void(D3D11_TEXTURE2D_DESC*));
};
+class D3D11BufferMock : public MockCOMInterface<ID3D11Buffer> {
+ public:
+ D3D11BufferMock();
+ ~D3D11BufferMock() override;
+ MOCK_STDCALL_METHOD1(GetDevice, void(ID3D11Device**));
+ MOCK_STDCALL_METHOD3(GetPrivateData, HRESULT(const GUID&, UINT*, void*));
+ MOCK_STDCALL_METHOD3(SetPrivateData, HRESULT(const GUID&, UINT, const void*));
+ MOCK_STDCALL_METHOD2(SetPrivateDataInterface,
+ HRESULT(const GUID&, const IUnknown*));
+ MOCK_STDCALL_METHOD1(GetType, void(D3D11_RESOURCE_DIMENSION*));
+ MOCK_STDCALL_METHOD1(SetEvictionPriority, void(UINT));
+ MOCK_STDCALL_METHOD0(GetEvictionPriority, UINT());
+ MOCK_STDCALL_METHOD1(GetDesc, void(D3D11_BUFFER_DESC*));
+};
+
// This classs must mock QueryInterface, since a lot of things are
// QueryInterfac()ed thru this class.
class D3D11DeviceMock : public MockCOMInterface<ID3D11Device> {
@@ -233,6 +278,71 @@ class D3D11DeviceMock : public MockCOMInterface<ID3D11Device> {
MOCK_STDCALL_METHOD0(GetExceptionMode, UINT());
};
+class DXGIDevice2Mock : public MockCOMInterface<IDXGIDevice2> {
+ public:
+ DXGIDevice2Mock();
+ ~DXGIDevice2Mock() override;
+
+ MOCK_STDCALL_METHOD1(EnqueueSetEvent, HRESULT(HANDLE));
+ MOCK_STDCALL_METHOD3(OfferResources,
+ HRESULT(UINT,
+ IDXGIResource* const*,
+ DXGI_OFFER_RESOURCE_PRIORITY));
+ MOCK_STDCALL_METHOD3(ReclaimResources,
+ HRESULT(UINT, IDXGIResource* const*, BOOL*));
+ MOCK_STDCALL_METHOD1(GetMaximumFrameLatency, HRESULT(UINT*));
+ MOCK_STDCALL_METHOD1(SetMaximumFrameLatency, HRESULT(UINT));
+
+ MOCK_STDCALL_METHOD5(CreateSurface,
+ HRESULT(const DXGI_SURFACE_DESC*,
+ UINT,
+ DXGI_USAGE,
+ const DXGI_SHARED_RESOURCE*,
+ IDXGISurface**));
+ MOCK_STDCALL_METHOD1(GetAdapter, HRESULT(IDXGIAdapter**));
+ MOCK_STDCALL_METHOD1(GetGPUThreadPriority, HRESULT(INT*));
+ MOCK_STDCALL_METHOD3(QueryResourceResidency,
+ HRESULT(IUnknown* const*, DXGI_RESIDENCY*, UINT));
+ MOCK_STDCALL_METHOD1(SetGPUThreadPriority, HRESULT(INT));
+
+ MOCK_STDCALL_METHOD2(GetParent, HRESULT(REFIID, void**));
+ MOCK_STDCALL_METHOD3(GetPrivateData, HRESULT(REFGUID, UINT*, void*));
+ MOCK_STDCALL_METHOD3(SetPrivateData, HRESULT(REFGUID, UINT, const void*));
+ MOCK_STDCALL_METHOD2(SetPrivateDataInterface,
+ HRESULT(REFGUID, const IUnknown*));
+};
+
+class DXGIAdapter3Mock : public MockCOMInterface<IDXGIAdapter3> {
+ public:
+ DXGIAdapter3Mock();
+ ~DXGIAdapter3Mock() override;
+
+ MOCK_STDCALL_METHOD3(QueryVideoMemoryInfo,
+ HRESULT(UINT,
+ DXGI_MEMORY_SEGMENT_GROUP,
+ DXGI_QUERY_VIDEO_MEMORY_INFO*));
+ MOCK_STDCALL_METHOD2(RegisterHardwareContentProtectionTeardownStatusEvent,
+ HRESULT(HANDLE, DWORD*));
+ MOCK_STDCALL_METHOD2(RegisterVideoMemoryBudgetChangeNotificationEvent,
+ HRESULT(HANDLE, DWORD*));
+ MOCK_STDCALL_METHOD3(SetVideoMemoryReservation,
+ HRESULT(UINT, DXGI_MEMORY_SEGMENT_GROUP, UINT64));
+ MOCK_STDCALL_METHOD1(UnregisterHardwareContentProtectionTeardownStatus,
+ void(DWORD));
+ MOCK_STDCALL_METHOD1(UnregisterVideoMemoryBudgetChangeNotification,
+ void(DWORD));
+ MOCK_STDCALL_METHOD1(GetDesc2, HRESULT(DXGI_ADAPTER_DESC2*));
+ MOCK_STDCALL_METHOD1(GetDesc1, HRESULT(DXGI_ADAPTER_DESC1*));
+ MOCK_STDCALL_METHOD2(CheckInterfaceSupport, HRESULT(REFGUID, LARGE_INTEGER*));
+ MOCK_STDCALL_METHOD2(EnumOutputs, HRESULT(UINT, IDXGIOutput**));
+ MOCK_STDCALL_METHOD1(GetDesc, HRESULT(DXGI_ADAPTER_DESC*));
+ MOCK_STDCALL_METHOD2(GetParent, HRESULT(REFIID, void**));
+ MOCK_STDCALL_METHOD3(GetPrivateData, HRESULT(REFGUID, UINT*, void*));
+ MOCK_STDCALL_METHOD3(SetPrivateData, HRESULT(REFGUID, UINT, const void*));
+ MOCK_STDCALL_METHOD2(SetPrivateDataInterface,
+ HRESULT(REFGUID, const IUnknown*));
+};
+
// TODO(crbug.com/788880): This may not be necessary. Tyr out and see if
// D3D11VideoDevice1Mock is sufficient. and if so, remove this.
class D3D11VideoDeviceMock : public MockCOMInterface<ID3D11VideoDevice> {
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer.cc b/chromium/media/gpu/windows/d3d11_picture_buffer.cc
index 1fbb749d030..645b819d314 100644
--- a/chromium/media/gpu/windows/d3d11_picture_buffer.cc
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer.cc
@@ -111,13 +111,13 @@ bool D3D11PictureBuffer::GpuResources::Init(
// Create the textures and attach them to the mailboxes.
std::vector<uint32_t> service_ids;
for (int texture_idx = 0; texture_idx < textures_per_picture; texture_idx++) {
- texture_refs_.push_back(decoder_helper->CreateTexture(
+ textures_.push_back(decoder_helper->CreateTexture(
target, GL_RGBA, size.width(), size.height(), GL_RGBA,
GL_UNSIGNED_BYTE));
- service_ids.push_back(texture_refs_[texture_idx]->service_id());
+ service_ids.push_back(textures_[texture_idx]->service_id());
mailbox_manager->ProduceTexture(mailboxes[texture_idx],
- texture_refs_[texture_idx]->texture());
+ textures_[texture_idx]->GetTextureBase());
}
// Create the stream for zero-copy use by gl.
@@ -184,11 +184,9 @@ bool D3D11PictureBuffer::GpuResources::Init(
gl_image_dxgi->SetTexture(angle_texture, level);
// Bind the image to each texture.
- for (size_t texture_idx = 0; texture_idx < texture_refs_.size();
- texture_idx++) {
- texture_manager->SetLevelImage(texture_refs_[texture_idx].get(),
- GL_TEXTURE_EXTERNAL_OES, 0, gl_image.get(),
- gpu::gles2::Texture::ImageState::BOUND);
+ for (size_t texture_idx = 0; texture_idx < textures_.size(); texture_idx++) {
+ textures_[texture_idx]->BindImage(gl_image.get(),
+ false /* client_managed */);
}
return true;
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer.h b/chromium/media/gpu/windows/d3d11_picture_buffer.h
index 20ad12ea286..b8c3b1c9d56 100644
--- a/chromium/media/gpu/windows/d3d11_picture_buffer.h
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer.h
@@ -109,7 +109,7 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
Microsoft::WRL::ComPtr<ID3D11Texture2D> angle_texture,
int textures_per_picture);
- std::vector<scoped_refptr<gpu::gles2::TextureRef>> texture_refs_;
+ std::vector<std::unique_ptr<gpu::gles2::AbstractTexture>> textures_;
private:
DISALLOW_COPY_AND_ASSIGN(GpuResources);
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.cc b/chromium/media/gpu/windows/d3d11_video_decoder.cc
index ab4a0512f8e..3d051cc9e2a 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.cc
@@ -4,8 +4,11 @@
#include "media/gpu/windows/d3d11_video_decoder.h"
+#include <d3d11_4.h>
+
#include "base/bind.h"
#include "base/callback.h"
+#include "base/metrics/histogram_macros.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_codecs.h"
@@ -40,7 +43,6 @@ base::Callback<void(Args...)> BindToCurrentThreadIfWeakPtr(
return media::BindToCurrentLoop(
base::Bind(&CallbackOnProperThread<T, Args...>, weak_ptr, cb));
}
-
} // namespace
namespace media {
@@ -70,6 +72,7 @@ D3D11VideoDecoder::D3D11VideoDecoder(
impl_task_runner_(std::move(gpu_task_runner)),
gpu_preferences_(gpu_preferences),
gpu_workarounds_(gpu_workarounds),
+ create_device_func_(base::BindRepeating(D3D11CreateDevice)),
weak_factory_(this) {
impl_weak_ = impl_->GetWeakPtr();
}
@@ -78,7 +81,10 @@ D3D11VideoDecoder::~D3D11VideoDecoder() {
// Post destruction to the main thread. When this executes, it will also
// cancel pending callbacks into |impl_| via |impl_weak_|. Callbacks out
// from |impl_| will be cancelled by |weak_factory_| when we return.
- impl_task_runner_->DeleteSoon(FROM_HERE, std::move(impl_));
+ if (impl_task_runner_->RunsTasksInCurrentSequence())
+ impl_.reset();
+ else
+ impl_task_runner_->DeleteSoon(FROM_HERE, std::move(impl_));
}
std::string D3D11VideoDecoder::GetDisplayName() const {
@@ -98,8 +104,14 @@ void D3D11VideoDecoder::Initialize(
return;
}
- // Bind our own init / output cb that hop to this thread, so we don't call the
- // originals on some other thread.
+ if (impl_task_runner_->RunsTasksInCurrentSequence()) {
+ impl_->Initialize(config, low_delay, cdm_context, init_cb, output_cb,
+ waiting_for_decryption_key_cb);
+ return;
+ }
+
+ // Bind our own init / output cb that hop to this thread, so we don't call
+ // the originals on some other thread.
// Important but subtle note: base::Bind will copy |config_| since it's a
// const ref.
// TODO(liberato): what's the lifetime of |cdm_context|?
@@ -115,6 +127,11 @@ void D3D11VideoDecoder::Initialize(
void D3D11VideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
const DecodeCB& decode_cb) {
+ if (impl_task_runner_->RunsTasksInCurrentSequence()) {
+ impl_->Decode(std::move(buffer), decode_cb);
+ return;
+ }
+
impl_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
@@ -123,6 +140,11 @@ void D3D11VideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
}
void D3D11VideoDecoder::Reset(const base::Closure& closure) {
+ if (impl_task_runner_->RunsTasksInCurrentSequence()) {
+ impl_->Reset(closure);
+ return;
+ }
+
impl_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VideoDecoder::Reset, impl_weak_,
BindToCurrentThreadIfWeakPtr(
@@ -144,31 +166,60 @@ int D3D11VideoDecoder::GetMaxDecodeRequests() const {
return impl_->GetMaxDecodeRequests();
}
+void D3D11VideoDecoder::SetCreateDeviceCallbackForTesting(
+ D3D11CreateDeviceCB callback) {
+ create_device_func_ = std::move(callback);
+}
+
+void D3D11VideoDecoder::SetWasSupportedReason(
+ D3D11VideoNotSupportedReason enum_value) {
+ UMA_HISTOGRAM_ENUMERATION("Media.D3D11.WasVideoSupported", enum_value);
+}
+
bool D3D11VideoDecoder::IsPotentiallySupported(
const VideoDecoderConfig& config) {
// TODO(liberato): All of this could be moved into MojoVideoDecoder, so that
// it could run on the client side and save the IPC hop.
+ // Make sure that we support at least 11.1.
+ D3D_FEATURE_LEVEL levels[] = {
+ D3D_FEATURE_LEVEL_11_1,
+ };
+ HRESULT hr = create_device_func_.Run(
+ nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, 0, levels, ARRAYSIZE(levels),
+ D3D11_SDK_VERSION, nullptr, nullptr, nullptr);
+
+ if (FAILED(hr)) {
+ SetWasSupportedReason(
+ D3D11VideoNotSupportedReason::kInsufficientD3D11FeatureLevel);
+ DVLOG(2) << "Insufficient D3D11 feature level";
+ return false;
+ }
+
// Must be H264.
+ // TODO(tmathmeyer): vp9 should be supported.
const bool is_h264 = config.profile() >= H264PROFILE_MIN &&
config.profile() <= H264PROFILE_MAX;
- if (!is_h264) {
+ if (is_h264) {
+ if (config.profile() == H264PROFILE_HIGH10PROFILE) {
+ // Must use NV12, which excludes HDR.
+ SetWasSupportedReason(D3D11VideoNotSupportedReason::kProfileNotSupported);
+ DVLOG(2) << "High 10 profile is not supported.";
+ return false;
+ }
+ } else {
DVLOG(2) << "Profile is not H264.";
return false;
}
- // Must use NV12, which excludes HDR.
- if (config.profile() == H264PROFILE_HIGH10PROFILE) {
- DVLOG(2) << "High 10 profile is not supported.";
- return false;
- }
-
// TODO(liberato): dxva checks IsHDR() in the target colorspace, but we don't
// have the target colorspace. It's commented as being for vpx, though, so
// we skip it here for now.
// Must use the validating decoder.
+ // TODO(tmathmeyer): support passthrough decoder. No logging to UMA since
+ // this condition should go away soon.
if (gpu_preferences_.use_passthrough_cmd_decoder) {
DVLOG(2) << "Must use validating decoder.";
return false;
@@ -176,15 +227,18 @@ bool D3D11VideoDecoder::IsPotentiallySupported(
// Must allow zero-copy of nv12 textures.
if (!gpu_preferences_.enable_zero_copy_dxgi_video) {
+ SetWasSupportedReason(D3D11VideoNotSupportedReason::kZeroCopyNv12Required);
DVLOG(2) << "Must allow zero-copy NV12.";
return false;
}
if (gpu_workarounds_.disable_dxgi_zero_copy_video) {
+ SetWasSupportedReason(D3D11VideoNotSupportedReason::kZeroCopyVideoRequired);
DVLOG(2) << "Must allow zero-copy video.";
return false;
}
+ SetWasSupportedReason(D3D11VideoNotSupportedReason::kVideoIsSupported);
return true;
}
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.h b/chromium/media/gpu/windows/d3d11_video_decoder.h
index 8f37514dbc9..a8c79a7fab6 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.h
@@ -12,11 +12,12 @@
#include "base/memory/weak_ptr.h"
#include "base/sequenced_task_runner.h"
#include "base/single_thread_task_runner.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "gpu/config/gpu_preferences.h"
#include "gpu/ipc/service/command_buffer_stub.h"
#include "media/base/video_decoder.h"
#include "media/gpu/media_gpu_export.h"
+#include "media/gpu/windows/d3d11_create_device_cb.h"
namespace media {
@@ -57,6 +58,9 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder {
// init without bothering with a thread hop.
bool IsPotentiallySupported(const VideoDecoderConfig& config);
+ // Override how we create D3D11 devices, to inject mocks.
+ void SetCreateDeviceCallbackForTesting(D3D11CreateDeviceCB callback);
+
protected:
// Owners should call Destroy(). This is automatic via
// std::default_delete<media::VideoDecoder> when held by a
@@ -71,6 +75,29 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder {
const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
std::unique_ptr<D3D11VideoDecoderImpl> impl);
+ enum class D3D11VideoNotSupportedReason {
+ kVideoIsSupported = 0,
+
+ // D3D11 version 11.1 required.
+ kInsufficientD3D11FeatureLevel = 1,
+
+ // The video profile for a supported codec is not supported.
+ kProfileNotSupported = 2,
+
+ // GPU options: require zero copy.
+ kZeroCopyNv12Required = 3,
+
+ // GPU options: require zero copy.
+ kZeroCopyVideoRequired = 4,
+
+ // For UMA. Must be the last entry. It should be initialized to the
+ // numerically largest value above; if you add more entries, then please
+ // update this to the last one.
+ kMaxValue = kZeroCopyVideoRequired
+ };
+
+ void SetWasSupportedReason(D3D11VideoNotSupportedReason enum_value);
+
// The implementation, which we trampoline to the impl thread.
// This must be freed on the impl thread.
std::unique_ptr<D3D11VideoDecoderImpl> impl_;
@@ -84,6 +111,8 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder {
gpu::GpuPreferences gpu_preferences_;
gpu::GpuDriverBugWorkarounds gpu_workarounds_;
+ D3D11CreateDeviceCB create_device_func_;
+
base::WeakPtrFactory<D3D11VideoDecoder> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(D3D11VideoDecoder);
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc b/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
index 9597ab64254..459ffba0483 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
@@ -73,19 +73,19 @@ void D3D11VideoDecoderImpl::Initialize(
// could use our own device, and run on the mojo thread, but texture sharing
// seems to be difficult.
device_ = gl::QueryD3D11DeviceObjectFromANGLE();
- device_->GetImmediateContext(device_context_.GetAddressOf());
+ device_->GetImmediateContext(device_context_.ReleaseAndGetAddressOf());
HRESULT hr;
// TODO(liberato): Handle cleanup better. Also consider being less chatty in
// the logs, since this will fall back.
- hr = device_context_.CopyTo(video_context_.GetAddressOf());
+ hr = device_context_.CopyTo(video_context_.ReleaseAndGetAddressOf());
if (!SUCCEEDED(hr)) {
NotifyError("Failed to get device context");
return;
}
- hr = device_.CopyTo(video_device_.GetAddressOf());
+ hr = device_.CopyTo(video_device_.ReleaseAndGetAddressOf());
if (!SUCCEEDED(hr)) {
NotifyError("Failed to get video device");
return;
@@ -127,9 +127,8 @@ void D3D11VideoDecoderImpl::Initialize(
D3D11_VIDEO_DECODER_DESC desc = {};
desc.Guid = decoder_guid;
- // TODO(liberato): where do these numbers come from?
- desc.SampleWidth = 1920;
- desc.SampleHeight = 1088;
+ desc.SampleWidth = config.coded_size().width();
+ desc.SampleHeight = config.coded_size().height();
desc.OutputFormat = DXGI_FORMAT_NV12;
UINT config_count = 0;
hr = video_device_->GetVideoDecoderConfigCount(&desc, &config_count);
@@ -162,16 +161,23 @@ void D3D11VideoDecoderImpl::Initialize(
memcpy(&decoder_guid_, &decoder_guid, sizeof decoder_guid_);
Microsoft::WRL::ComPtr<ID3D11VideoDecoder> video_decoder;
- hr = video_device_->CreateVideoDecoder(&desc, &dec_config,
- video_decoder.GetAddressOf());
+ hr = video_device_->CreateVideoDecoder(
+ &desc, &dec_config, video_decoder.ReleaseAndGetAddressOf());
if (!video_decoder.Get()) {
NotifyError("Failed to create a video decoder");
return;
}
- accelerated_video_decoder_ =
- std::make_unique<H264Decoder>(std::make_unique<D3D11H264Accelerator>(
- this, video_decoder, video_device_, video_context_));
+ CdmProxyContext* proxy_context = nullptr;
+#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
+ if (cdm_context)
+ proxy_context = cdm_context->GetCdmProxyContext();
+#endif
+
+ accelerated_video_decoder_ = std::make_unique<H264Decoder>(
+ std::make_unique<D3D11H264Accelerator>(this, proxy_context, video_decoder,
+ video_device_, video_context_),
+ config.color_space_info());
// |cdm_context| could be null for clear playback.
if (cdm_context) {
@@ -254,7 +260,7 @@ void D3D11VideoDecoderImpl::DoDecode() {
CreatePictureBuffers();
} else if (result == media::AcceleratedVideoDecoder::kAllocateNewSurfaces) {
CreatePictureBuffers();
- } else if (result == media::AcceleratedVideoDecoder::kNoKey) {
+ } else if (result == media::AcceleratedVideoDecoder::kTryAgain) {
state_ = State::kWaitingForNewKey;
// Note that another DoDecode() task would be posted in NotifyNewKey().
return;
@@ -322,7 +328,7 @@ void D3D11VideoDecoderImpl::CreatePictureBuffers() {
Microsoft::WRL::ComPtr<ID3D11Texture2D> out_texture;
HRESULT hr = device_->CreateTexture2D(&texture_desc, nullptr,
- out_texture.GetAddressOf());
+ out_texture.ReleaseAndGetAddressOf());
if (!SUCCEEDED(hr)) {
NotifyError("Failed to create a Texture2D for PictureBuffers");
return;
@@ -357,7 +363,9 @@ D3D11PictureBuffer* D3D11VideoDecoderImpl::GetPicture() {
return nullptr;
}
-void D3D11VideoDecoderImpl::OutputResult(D3D11PictureBuffer* buffer) {
+void D3D11VideoDecoderImpl::OutputResult(
+ D3D11PictureBuffer* buffer,
+ const VideoColorSpace& buffer_colorspace) {
buffer->set_in_client_use(true);
// Note: The pixel format doesn't matter.
@@ -367,7 +375,7 @@ void D3D11VideoDecoderImpl::OutputResult(D3D11PictureBuffer* buffer) {
// https://crbug.com/837337
double pixel_aspect_ratio = 1.0;
base::TimeDelta timestamp = buffer->timestamp_;
- auto frame = VideoFrame::WrapNativeTextures(
+ scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTextures(
PIXEL_FORMAT_NV12, buffer->mailbox_holders(),
VideoFrame::ReleaseMailboxCB(), visible_rect.size(), visible_rect,
GetNaturalSize(visible_rect, pixel_aspect_ratio), timestamp);
@@ -381,8 +389,12 @@ void D3D11VideoDecoderImpl::OutputResult(D3D11PictureBuffer* buffer) {
// that ALLOW_OVERLAY is required for encrypted video path.
frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true);
- if (is_encrypted_)
+ if (is_encrypted_) {
frame->metadata()->SetBoolean(VideoFrameMetadata::PROTECTED_VIDEO, true);
+ frame->metadata()->SetBoolean(VideoFrameMetadata::REQUIRE_OVERLAY, true);
+ }
+
+ frame->set_color_space(buffer_colorspace.ToGfxColorSpace());
output_cb_.Run(frame);
}
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_impl.h b/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
index e7e34de38d8..a62c8fd63ef 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
@@ -5,7 +5,7 @@
#ifndef MEDIA_GPU_D3D11_VIDEO_DECODER_IMPL_H_
#define MEDIA_GPU_D3D11_VIDEO_DECODER_IMPL_H_
-#include <d3d11.h>
+#include <d3d11_1.h>
#include <wrl/client.h>
#include <list>
@@ -19,6 +19,7 @@
#include "gpu/ipc/service/command_buffer_stub.h"
#include "media/base/callback_registry.h"
#include "media/base/video_decoder.h"
+#include "media/base/video_decoder_config.h"
#include "media/gpu/gles2_decoder_helper.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/windows/d3d11_h264_accelerator.h"
@@ -51,7 +52,8 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl : public VideoDecoder,
// D3D11VideoDecoderClient implementation.
D3D11PictureBuffer* GetPicture() override;
- void OutputResult(D3D11PictureBuffer* buffer) override;
+ void OutputResult(D3D11PictureBuffer* buffer,
+ const VideoColorSpace& buffer_colorspace) override;
// Return a weak ptr, since D3D11VideoDecoder constructs callbacks for us.
base::WeakPtr<D3D11VideoDecoderImpl> GetWeakPtr();
@@ -93,7 +95,7 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl : public VideoDecoder,
Microsoft::WRL::ComPtr<ID3D11Device> device_;
Microsoft::WRL::ComPtr<ID3D11DeviceContext> device_context_;
Microsoft::WRL::ComPtr<ID3D11VideoDevice> video_device_;
- Microsoft::WRL::ComPtr<ID3D11VideoContext> video_context_;
+ Microsoft::WRL::ComPtr<ID3D11VideoContext1> video_context_;
std::unique_ptr<AcceleratedVideoDecoder> accelerated_video_decoder_;
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
index dc0f2f44796..86dd1e5b3dd 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
@@ -21,6 +21,10 @@
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::_;
+using ::testing::DoAll;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::SaveArg;
namespace media {
@@ -63,13 +67,25 @@ class D3D11VideoDecoderTest : public ::testing::Test {
void CreateDecoder() {
std::unique_ptr<MockD3D11VideoDecoderImpl> impl =
- std::make_unique<MockD3D11VideoDecoderImpl>();
+ std::make_unique<NiceMock<MockD3D11VideoDecoderImpl>>();
impl_ = impl.get();
gpu_task_runner_ = base::ThreadTaskRunnerHandle::Get();
- decoder_ = base::WrapUnique<VideoDecoder>(new D3D11VideoDecoder(
- gpu_task_runner_, gpu_preferences_, gpu_workarounds_, std::move(impl)));
+ // We store it in a std::unique_ptr<VideoDecoder> so that the default
+ // deleter works. The dtor is protected.
+ decoder_ = base::WrapUnique<VideoDecoder>(
+ d3d11_decoder_raw_ =
+ new D3D11VideoDecoder(gpu_task_runner_, gpu_preferences_,
+ gpu_workarounds_, std::move(impl)));
+ d3d11_decoder_raw_->SetCreateDeviceCallbackForTesting(
+ base::BindRepeating(&D3D11CreateDeviceMock::Create,
+ base::Unretained(&create_device_mock_)));
+
+ // Configure CreateDevice to succeed by default.
+ ON_CALL(create_device_mock_,
+ Create(_, D3D_DRIVER_TYPE_HARDWARE, _, _, _, _, _, _, _, _))
+ .WillByDefault(Return(S_OK));
}
enum InitExpectation {
@@ -88,7 +104,6 @@ class D3D11VideoDecoderTest : public ::testing::Test {
} else {
EXPECT_CALL(*this, MockInitCB(false));
}
-
decoder_->Initialize(config, low_delay, cdm_context,
base::BindRepeating(&D3D11VideoDecoderTest::MockInitCB,
base::Unretained(this)),
@@ -101,14 +116,48 @@ class D3D11VideoDecoderTest : public ::testing::Test {
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
std::unique_ptr<VideoDecoder> decoder_;
+ D3D11VideoDecoder* d3d11_decoder_raw_ = nullptr;
gpu::GpuPreferences gpu_preferences_;
gpu::GpuDriverBugWorkarounds gpu_workarounds_;
MockD3D11VideoDecoderImpl* impl_ = nullptr;
VideoDecoderConfig supported_config_;
+ D3D11CreateDeviceMock create_device_mock_;
MOCK_METHOD1(MockInitCB, void(bool));
};
+TEST_F(D3D11VideoDecoderTest, RequiresD3D11_1) {
+ D3D_FEATURE_LEVEL feature_levels[100];
+ int num_levels = 0;
+
+ CreateDecoder();
+
+ // Fail to create the D3D11 device, but record the results.
+ D3D11CreateDeviceCB create_device_cb = base::BindRepeating(
+ [](D3D_FEATURE_LEVEL* feature_levels_out, int* num_levels_out,
+ IDXGIAdapter*, D3D_DRIVER_TYPE, HMODULE, UINT,
+ const D3D_FEATURE_LEVEL* feature_levels, UINT num_levels, UINT,
+ ID3D11Device**, D3D_FEATURE_LEVEL*, ID3D11DeviceContext**) -> HRESULT {
+ memcpy(feature_levels_out, feature_levels,
+ num_levels * sizeof(feature_levels_out[0]));
+ *num_levels_out = num_levels;
+ return E_NOTIMPL;
+ },
+ feature_levels, &num_levels);
+ d3d11_decoder_raw_->SetCreateDeviceCallbackForTesting(
+ std::move(create_device_cb));
+ InitializeDecoder(supported_config_, kExpectFailure);
+
+ // Verify that it requests exactly 11.1, and nothing earlier.
+ // Later is okay.
+ bool min_is_d3d11_1 = false;
+ for (int i = 0; i < num_levels; i++) {
+ min_is_d3d11_1 |= feature_levels[i] == D3D_FEATURE_LEVEL_11_1;
+ ASSERT_TRUE(feature_levels[i] >= D3D_FEATURE_LEVEL_11_1);
+ }
+ ASSERT_TRUE(min_is_d3d11_1);
+}
+
TEST_F(D3D11VideoDecoderTest, SupportsH264) {
CreateDecoder();
// Make sure that we're testing H264.
diff --git a/chromium/media/gpu/windows/dxva_picture_buffer_win.cc b/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
index 0a3075e49d8..6ee71c3ccbc 100644
--- a/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
+++ b/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
@@ -50,7 +50,7 @@ class DummyGLImage : public gl::GLImage {
const gfx::Rect& bounds_rect,
const gfx::RectF& crop_rect,
bool enable_blend,
- gfx::GpuFence* gpu_fence) override {
+ std::unique_ptr<gfx::GpuFence> gpu_fence) override {
return false;
}
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
@@ -291,7 +291,7 @@ bool PbufferPictureBuffer::InitializeTexture(
void PbufferPictureBuffer::ResetReuseFence() {
DCHECK_EQ(IN_CLIENT, state_);
if (!reuse_fence_ || !reuse_fence_->ResetSupported())
- reuse_fence_.reset(gl::GLFence::Create());
+ reuse_fence_ = gl::GLFence::Create();
else
reuse_fence_->ResetState();
state_ = WAITING_TO_REUSE;
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
index 93e65ae651f..6488310acd0 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
@@ -44,8 +44,8 @@
#include "base/win/scoped_co_mem.h"
#include "base/win/windows_version.h"
#include "build/build_config.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/win/mf_helpers.h"
@@ -726,7 +726,6 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
support_copy_nv12_textures_(gpu_preferences.enable_nv12_dxgi_video &&
!workarounds.disable_nv12_dxgi_video),
support_delayed_copy_nv12_textures_(
- !gpu_preferences.use_passthrough_cmd_decoder &&
base::FeatureList::IsEnabled(kDelayCopyNV12Textures) &&
!workarounds.disable_delayed_copy_nv12),
use_dx11_(false),
@@ -893,9 +892,6 @@ bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
if (d3d9_.Get())
return true;
- if (media_log_)
- MEDIA_LOG(INFO, media_log_) << __func__ << ": Creating D3D9 device.";
-
HRESULT hr = E_FAIL;
hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.GetAddressOf());
@@ -1044,9 +1040,6 @@ bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
if (D3D11Device())
return true;
- if (media_log_)
- MEDIA_LOG(INFO, media_log_) << __func__ << ": Creating D3D11 device.";
-
HRESULT hr = create_dxgi_device_manager_(
&dx11_dev_manager_reset_token_, d3d11_device_manager_.GetAddressOf());
RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false);
@@ -1647,12 +1640,16 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(VideoCodecProfile profile) {
ULONG_PTR device_manager_to_use = NULL;
if (use_dx11_) {
CHECK(create_dxgi_device_manager_);
+ if (media_log_)
+ MEDIA_LOG(INFO, media_log_) << "Using D3D11 device for DXVA";
RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(),
"Failed to initialize DX11 device and manager",
PLATFORM_FAILURE, false);
device_manager_to_use =
reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.Get());
} else {
+ if (media_log_)
+ MEDIA_LOG(INFO, media_log_) << "Using D3D9 device for DXVA";
RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
"Failed to initialize D3D device and manager",
PLATFORM_FAILURE, false);
@@ -1765,8 +1762,6 @@ bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
UINT32 dx11_aware = 0;
attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware);
use_dx11_ = !!dx11_aware;
- if (media_log_)
- MEDIA_LOG(INFO, media_log_) << __func__ << ": Using DX11? " << use_dx11_;
}
use_keyed_mutex_ =
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
index c5998d033a3..4aa00686433 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
@@ -30,7 +30,7 @@
#include "base/memory/weak_ptr.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/video_color_space.h"
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
#include "media/gpu/media_gpu_export.h"
diff --git a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
index b60a843898e..302bd051f63 100644
--- a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
@@ -671,8 +671,9 @@ void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
}
main_client_task_runner_->PostTask(
- FROM_HERE, base::Bind(&Client::BitstreamBufferReady, main_client_,
- buffer_ref->id, size, keyframe, timestamp));
+ FROM_HERE,
+ base::Bind(&Client::BitstreamBufferReady, main_client_, buffer_ref->id,
+ BitstreamBufferMetadata(size, keyframe, timestamp)));
// Keep calling ProcessOutput recursively until MF_E_TRANSFORM_NEED_MORE_INPUT
// is returned to flush out all the output.
@@ -708,8 +709,9 @@ void MediaFoundationVideoEncodeAccelerator::ReturnBitstreamBuffer(
main_client_task_runner_->PostTask(
FROM_HERE,
base::Bind(&Client::BitstreamBufferReady, main_client_, buffer_ref->id,
- encode_output->size(), encode_output->keyframe,
- encode_output->capture_timestamp));
+ BitstreamBufferMetadata(encode_output->size(),
+ encode_output->keyframe,
+ encode_output->capture_timestamp)));
}
void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
diff --git a/chromium/media/media_options.gni b/chromium/media/media_options.gni
index 0279aa5c0e5..d8e20f62c19 100644
--- a/chromium/media/media_options.gni
+++ b/chromium/media/media_options.gni
@@ -58,7 +58,7 @@ declare_args() {
enable_mpeg_h_audio_demuxing = proprietary_codecs && is_chromecast
enable_mse_mpeg2ts_stream_parser =
- (proprietary_codecs && is_chromecast) || use_fuzzing_engine
+ proprietary_codecs && (is_chromecast || use_fuzzing_engine)
# Enable parsing for the 'cbcs' encryption scheme added by MPEG Common
# Encryption 3rd Edition (ISO/IEC 23001-7), published 02/15/2016.
@@ -170,6 +170,45 @@ assert(
assert(!enable_cdm_host_verification || is_mac || is_win,
"CDM host verification is only supported on Mac and Windows.")
+_default_mojo_media_services = []
+_default_mojo_media_host = "none"
+
+# Default mojo_media_services and mojo_media_host on various platforms.
+# Can be overridden by gn build arguments from the --args command line flag
+# for local testing.
+if (enable_mojo_media) {
+ if (is_chromecast && is_cast_using_cma_backend && !is_android) {
+ _default_mojo_media_services = [
+ "cdm",
+ "renderer",
+ ]
+ _default_mojo_media_host = "browser"
+ } else if (is_android) {
+ # Both chrome for Android and cast for Android belongs to this case
+ _default_mojo_media_services = [
+ "cdm",
+ "audio_decoder",
+ "video_decoder",
+ ]
+ _default_mojo_media_host = "gpu"
+ } else if (is_mac || is_win) {
+ _default_mojo_media_services = [ "video_decoder" ]
+ _default_mojo_media_host = "gpu"
+ }
+
+ if (enable_library_cdms) {
+ _default_mojo_media_services += [ "cdm" ]
+
+ # Having a CDM running means it could require a CdmProxy running in the GPU
+ # process.
+ assert(
+ _default_mojo_media_host == "none" || _default_mojo_media_host == "gpu",
+ "For now, mojo_media_host should not overwrite it with a different " +
+ "value if it has been set.")
+ _default_mojo_media_host = "gpu"
+ }
+}
+
# Use another declare_args() to pick up possible overrides of enable_mojo_media
# from --args command line flags. See "gn help declare_args".
declare_args() {
@@ -182,7 +221,7 @@ declare_args() {
# Renderer. Cannot be used with the mojo Renderer above.
# - "video_decoder": Use mojo-based video decoder in the default media
# Renderer. Cannot be used with the mojo Renderer above.
- mojo_media_services = []
+ mojo_media_services = _default_mojo_media_services
# The process that the mojo MediaService runs in. By default, all services
# registered in |mojo_media_services| are hosted in the MediaService, with the
@@ -195,43 +234,7 @@ declare_args() {
# - "browser": Use mojo media service hosted in the browser process.
# - "gpu": Use mojo media service hosted in the gpu process.
# - "utility": Use mojo media service hosted in the utility process.
- mojo_media_host = "none"
-}
-
-# Default mojo_media_services and mojo_media_host on various platforms.
-# Can be overridden by gn build arguments from the --args command line flag
-# for local testing.
-if (enable_mojo_media) {
- if (is_chromecast && is_cast_using_cma_backend) {
- mojo_media_services = [
- "cdm",
- "renderer",
- ]
- mojo_media_host = "browser"
- } else if (is_android) {
- # Both chrome for Android and cast for ATV belongs to this case
- mojo_media_services = [
- "cdm",
- "audio_decoder",
- "video_decoder",
- ]
- mojo_media_host = "gpu"
- } else if (is_mac || is_win) {
- mojo_media_services += [ "video_decoder" ]
- mojo_media_host = "gpu"
- }
-
- if (enable_library_cdms) {
- mojo_media_services += [ "cdm" ]
- assert(
- mojo_media_host == "none" || mojo_media_host == "gpu",
- "For now, mojo_media_host should not overwrite it with a different " +
- "value if it has been set.")
-
- # Having a CDM running means it could require a CdmProxy running in the GPU
- # process.
- mojo_media_host = "gpu"
- }
+ mojo_media_host = _default_mojo_media_host
}
declare_args() {
diff --git a/chromium/media/midi/midi_manager_alsa.cc b/chromium/media/midi/midi_manager_alsa.cc
index 9d322cb67e5..2d9e42aa9e2 100644
--- a/chromium/media/midi/midi_manager_alsa.cc
+++ b/chromium/media/midi/midi_manager_alsa.cc
@@ -271,11 +271,11 @@ void MidiManagerAlsa::StartInitialization() {
// initialize these earlier, since they need to be destroyed by the
// thread that calls Finalize(), not the destructor thread (and we
// check this in the destructor).
- in_client_.reset(in_client.release());
- out_client_.reset(out_client.release());
- decoder_.reset(decoder.release());
- udev_.reset(udev.release());
- udev_monitor_.reset(udev_monitor.release());
+ in_client_ = std::move(in_client);
+ out_client_ = std::move(out_client);
+ decoder_ = std::move(decoder);
+ udev_ = std::move(udev);
+ udev_monitor_ = std::move(udev_monitor);
// Generate hotplug events for existing ports.
// TODO(agoode): Check the return value for failure.
diff --git a/chromium/media/mojo/BUILD.gn b/chromium/media/mojo/BUILD.gn
index 2fc424668eb..21dc54f037f 100644
--- a/chromium/media/mojo/BUILD.gn
+++ b/chromium/media/mojo/BUILD.gn
@@ -91,6 +91,6 @@ source_set("unit_tests") {
test("media_mojo_unittests") {
deps = [
":unit_tests",
- "//mojo/edk/test:run_all_unittests",
+ "//mojo/core/test:run_all_unittests",
]
}
diff --git a/chromium/media/mojo/DEPS b/chromium/media/mojo/DEPS
index 7216ea6714a..496a6c52e40 100644
--- a/chromium/media/mojo/DEPS
+++ b/chromium/media/mojo/DEPS
@@ -5,7 +5,7 @@ include_rules = [
"+mojo/public",
# For changing the bad message handler for tests.
- "+mojo/edk/embedder/embedder.h",
+ "+mojo/core/embedder/embedder.h",
# TODO(xhwang): Ideally media should not worry about sandbox. Find a way to
# remove this dependency.
diff --git a/chromium/media/mojo/clients/BUILD.gn b/chromium/media/mojo/clients/BUILD.gn
index 5100e5c976a..90b391acdd2 100644
--- a/chromium/media/mojo/clients/BUILD.gn
+++ b/chromium/media/mojo/clients/BUILD.gn
@@ -79,7 +79,10 @@ source_set("clients") {
}
source_set("jpeg_decode_accelerator") {
- visibility = [ "//content/browser" ]
+ visibility = [
+ "//content/browser",
+ "//media/capture:capture_lib",
+ ]
sources = [
"mojo_jpeg_decode_accelerator.cc",
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder.cc b/chromium/media/mojo/clients/mojo_audio_decoder.cc
index 88aca0371d6..ef895111529 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_audio_decoder.cc
@@ -38,6 +38,10 @@ std::string MojoAudioDecoder::GetDisplayName() const {
return "MojoAudioDecoder";
}
+bool MojoAudioDecoder::IsPlatformDecoder() const {
+ return true;
+}
+
void MojoAudioDecoder::Initialize(
const AudioDecoderConfig& config,
CdmContext* cdm_context,
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder.h b/chromium/media/mojo/clients/mojo_audio_decoder.h
index a134ca0e0a1..e3f5b1e1e21 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder.h
+++ b/chromium/media/mojo/clients/mojo_audio_decoder.h
@@ -32,6 +32,7 @@ class MojoAudioDecoder : public AudioDecoder, public mojom::AudioDecoderClient {
// AudioDecoder implementation.
std::string GetDisplayName() const final;
+ bool IsPlatformDecoder() const final;
void Initialize(
const AudioDecoderConfig& config,
CdmContext* cdm_context,
diff --git a/chromium/media/mojo/clients/mojo_cdm.cc b/chromium/media/mojo/clients/mojo_cdm.cc
index d5f8e4c33a9..6e4f0c76013 100644
--- a/chromium/media/mojo/clients/mojo_cdm.cc
+++ b/chromium/media/mojo/clients/mojo_cdm.cc
@@ -20,6 +20,7 @@
#include "media/mojo/clients/mojo_decryptor.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/interfaces/decryptor.mojom.h"
+#include "media/mojo/interfaces/interface_factory.mojom.h"
#include "services/service_manager/public/cpp/connect.h"
#include "services/service_manager/public/mojom/interface_provider.mojom.h"
#include "url/origin.h"
@@ -41,14 +42,15 @@ void MojoCdm::Create(
const url::Origin& security_origin,
const CdmConfig& cdm_config,
mojom::ContentDecryptionModulePtr remote_cdm,
+ mojom::InterfaceFactory* interface_factory,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb,
const CdmCreatedCB& cdm_created_cb) {
- scoped_refptr<MojoCdm> mojo_cdm(
- new MojoCdm(std::move(remote_cdm), session_message_cb, session_closed_cb,
- session_keys_change_cb, session_expiration_update_cb));
+ scoped_refptr<MojoCdm> mojo_cdm(new MojoCdm(
+ std::move(remote_cdm), interface_factory, session_message_cb,
+ session_closed_cb, session_keys_change_cb, session_expiration_update_cb));
// |mojo_cdm| ownership is passed to the promise.
std::unique_ptr<CdmInitializedPromise> promise(
@@ -59,11 +61,13 @@ void MojoCdm::Create(
}
MojoCdm::MojoCdm(mojom::ContentDecryptionModulePtr remote_cdm,
+ mojom::InterfaceFactory* interface_factory,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb)
: remote_cdm_(std::move(remote_cdm)),
+ interface_factory_(interface_factory),
client_binding_(this),
task_runner_(base::ThreadTaskRunnerHandle::Get()),
cdm_id_(CdmContext::kInvalidCdmId),
@@ -303,14 +307,27 @@ Decryptor* MojoCdm::GetDecryptor() {
decryptor_task_runner_ = base::ThreadTaskRunnerHandle::Get();
DCHECK(decryptor_task_runner_->BelongsToCurrentThread());
+ if (decryptor_)
+ return decryptor_.get();
+
+ mojom::DecryptorPtr decryptor_ptr;
+
// Can be called on a different thread.
if (decryptor_ptr_info_.is_valid()) {
- DCHECK(!decryptor_);
- mojom::DecryptorPtr decryptor_ptr;
+ DVLOG(1) << __func__ << ": Using Decryptor exposed by the CDM directly";
decryptor_ptr.Bind(std::move(decryptor_ptr_info_));
- decryptor_.reset(new MojoDecryptor(std::move(decryptor_ptr)));
+ } else if (interface_factory_ && cdm_id_ != CdmContext::kInvalidCdmId) {
+ // TODO(xhwang): Pass back info on whether Decryptor is supported by the
+ // remote CDM.
+ DVLOG(1) << __func__ << ": Using Decryptor associated with CDM ID "
+ << cdm_id_ << ", typically hosted by CdmProxy in MediaService";
+ interface_factory_->CreateDecryptor(cdm_id_,
+ mojo::MakeRequest(&decryptor_ptr));
}
+ if (decryptor_ptr)
+ decryptor_.reset(new MojoDecryptor(std::move(decryptor_ptr)));
+
return decryptor_.get();
}
diff --git a/chromium/media/mojo/clients/mojo_cdm.h b/chromium/media/mojo/clients/mojo_cdm.h
index c883ec0dec0..21ffd39ad0e 100644
--- a/chromium/media/mojo/clients/mojo_cdm.h
+++ b/chromium/media/mojo/clients/mojo_cdm.h
@@ -33,6 +33,10 @@ class Origin;
namespace media {
+namespace mojom {
+class InterfaceFactory;
+}
+
class MojoDecryptor;
// A ContentDecryptionModule that proxies to a mojom::ContentDecryptionModule.
@@ -49,6 +53,7 @@ class MojoCdm : public ContentDecryptionModule,
const url::Origin& security_origin,
const CdmConfig& cdm_config,
mojom::ContentDecryptionModulePtr remote_cdm,
+ mojom::InterfaceFactory* interface_factory,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
@@ -84,6 +89,7 @@ class MojoCdm : public ContentDecryptionModule,
private:
MojoCdm(mojom::ContentDecryptionModulePtr remote_cdm,
+ mojom::InterfaceFactory* interface_factory,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
@@ -132,6 +138,7 @@ class MojoCdm : public ContentDecryptionModule,
THREAD_CHECKER(thread_checker_);
mojom::ContentDecryptionModulePtr remote_cdm_;
+ mojom::InterfaceFactory* interface_factory_;
mojo::Binding<ContentDecryptionModuleClient> client_binding_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
diff --git a/chromium/media/mojo/clients/mojo_cdm_factory.cc b/chromium/media/mojo/clients/mojo_cdm_factory.cc
index 9811c946fb3..18fa3abf81b 100644
--- a/chromium/media/mojo/clients/mojo_cdm_factory.cc
+++ b/chromium/media/mojo/clients/mojo_cdm_factory.cc
@@ -64,8 +64,9 @@ void MojoCdmFactory::Create(
interface_factory_->CreateCdm(key_system, mojo::MakeRequest(&cdm_ptr));
MojoCdm::Create(key_system, security_origin, cdm_config, std::move(cdm_ptr),
- session_message_cb, session_closed_cb, session_keys_change_cb,
- session_expiration_update_cb, cdm_created_cb);
+ interface_factory_, session_message_cb, session_closed_cb,
+ session_keys_change_cb, session_expiration_update_cb,
+ cdm_created_cb);
}
} // namespace media
diff --git a/chromium/media/mojo/clients/mojo_cdm_unittest.cc b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
index 3fc9bc586cd..36e7a20376f 100644
--- a/chromium/media/mojo/clients/mojo_cdm_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
@@ -102,7 +102,7 @@ class MojoCdmTest : public ::testing::Test {
}
MojoCdm::Create(key_system, url::Origin::Create(GURL(kTestSecurityOrigin)),
- CdmConfig(), std::move(remote_cdm),
+ CdmConfig(), std::move(remote_cdm), nullptr,
base::Bind(&MockCdmClient::OnSessionMessage,
base::Unretained(&cdm_client_)),
base::Bind(&MockCdmClient::OnSessionClosed,
@@ -161,7 +161,7 @@ class MojoCdmTest : public ::testing::Test {
ExpectedResult expected_result) {
// Specify parameters needed to call CreateSessionAndGenerateRequest() in
// order to verify that the data is passed properly.
- const CdmSessionType session_type = CdmSessionType::TEMPORARY_SESSION;
+ const CdmSessionType session_type = CdmSessionType::kTemporary;
const EmeInitDataType data_type = EmeInitDataType::WEBM;
const std::vector<uint8_t> key_id(kKeyId, kKeyId + arraysize(kKeyId));
std::string created_session_id;
@@ -198,8 +198,7 @@ class MojoCdmTest : public ::testing::Test {
void LoadSessionAndExpect(const std::string& session_id,
ExpectedResult expected_result) {
- const CdmSessionType session_type =
- CdmSessionType::PERSISTENT_LICENSE_SESSION;
+ const CdmSessionType session_type = CdmSessionType::kPersistentLicense;
std::string loaded_session_id;
if (expected_result == CONNECTION_ERROR_BEFORE) {
diff --git a/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc b/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc
index 88d3908eb98..3d1ff5fdb5a 100644
--- a/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc
+++ b/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc
@@ -17,13 +17,13 @@
namespace media {
MojoJpegDecodeAccelerator::MojoJpegDecodeAccelerator(
- scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
+ scoped_refptr<base::SequencedTaskRunner> io_task_runner,
mojom::JpegDecodeAcceleratorPtrInfo jpeg_decoder)
: io_task_runner_(std::move(io_task_runner)),
jpeg_decoder_info_(std::move(jpeg_decoder)) {}
MojoJpegDecodeAccelerator::~MojoJpegDecodeAccelerator() {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->RunsTasksInCurrentSequence());
}
bool MojoJpegDecodeAccelerator::Initialize(
@@ -34,7 +34,7 @@ bool MojoJpegDecodeAccelerator::Initialize(
void MojoJpegDecodeAccelerator::InitializeAsync(Client* client,
InitCB init_cb) {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->RunsTasksInCurrentSequence());
jpeg_decoder_.Bind(std::move(jpeg_decoder_info_));
@@ -50,7 +50,7 @@ void MojoJpegDecodeAccelerator::InitializeAsync(Client* client,
void MojoJpegDecodeAccelerator::Decode(
const BitstreamBuffer& bitstream_buffer,
const scoped_refptr<VideoFrame>& video_frame) {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->RunsTasksInCurrentSequence());
DCHECK(jpeg_decoder_.is_bound());
DCHECK(
@@ -86,7 +86,7 @@ void MojoJpegDecodeAccelerator::OnInitializeDone(
InitCB init_cb,
JpegDecodeAccelerator::Client* client,
bool success) {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->RunsTasksInCurrentSequence());
if (success)
client_ = client;
@@ -97,7 +97,7 @@ void MojoJpegDecodeAccelerator::OnInitializeDone(
void MojoJpegDecodeAccelerator::OnDecodeAck(
int32_t bitstream_buffer_id,
::media::JpegDecodeAccelerator::Error error) {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->RunsTasksInCurrentSequence());
if (!client_)
return;
@@ -116,7 +116,7 @@ void MojoJpegDecodeAccelerator::OnDecodeAck(
}
void MojoJpegDecodeAccelerator::OnLostConnectionToJpegDecoder() {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->RunsTasksInCurrentSequence());
OnDecodeAck(kInvalidBitstreamBufferId,
::media::JpegDecodeAccelerator::Error::PLATFORM_FAILURE);
}
diff --git a/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.h b/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.h
index 6b67a9078c8..6d8e78b9b51 100644
--- a/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.h
+++ b/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.h
@@ -14,18 +14,18 @@
#include "media/video/jpeg_decode_accelerator.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
// A JpegDecodeAccelerator, for use in the browser process, that proxies to a
// mojom::JpegDecodeAccelerator. Created on the owner's thread, otherwise
-// operating and deleted on the IO thread.
+// operating and deleted on |io_task_runner|.
class MojoJpegDecodeAccelerator : public JpegDecodeAccelerator {
public:
MojoJpegDecodeAccelerator(
- scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
+ scoped_refptr<base::SequencedTaskRunner> io_task_runner,
mojom::JpegDecodeAcceleratorPtrInfo jpeg_decoder);
~MojoJpegDecodeAccelerator() override;
@@ -46,8 +46,7 @@ class MojoJpegDecodeAccelerator : public JpegDecodeAccelerator {
::media::JpegDecodeAccelerator::Error error);
void OnLostConnectionToJpegDecoder();
- // Browser IO task runner.
- scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> io_task_runner_;
Client* client_ = nullptr;
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.cc b/chromium/media/mojo/clients/mojo_video_decoder.cc
index 7d84d38c81c..d01ecb420a4 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_video_decoder.cc
@@ -127,6 +127,10 @@ MojoVideoDecoder::~MojoVideoDecoder() {
request_overlay_info_cb_.Run(false, ProvideOverlayInfoCB());
}
+bool MojoVideoDecoder::IsPlatformDecoder() const {
+ return true;
+}
+
std::string MojoVideoDecoder::GetDisplayName() const {
return "MojoVideoDecoder";
}
@@ -369,11 +373,22 @@ void MojoVideoDecoder::Stop() {
has_connection_error_ = true;
+ // |init_cb_| is likely to reentrantly destruct |this|, so we check for that
+ // using an on-stack WeakPtr.
+ // TODO(sandersd): Update the VideoDecoder API to be explicit about what
+ // reentrancy is allowed, and therefore which callbacks must be posted.
+ base::WeakPtr<MojoVideoDecoder> weak_this = weak_this_;
+
if (!init_cb_.is_null())
base::ResetAndReturn(&init_cb_).Run(false);
+ if (!weak_this)
+ return;
- for (const auto& pending_decode : pending_decodes_)
+ for (const auto& pending_decode : pending_decodes_) {
pending_decode.second.Run(DecodeStatus::DECODE_ERROR);
+ if (!weak_this)
+ return;
+ }
pending_decodes_.clear();
if (!reset_cb_.is_null())
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.h b/chromium/media/mojo/clients/mojo_video_decoder.h
index f3fe82ae196..aa6cea8ff35 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.h
+++ b/chromium/media/mojo/clients/mojo_video_decoder.h
@@ -44,6 +44,7 @@ class MojoVideoDecoder final : public VideoDecoder,
// VideoDecoder implementation.
std::string GetDisplayName() const final;
+ bool IsPlatformDecoder() const final;
void Initialize(
const VideoDecoderConfig& config,
bool low_delay,
diff --git a/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc b/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
index da480d132a7..c211bc7803f 100644
--- a/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
+++ b/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
@@ -34,10 +34,9 @@ class VideoEncodeAcceleratorClient
void RequireBitstreamBuffers(uint32_t input_count,
const gfx::Size& input_coded_size,
uint32_t output_buffer_size) override;
- void BitstreamBufferReady(int32_t bitstream_buffer_id,
- uint32_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) override;
+ void BitstreamBufferReady(
+ int32_t bitstream_buffer_id,
+ const media::BitstreamBufferMetadata& metadata) override;
void NotifyError(VideoEncodeAccelerator::Error error) override;
private:
@@ -67,14 +66,11 @@ void VideoEncodeAcceleratorClient::RequireBitstreamBuffers(
void VideoEncodeAcceleratorClient::BitstreamBufferReady(
int32_t bitstream_buffer_id,
- uint32_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) {
+ const media::BitstreamBufferMetadata& metadata) {
DVLOG(2) << __func__ << " bitstream_buffer_id=" << bitstream_buffer_id
- << ", payload_size=" << payload_size
- << "B, key_frame=" << key_frame;
- client_->BitstreamBufferReady(bitstream_buffer_id, payload_size, key_frame,
- timestamp);
+ << ", payload_size=" << metadata.payload_size_bytes
+ << "B, key_frame=" << metadata.key_frame;
+ client_->BitstreamBufferReady(bitstream_buffer_id, metadata);
}
void VideoEncodeAcceleratorClient::NotifyError(
@@ -195,6 +191,19 @@ void MojoVideoEncodeAccelerator::RequestEncodingParametersChange(
DVLOG(2) << __func__;
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(vea_.is_bound());
+
+ media::VideoBitrateAllocation bitrate_allocation;
+ bitrate_allocation.SetBitrate(0, 0, bitrate);
+ vea_->RequestEncodingParametersChange(bitrate_allocation, framerate);
+}
+
+void MojoVideoEncodeAccelerator::RequestEncodingParametersChange(
+ const VideoBitrateAllocation& bitrate,
+ uint32_t framerate) {
+ DVLOG(2) << __func__;
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(vea_.is_bound());
+
vea_->RequestEncodingParametersChange(bitrate, framerate);
}
diff --git a/chromium/media/mojo/clients/mojo_video_encode_accelerator.h b/chromium/media/mojo/clients/mojo_video_encode_accelerator.h
index 0e6bcead241..c39d6cddd2c 100644
--- a/chromium/media/mojo/clients/mojo_video_encode_accelerator.h
+++ b/chromium/media/mojo/clients/mojo_video_encode_accelerator.h
@@ -48,6 +48,8 @@ class MojoVideoEncodeAccelerator : public VideoEncodeAccelerator {
void UseOutputBitstreamBuffer(const BitstreamBuffer& buffer) override;
void RequestEncodingParametersChange(uint32_t bitrate,
uint32_t framerate_num) override;
+ void RequestEncodingParametersChange(const VideoBitrateAllocation& bitrate,
+ uint32_t framerate) override;
void Destroy() override;
private:
diff --git a/chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc b/chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc
index 201fd2fede1..6189263bdba 100644
--- a/chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc
@@ -16,6 +16,7 @@
using ::testing::_;
using ::testing::InSequence;
+using ::testing::Invoke;
namespace media {
@@ -63,8 +64,9 @@ class MockMojoVideoEncodeAccelerator : public mojom::VideoEncodeAccelerator {
EncodeCallback callback) override {
EXPECT_NE(-1, configured_bitstream_buffer_id_);
EXPECT_TRUE(client_);
- client_->BitstreamBufferReady(configured_bitstream_buffer_id_, 0, keyframe,
- frame->timestamp());
+ client_->BitstreamBufferReady(
+ configured_bitstream_buffer_id_,
+ BitstreamBufferMetadata(0, keyframe, frame->timestamp()));
configured_bitstream_buffer_id_ = -1;
DoEncode(frame, keyframe);
@@ -83,7 +85,8 @@ class MockMojoVideoEncodeAccelerator : public mojom::VideoEncodeAccelerator {
MOCK_METHOD2(DoUseOutputBitstreamBuffer,
void(int32_t, mojo::ScopedSharedBufferHandle*));
- MOCK_METHOD2(RequestEncodingParametersChange, void(uint32_t, uint32_t));
+ MOCK_METHOD2(RequestEncodingParametersChange,
+ void(const media::VideoBitrateAllocation&, uint32_t));
void set_initialization_success(bool success) {
initialization_success_ = success;
@@ -104,8 +107,8 @@ class MockVideoEncodeAcceleratorClient : public VideoEncodeAccelerator::Client {
MOCK_METHOD3(RequireBitstreamBuffers,
void(unsigned int, const gfx::Size&, size_t));
- MOCK_METHOD4(BitstreamBufferReady,
- void(int32_t, size_t, bool, base::TimeDelta));
+ MOCK_METHOD2(BitstreamBufferReady,
+ void(int32_t, const media::BitstreamBufferMetadata&));
MOCK_METHOD1(NotifyError, void(VideoEncodeAccelerator::Error));
private:
@@ -217,9 +220,12 @@ TEST_F(MojoVideoEncodeAcceleratorTest, EncodeOneFrame) {
// The remote end of the mojo Pipe doesn't receive |video_frame| itself.
EXPECT_CALL(*mock_mojo_vea(), DoEncode(_, is_keyframe));
- EXPECT_CALL(*mock_vea_client,
- BitstreamBufferReady(kBitstreamBufferId, _, is_keyframe,
- video_frame->timestamp()));
+ EXPECT_CALL(*mock_vea_client, BitstreamBufferReady(kBitstreamBufferId, _))
+ .WillOnce(Invoke([is_keyframe, &video_frame](
+ int32_t, const BitstreamBufferMetadata& metadata) {
+ EXPECT_EQ(is_keyframe, metadata.key_frame);
+ EXPECT_EQ(metadata.timestamp, video_frame->timestamp());
+ }));
mojo_vea()->Encode(video_frame, is_keyframe);
base::RunLoop().RunUntilIdle();
@@ -228,18 +234,47 @@ TEST_F(MojoVideoEncodeAcceleratorTest, EncodeOneFrame) {
// Tests that a RequestEncodingParametersChange() ripples through correctly.
TEST_F(MojoVideoEncodeAcceleratorTest, EncodingParametersChange) {
- const uint32_t kNewBitrate = 123123u;
const uint32_t kNewFramerate = 321321u;
+ const uint32_t kNewBitrate = 123123u;
+ VideoBitrateAllocation bitrate_allocation;
+ bitrate_allocation.SetBitrate(0, 0, kNewBitrate);
// In a real world scenario, we should go through an Initialize() prologue,
// but we can skip that in unit testing.
- EXPECT_CALL(*mock_mojo_vea(),
- RequestEncodingParametersChange(kNewBitrate, kNewFramerate));
+ EXPECT_CALL(*mock_mojo_vea(), RequestEncodingParametersChange(
+ bitrate_allocation, kNewFramerate));
mojo_vea()->RequestEncodingParametersChange(kNewBitrate, kNewFramerate);
base::RunLoop().RunUntilIdle();
}
+// Tests that a RequestEncodingParametersChange() works with multi-dimensional
+// bitrate allocatio.
+TEST_F(MojoVideoEncodeAcceleratorTest,
+ EncodingParametersWithBitrateAllocation) {
+ const uint32_t kNewFramerate = 321321u;
+ const size_t kMaxNumBitrates = VideoBitrateAllocation::kMaxSpatialLayers *
+ VideoBitrateAllocation::kMaxTemporalLayers;
+
+ // Verify translation of VideoBitrateAllocation into vector of bitrates for
+ // everything from empty array up to max number of layers.
+ VideoBitrateAllocation bitrate_allocation;
+ for (size_t i = 0; i <= kMaxNumBitrates; ++i) {
+ if (i > 0) {
+ int layer_bitrate = i * 1000;
+ const size_t si = (i - 1) / VideoBitrateAllocation::kMaxTemporalLayers;
+ const size_t ti = (i - 1) % VideoBitrateAllocation::kMaxTemporalLayers;
+ bitrate_allocation.SetBitrate(si, ti, layer_bitrate);
+ }
+
+ EXPECT_CALL(*mock_mojo_vea(), RequestEncodingParametersChange(
+ bitrate_allocation, kNewFramerate));
+ mojo_vea()->RequestEncodingParametersChange(bitrate_allocation,
+ kNewFramerate);
+ base::RunLoop().RunUntilIdle();
+ }
+}
+
// This test verifies the Initialize() communication prologue fails when the
// FakeVEA is configured to do so.
TEST_F(MojoVideoEncodeAcceleratorTest, InitializeFailure) {
diff --git a/chromium/media/mojo/common/mojo_shared_buffer_video_frame.cc b/chromium/media/mojo/common/mojo_shared_buffer_video_frame.cc
index 9698d2911af..e53c65ec851 100644
--- a/chromium/media/mojo/common/mojo_shared_buffer_video_frame.cc
+++ b/chromium/media/mojo/common/mojo_shared_buffer_video_frame.cc
@@ -93,27 +93,37 @@ scoped_refptr<MojoSharedBufferVideoFrame> MojoSharedBufferVideoFrame::Create(
return nullptr;
}
+ // Compute the number of bytes needed on each row.
+ const size_t y_row_bytes = RowBytes(kYPlane, format, coded_size.width());
+ const size_t u_row_bytes = RowBytes(kUPlane, format, coded_size.width());
+ const size_t v_row_bytes = RowBytes(kVPlane, format, coded_size.width());
+
// Safe given sizeof(size_t) >= sizeof(int32_t).
size_t y_stride_size_t = y_stride;
size_t u_stride_size_t = u_stride;
size_t v_stride_size_t = v_stride;
- if (y_stride_size_t < RowBytes(kYPlane, format, coded_size.width()) ||
- u_stride_size_t < RowBytes(kUPlane, format, coded_size.width()) ||
- v_stride_size_t < RowBytes(kVPlane, format, coded_size.width())) {
+ if (y_stride_size_t < y_row_bytes || u_stride_size_t < u_row_bytes ||
+ v_stride_size_t < v_row_bytes) {
DLOG(ERROR) << __func__ << " Invalid stride";
return nullptr;
}
- base::CheckedNumeric<size_t> y_rows =
- Rows(kYPlane, format, coded_size.height());
- base::CheckedNumeric<size_t> u_rows =
- Rows(kUPlane, format, coded_size.height());
- base::CheckedNumeric<size_t> v_rows =
- Rows(kVPlane, format, coded_size.height());
-
- base::CheckedNumeric<size_t> y_bound = y_rows * y_stride + y_offset;
- base::CheckedNumeric<size_t> u_bound = u_rows * u_stride + u_offset;
- base::CheckedNumeric<size_t> v_bound = v_rows * v_stride + v_offset;
+ const size_t y_rows = Rows(kYPlane, format, coded_size.height());
+ const size_t u_rows = Rows(kUPlane, format, coded_size.height());
+ const size_t v_rows = Rows(kVPlane, format, coded_size.height());
+
+ // The last row only needs RowBytes() and not a full stride. This is to avoid
+ // problems if the U and V data is interleaved (where |stride| is double the
+ // number of bytes actually needed).
+ base::CheckedNumeric<size_t> y_bound = base::CheckAdd(
+ y_offset, base::CheckMul(base::CheckSub(y_rows, 1), y_stride_size_t),
+ y_row_bytes);
+ base::CheckedNumeric<size_t> u_bound = base::CheckAdd(
+ u_offset, base::CheckMul(base::CheckSub(u_rows, 1), u_stride_size_t),
+ u_row_bytes);
+ base::CheckedNumeric<size_t> v_bound = base::CheckAdd(
+ v_offset, base::CheckMul(base::CheckSub(v_rows, 1), v_stride_size_t),
+ v_row_bytes);
if (!y_bound.IsValid() || !u_bound.IsValid() || !v_bound.IsValid() ||
y_bound.ValueOrDie() > data_size || u_bound.ValueOrDie() > data_size ||
diff --git a/chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc b/chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc
index 68f463f3014..c2fce253ecf 100644
--- a/chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc
+++ b/chromium/media/mojo/common/mojo_shared_buffer_video_frame_unittest.cc
@@ -175,4 +175,54 @@ TEST(MojoSharedBufferVideoFrameTest, TestDestructionCallback) {
EXPECT_TRUE(callback_called);
}
+TEST(MojoSharedBufferVideoFrameTest, InterleavedData) {
+ const VideoPixelFormat format = PIXEL_FORMAT_I420;
+ const int kWidth = 32;
+ const int kHeight = 18;
+ const base::TimeDelta kTimestamp = base::TimeDelta::FromMicroseconds(1338);
+ gfx::Size size(kWidth, kHeight);
+ gfx::Rect visible_rect(size);
+
+ // Create interlaced UV data, which are each 1/4 the size of the Y data.
+ const size_t y_offset = 0;
+ const size_t u_offset =
+ VideoFrame::PlaneSize(format, VideoFrame::kYPlane, size).GetArea();
+ const size_t v_offset =
+ u_offset + VideoFrame::RowBytes(VideoFrame::kUPlane, format, kWidth);
+ const int32_t y_stride =
+ VideoFrame::RowBytes(VideoFrame::kYPlane, format, kWidth);
+ const int32_t u_stride = y_stride;
+ const int32_t v_stride = y_stride;
+
+ // Allocate some shared memory.
+ size_t requested_size = VideoFrame::AllocationSize(format, size);
+ mojo::ScopedSharedBufferHandle handle =
+ mojo::SharedBufferHandle::Create(requested_size);
+ ASSERT_TRUE(handle.is_valid());
+
+ // Allocate frame.
+ scoped_refptr<MojoSharedBufferVideoFrame> frame =
+ MojoSharedBufferVideoFrame::Create(format, size, visible_rect, size,
+ std::move(handle), requested_size,
+ y_offset, u_offset, v_offset, y_stride,
+ u_stride, v_stride, kTimestamp);
+ ASSERT_TRUE(frame.get());
+ EXPECT_EQ(frame->format(), format);
+
+ // The offsets should be set appropriately.
+ EXPECT_EQ(frame->PlaneOffset(VideoFrame::kYPlane), y_offset);
+ EXPECT_EQ(frame->PlaneOffset(VideoFrame::kUPlane), u_offset);
+ EXPECT_EQ(frame->PlaneOffset(VideoFrame::kVPlane), v_offset);
+
+ // The strides should be set appropriately.
+ EXPECT_EQ(frame->stride(VideoFrame::kYPlane), y_stride);
+ EXPECT_EQ(frame->stride(VideoFrame::kUPlane), u_stride);
+ EXPECT_EQ(frame->stride(VideoFrame::kVPlane), v_stride);
+
+ // The data pointers for each plane should be set.
+ EXPECT_TRUE(frame->data(VideoFrame::kYPlane));
+ EXPECT_TRUE(frame->data(VideoFrame::kUPlane));
+ EXPECT_TRUE(frame->data(VideoFrame::kVPlane));
+}
+
} // namespace media
diff --git a/chromium/media/mojo/interfaces/BUILD.gn b/chromium/media/mojo/interfaces/BUILD.gn
index 0b6fa72f858..82eaea97814 100644
--- a/chromium/media/mojo/interfaces/BUILD.gn
+++ b/chromium/media/mojo/interfaces/BUILD.gn
@@ -47,6 +47,10 @@ mojom("interfaces") {
]
}
+ if (is_chromecast) {
+ sources += [ "application_session_id_manager.mojom" ]
+ }
+
public_deps = [
"//gpu/ipc/common:interfaces",
"//mojo/public/mojom/base",
diff --git a/chromium/media/mojo/interfaces/application_session_id_manager.mojom b/chromium/media/mojo/interfaces/application_session_id_manager.mojom
new file mode 100644
index 00000000000..27cc9c2f579
--- /dev/null
+++ b/chromium/media/mojo/interfaces/application_session_id_manager.mojom
@@ -0,0 +1,13 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+module media.mojom;
+
+// This is a service attached to RenderFrameHost.
+// This is only used for Chromecast.
+interface ApplicationSessionIdManager {
+ // Returns the |application_session_id| associated with the
+ // RenderFrameHost.
+ GetApplicationSessionId() => (string application_session_id);
+}; \ No newline at end of file
diff --git a/chromium/media/mojo/interfaces/audio_data_pipe.mojom b/chromium/media/mojo/interfaces/audio_data_pipe.mojom
index be0fa566679..188b6885c8a 100644
--- a/chromium/media/mojo/interfaces/audio_data_pipe.mojom
+++ b/chromium/media/mojo/interfaces/audio_data_pipe.mojom
@@ -4,10 +4,13 @@
module media.mojom;
-// Used by audio streams for realtime-ish data transfer. Used for both input
-// and output streams, see AudioDeviceThread, AudioSyncReader, and
-// AudioInputSyncWriter. |socket| is a base::SyncSocket used for signaling and
-// |shared_memory| is used for the actual audio data.
+import "mojo/public/mojom/base/shared_memory.mojom";
+
+// Used by audio streams for realtime-ish data transfer. ReadWriteAudioDataPipe
+// is used for output streams and ReadOnlyAudioDataPipe is used for input
+// streams, see AudioDeviceThread, AudioSyncReader, and AudioInputSyncWriter.
+// |socket| is a base::SyncSocket used for signaling and |shared_memory| is used
+// for the actual audio data.
//
// When using a pull model, the code pulling the data writes |n| (an unsigned
// 32 bit sequence number) to the socket, and the source from which data is
@@ -22,7 +25,12 @@ module media.mojom;
// in which case the next buffer can be pushed without waiting for the previous
// one to be consumed. This model used is to deliver microphone data to a
// consumer.
-struct AudioDataPipe {
- handle<shared_buffer> shared_memory;
+struct ReadWriteAudioDataPipe {
+ mojo_base.mojom.UnsafeSharedMemoryRegion shared_memory;
+ handle socket;
+};
+
+struct ReadOnlyAudioDataPipe {
+ mojo_base.mojom.ReadOnlySharedMemoryRegion shared_memory;
handle socket;
};
diff --git a/chromium/media/mojo/interfaces/audio_logging.mojom b/chromium/media/mojo/interfaces/audio_logging.mojom
index 36027ed23b7..d8484274e61 100644
--- a/chromium/media/mojo/interfaces/audio_logging.mojom
+++ b/chromium/media/mojo/interfaces/audio_logging.mojom
@@ -36,3 +36,21 @@ interface AudioLog {
// Called when an audio component wants to forward a log message.
OnLogMessage(string message);
};
+
+enum AudioLogComponent {
+ kInputController,
+ kOutputController,
+ kOutputStream,
+};
+
+// This interface is implemented by the browser process to allow clients to
+// create AudioLog instances for tracking the behavior of an audio component.
+// The only client for this interface is the audio service.
+interface AudioLogFactory {
+ // Creates an AudioLog object for tracking the behavior for one instance of
+ // the given |component|. Each instance of an "owning" class must create its
+ // own AudioLog. The created AudioLog object is bound to |audio_log_request|.
+ CreateAudioLog(AudioLogComponent component,
+ int32 component_id,
+ AudioLog& audio_log_request);
+};
diff --git a/chromium/media/mojo/interfaces/audio_output_stream.mojom b/chromium/media/mojo/interfaces/audio_output_stream.mojom
index 7fa1fe4c934..3d901f57edd 100644
--- a/chromium/media/mojo/interfaces/audio_output_stream.mojom
+++ b/chromium/media/mojo/interfaces/audio_output_stream.mojom
@@ -78,5 +78,5 @@ interface AudioOutputStreamProviderClient {
// to transfer the audio data.
// TODO(https://crbug.com/787806): Currently, this will be called at most
// once. In the future, it may be called several times.
- Created(AudioOutputStream stream, AudioDataPipe data_pipe);
+ Created(AudioOutputStream stream, ReadWriteAudioDataPipe data_pipe);
};
diff --git a/chromium/media/mojo/interfaces/key_system_support.mojom b/chromium/media/mojo/interfaces/key_system_support.mojom
index 628c4bdf6e0..6fe2f6d6b64 100644
--- a/chromium/media/mojo/interfaces/key_system_support.mojom
+++ b/chromium/media/mojo/interfaces/key_system_support.mojom
@@ -4,21 +4,36 @@
module media.mojom;
+import "media/mojo/interfaces/content_decryption_module.mojom";
import "media/mojo/interfaces/media_types.mojom";
+// TODO(xhwang): Use "set" instead of "array" if supported by mojom.
+// TODO(crbug.com/796725) Find a way to include profiles and levels for
+// supported codecs.
+struct KeySystemCapability {
+ // Software secure codecs and encryption schemes supported by the CDM.
+ array<VideoCodec> video_codecs;
+ array<EncryptionMode> encryption_schemes;
+
+ // Hardware secure codecs and encryption schemes supported by the CDM,
+ // directly or indirectly through CdmProxy.
+ array<VideoCodec> hw_secure_video_codecs;
+ array<EncryptionMode> hw_secure_encryption_schemes;
+
+ // Session types supported in software secure mode if no
+ // |hw_secure_video_codecs| is supported, or in both modes otherwise.
+ array<CdmSessionType> session_types;
+};
+
interface KeySystemSupport {
- // Query to determine if the browser supports the key system |key_system|.
- // If supported, |is_supported| = true and the remaining properties indicate
- // the codecs supported, if the key system supports persistent licenses, and
- // the set of encryption schemes supported. KeySystemSupport implementation
- // is in the browser process, as it maintains the list of installed key
- // systems. Clients run in the renderer process.
- // TODO(crbug.com/796725) Find a way to include profiles and levels for
- // |supported_video_codecs|.
+ // Query to determine if the browser supports the |key_system|. If supported,
+ // |key_system_capability| is non-null indicating supported capability.
+ // KeySystemSupport implementation is in the browser process, as it maintains
+ // the list of registered CDMs, and hardware secure support check also needs
+ // to run in the browser process because the render process is sandboxed.
+ // KeySystemSupport clients run in the renderer process.
+ // TODO(crbug.com/853264): Make this an async call.
[Sync]
IsKeySystemSupported(string key_system)
- => (bool is_supported,
- array<VideoCodec> supported_video_codecs,
- bool supports_persistent_license,
- array<EncryptionMode> supported_encryption_schemes);
+ => (bool is_supported, KeySystemCapability? key_system_capability);
};
diff --git a/chromium/media/mojo/interfaces/media_drm_storage.mojom b/chromium/media/mojo/interfaces/media_drm_storage.mojom
index a67493c7130..a67c8540960 100644
--- a/chromium/media/mojo/interfaces/media_drm_storage.mojom
+++ b/chromium/media/mojo/interfaces/media_drm_storage.mojom
@@ -13,15 +13,21 @@ struct SessionData {
// Allows MediaDrmBridge to store and retrieve persistent data. This is needed
// for features like per-origin provisioning and persistent license support.
+// The persistent data stored by MediaDrmStorage is auxiliary data, which will
+// be used by MediaDrmBridge to retrieve the actual license. MediaDrm in media
+// service is the true source for the persistent license and origin
+// provisioning.
interface MediaDrmStorage {
- // Initializes |this| and return a random orign ID which can identify the
- // current origin. The origin ID should be randomly generated if it doesn't
- // exist. |origin_id| must be valid.
- // This should not modify anything in the storage.
+ // Initializes |this| and returns a random identifier than can be used to
+ // identify the current origin. The origin ID should be randomly generated if
+ // it doesn't exist. |origin_id| must be valid and unique among all origins.
+ // If origin information doesn't exist, the implementation will persist the
+ // the information (e.g. origin ID, provision time) in the storage.
Initialize() => (mojo_base.mojom.UnguessableToken origin_id);
// Saves origin information (e.g. origin ID, provision time) in the storage
- // after MediaDrm is provisioned for current origin.
+ // after MediaDrm is provisioned for current origin. It will clear all
+ // existing persistent session data for the origin.
OnProvisioned() => (bool success);
// Saves persistent session data for |session_id|.
diff --git a/chromium/media/mojo/interfaces/video_decoder_config_struct_traits.cc b/chromium/media/mojo/interfaces/video_decoder_config_struct_traits.cc
index fc0bd489be7..c414581c790 100644
--- a/chromium/media/mojo/interfaces/video_decoder_config_struct_traits.cc
+++ b/chromium/media/mojo/interfaces/video_decoder_config_struct_traits.cc
@@ -73,4 +73,4 @@ bool StructTraits<media::mojom::VideoDecoderConfigDataView,
return true;
}
-} // namespace mojo \ No newline at end of file
+} // namespace mojo
diff --git a/chromium/media/mojo/interfaces/video_encode_accelerator.mojom b/chromium/media/mojo/interfaces/video_encode_accelerator.mojom
index d2846b0ff76..a8eb9ec286b 100644
--- a/chromium/media/mojo/interfaces/video_encode_accelerator.mojom
+++ b/chromium/media/mojo/interfaces/video_encode_accelerator.mojom
@@ -36,6 +36,12 @@ interface VideoEncodeAcceleratorProvider {
CreateVideoEncodeAccelerator(VideoEncodeAccelerator& request);
};
+// Class that describes how video bitrate, in bps, is allocated across temporal
+// and spatial layers. See media::VideoBitrateAllocation for more details.
+struct VideoBitrateAllocation {
+ array<int32> bitrates;
+};
+
interface VideoEncodeAccelerator {
// See media::VideoEncodeAccelerator::Error
enum Error {
@@ -60,7 +66,22 @@ interface VideoEncodeAccelerator {
UseOutputBitstreamBuffer(int32 bitstream_buffer_id,
handle<shared_buffer> buffer);
- RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);
+ RequestEncodingParametersChange(
+ VideoBitrateAllocation bitrate_allocation,
+ uint32 framerate);
+};
+
+struct Vp8Metadata {
+ bool non_reference;
+ uint8 temporal_idx;
+ bool layer_sync;
+};
+
+struct BitstreamBufferMetadata {
+ uint32 payload_size_bytes;
+ bool key_frame;
+ mojo_base.mojom.TimeDelta timestamp;
+ Vp8Metadata? vp8;
};
interface VideoEncodeAcceleratorClient {
@@ -69,8 +90,8 @@ interface VideoEncodeAcceleratorClient {
gfx.mojom.Size input_coded_size,
uint32 output_buffer_size);
- BitstreamBufferReady(int32 bitstream_buffer_id, uint32 payload_size,
- bool key_frame, mojo_base.mojom.TimeDelta timestamp);
+ BitstreamBufferReady(int32 bitstream_buffer_id,
+ BitstreamBufferMetadata metadata);
NotifyError(VideoEncodeAccelerator.Error error);
};
diff --git a/chromium/media/mojo/interfaces/video_encode_accelerator.typemap b/chromium/media/mojo/interfaces/video_encode_accelerator.typemap
index b43f3474f8b..70be7dbfea1 100644
--- a/chromium/media/mojo/interfaces/video_encode_accelerator.typemap
+++ b/chromium/media/mojo/interfaces/video_encode_accelerator.typemap
@@ -11,10 +11,17 @@ traits_headers =
sources = [
"//media/mojo/interfaces/video_encode_accelerator_typemap_traits.cc",
+ "//media/mojo/interfaces/video_encode_accelerator_typemap_traits.h",
]
deps = [
"//base",
+ "//media",
]
-type_mappings = [ "media.mojom.VideoEncodeAccelerator.Error=media::VideoEncodeAccelerator::Error" ]
+type_mappings = [
+ "media.mojom.BitstreamBufferMetadata=media::BitstreamBufferMetadata",
+ "media.mojom.VideoBitrateAllocation=media::VideoBitrateAllocation",
+ "media.mojom.VideoEncodeAccelerator.Error=media::VideoEncodeAccelerator::Error",
+ "media.mojom.Vp8Metadata=media::Vp8Metadata",
+]
diff --git a/chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.cc b/chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.cc
index cfb6b225881..3deea35e8ef 100644
--- a/chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.cc
+++ b/chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.cc
@@ -5,6 +5,8 @@
#include "media/mojo/interfaces/video_encode_accelerator_typemap_traits.h"
#include "base/logging.h"
+#include "media/base/video_bitrate_allocation.h"
+#include "mojo/public/cpp/base/time_mojom_traits.h"
namespace mojo {
@@ -45,4 +47,71 @@ bool EnumTraits<media::mojom::VideoEncodeAccelerator::Error,
return false;
}
+// static
+std::vector<int32_t> StructTraits<media::mojom::VideoBitrateAllocationDataView,
+ media::VideoBitrateAllocation>::
+ bitrates(const media::VideoBitrateAllocation& bitrate_allocation) {
+ std::vector<int32_t> bitrates;
+ int sum_bps = 0;
+ for (size_t si = 0; si < media::VideoBitrateAllocation::kMaxSpatialLayers;
+ ++si) {
+ for (size_t ti = 0; ti < media::VideoBitrateAllocation::kMaxTemporalLayers;
+ ++ti) {
+ if (sum_bps == bitrate_allocation.GetSumBps()) {
+ // The rest is all zeros, no need to iterate further.
+ return bitrates;
+ }
+ const int layer_bitrate = bitrate_allocation.GetBitrateBps(si, ti);
+ bitrates.emplace_back(layer_bitrate);
+ sum_bps += layer_bitrate;
+ }
+ }
+ return bitrates;
+}
+
+// static
+bool StructTraits<media::mojom::VideoBitrateAllocationDataView,
+ media::VideoBitrateAllocation>::
+ Read(media::mojom::VideoBitrateAllocationDataView data,
+ media::VideoBitrateAllocation* out_bitrate_allocation) {
+ ArrayDataView<int32_t> bitrates;
+ data.GetBitratesDataView(&bitrates);
+ size_t size = bitrates.size();
+ if (size > media::VideoBitrateAllocation::kMaxSpatialLayers *
+ media::VideoBitrateAllocation::kMaxTemporalLayers) {
+ return false;
+ }
+ for (size_t i = 0; i < size; ++i) {
+ const int32_t bitrate = bitrates[i];
+ const size_t si = i / media::VideoBitrateAllocation::kMaxTemporalLayers;
+ const size_t ti = i % media::VideoBitrateAllocation::kMaxTemporalLayers;
+ if (!out_bitrate_allocation->SetBitrate(si, ti, bitrate)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+// static
+bool StructTraits<media::mojom::BitstreamBufferMetadataDataView,
+ media::BitstreamBufferMetadata>::
+ Read(media::mojom::BitstreamBufferMetadataDataView data,
+ media::BitstreamBufferMetadata* out_metadata) {
+ out_metadata->payload_size_bytes = data.payload_size_bytes();
+ out_metadata->key_frame = data.key_frame();
+ if (!data.ReadTimestamp(&out_metadata->timestamp)) {
+ return false;
+ }
+ return data.ReadVp8(&out_metadata->vp8);
+}
+
+// static
+bool StructTraits<media::mojom::Vp8MetadataDataView, media::Vp8Metadata>::Read(
+ media::mojom::Vp8MetadataDataView data,
+ media::Vp8Metadata* out_metadata) {
+ out_metadata->non_reference = data.non_reference();
+ out_metadata->temporal_idx = data.temporal_idx();
+ out_metadata->layer_sync = data.layer_sync();
+ return true;
+}
} // namespace mojo
diff --git a/chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.h b/chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.h
index 5929c3ab9f3..96af90c60a4 100644
--- a/chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.h
+++ b/chromium/media/mojo/interfaces/video_encode_accelerator_typemap_traits.h
@@ -20,6 +20,58 @@ struct EnumTraits<media::mojom::VideoEncodeAccelerator::Error,
media::VideoEncodeAccelerator::Error* out);
};
+template <>
+class StructTraits<media::mojom::VideoBitrateAllocationDataView,
+ media::VideoBitrateAllocation> {
+ public:
+ static std::vector<int32_t> bitrates(
+ const media::VideoBitrateAllocation& bitrate_allocation);
+
+ static bool Read(media::mojom::VideoBitrateAllocationDataView data,
+ media::VideoBitrateAllocation* out_bitrate_allocation);
+};
+
+template <>
+class StructTraits<media::mojom::BitstreamBufferMetadataDataView,
+ media::BitstreamBufferMetadata> {
+ public:
+ static size_t payload_size_bytes(const media::BitstreamBufferMetadata& bbm) {
+ return bbm.payload_size_bytes;
+ }
+ static bool key_frame(const media::BitstreamBufferMetadata& bbm) {
+ return bbm.key_frame;
+ }
+ static base::TimeDelta timestamp(const media::BitstreamBufferMetadata& bbm) {
+ return bbm.timestamp;
+ }
+ static const base::Optional<media::Vp8Metadata>& vp8(
+ const media::BitstreamBufferMetadata& bbm) {
+ return bbm.vp8;
+ }
+
+ static bool Read(media::mojom::BitstreamBufferMetadataDataView data,
+ media::BitstreamBufferMetadata* out_metadata);
+};
+
+template <>
+class StructTraits<media::mojom::Vp8MetadataDataView, media::Vp8Metadata> {
+ public:
+ static bool non_reference(const media::Vp8Metadata& vp8) {
+ return vp8.non_reference;
+ }
+
+ static uint8_t temporal_idx(const media::Vp8Metadata& vp8) {
+ return vp8.temporal_idx;
+ }
+
+ static bool layer_sync(const media::Vp8Metadata& vp8) {
+ return vp8.layer_sync;
+ }
+
+ static bool Read(media::mojom::Vp8MetadataDataView data,
+ media::Vp8Metadata* out_metadata);
+};
+
} // namespace mojo
#endif // MEDIA_MOJO_INTERFACES_VIDEO_ENCODE_ACCELERATOR_TYPEMAP_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/watch_time_recorder.mojom b/chromium/media/mojo/interfaces/watch_time_recorder.mojom
index 9326c013837..ef69b676dba 100644
--- a/chromium/media/mojo/interfaces/watch_time_recorder.mojom
+++ b/chromium/media/mojo/interfaces/watch_time_recorder.mojom
@@ -12,15 +12,23 @@ import "url/mojom/origin.mojom";
// Structure describing immutable properties for the current watch time report.
// If any of these properties change a new WatchTimeRecorder will be requested.
struct PlaybackProperties {
- AudioCodec audio_codec; // Note: We may not know the codec during all
- VideoCodec video_codec; // playbacks (HLS, remoting, etc).
- bool has_audio; // Note: Due to the above, we also need these
- bool has_video; // booleans for audio and video presence.
- bool is_background; // Is report for playback in the background?
- bool is_muted; // Is report for muted playback?
+ bool has_audio;
+ bool has_video;
+ bool is_background; // Is report for playback in the background?
+ bool is_muted; // Is report for muted playback?
bool is_mse;
bool is_eme;
bool is_embedded_media_experience; // Playback from 'Downloads' on Android.
+};
+
+// Structure describing mutable properties for the current watch time report.
+// The WatchTimeRecorder will use changes of these properties only for UKM
+// reporting and will not interrupt UMA reporting for changes.
+struct SecondaryPlaybackProperties {
+ AudioCodec audio_codec; // Note: We may not know the codec during all
+ VideoCodec video_codec; // playbacks (HLS, remoting, etc).
+ string audio_decoder_name;
+ string video_decoder_name;
gfx.mojom.Size natural_size; // Size of video frame; (0, 0) if audio only.
};
@@ -55,14 +63,9 @@ interface WatchTimeRecorder {
// finalizing UKM watch time.
OnError(PipelineStatus status);
- // Lazily sets the audio or video decoder name respectively. Must only be
- // called once. If these values change, a new recorder should be created.
- //
- // Note: We have setters for these instead of putting them in the properties
- // structure because we don't know decoder name for cast/remoting/hls and we
- // don't have a convenient hook to tell us those cases are engaged
- SetAudioDecoderName(string name);
- SetVideoDecoderName(string name);
+ // Updates properties that the recorder will create a new UKM BasicPlayback
+ // record for, but for which UMA will continue accruing.
+ UpdateSecondaryProperties(SecondaryPlaybackProperties secondary_properties);
// Lazily sets the autoplay status of the player. Must not be called multiple
// times with different values.
@@ -71,6 +74,12 @@ interface WatchTimeRecorder {
// starts but the reporter is created before.
SetAutoplayInitiated(bool value);
+ // Updates the duration maintained by the recorder. May be called any number
+ // of times during playback. Duration is rounded to the most significant digit
+ // when greater than 1 second for privacy protection. E.g., 14s will become
+ // 10s, and 15s will become 20s. May be called any number of times.
+ OnDurationChanged(mojo_base.mojom.TimeDelta duration);
+
// Indicates that an underflow event has occurred while collecting watch time.
// Used to report mean values for rebuffering metrics. As with watch time,
// this is an absolute count and not relative since the last call.
diff --git a/chromium/media/mojo/services/BUILD.gn b/chromium/media/mojo/services/BUILD.gn
index 938b4c6a003..99bbf748eea 100644
--- a/chromium/media/mojo/services/BUILD.gn
+++ b/chromium/media/mojo/services/BUILD.gn
@@ -177,7 +177,7 @@ source_set("unit_tests") {
"//components/ukm:test_support",
"//media:test_support",
"//media/mojo:test_support",
- "//mojo/edk",
+ "//mojo/core/embedder",
"//mojo/public/interfaces/bindings/tests:test_interfaces",
"//services/metrics/public/cpp:ukm_builders",
"//testing/gmock",
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.h b/chromium/media/mojo/services/gpu_mojo_media_client.h
index 37acd5ab568..9bbf3ccfa5e 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.h
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.h
@@ -11,8 +11,8 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/android_overlay_mojo_factory.h"
#include "media/cdm/cdm_proxy.h"
#include "media/mojo/services/mojo_media_client.h"
diff --git a/chromium/media/mojo/services/interface_factory_impl.cc b/chromium/media/mojo/services/interface_factory_impl.cc
index 14df0447a7d..1465a28b893 100644
--- a/chromium/media/mojo/services/interface_factory_impl.cc
+++ b/chromium/media/mojo/services/interface_factory_impl.cc
@@ -116,7 +116,8 @@ void InterfaceFactoryImpl::CreateRenderer(
// audio device ID. See interface_factory.mojom.
const std::string& audio_device_id = type_specific_id;
auto renderer = mojo_media_client_->CreateRenderer(
- base::ThreadTaskRunnerHandle::Get(), media_log_, audio_device_id);
+ interfaces_.get(), base::ThreadTaskRunnerHandle::Get(), media_log_,
+ audio_device_id);
if (!renderer) {
DLOG(ERROR) << "Renderer creation failed.";
return;
diff --git a/chromium/media/mojo/services/media_manifest.json b/chromium/media/mojo/services/media_manifest.json
index ca7971ecb25..3e34a78740e 100644
--- a/chromium/media/mojo/services/media_manifest.json
+++ b/chromium/media/mojo/services/media_manifest.json
@@ -7,7 +7,8 @@
"media:media": [ "media.mojom.MediaService" ]
},
"requires": {
- "*": [ "app" ]
+ "*": [ "app" ],
+ "chromecast": [ "multizone" ]
}
}
}
diff --git a/chromium/media/mojo/services/media_metrics_provider.cc b/chromium/media/mojo/services/media_metrics_provider.cc
index 7cf8d55e93d..68b61055398 100644
--- a/chromium/media/mojo/services/media_metrics_provider.cc
+++ b/chromium/media/mojo/services/media_metrics_provider.cc
@@ -4,6 +4,7 @@
#include "media/mojo/services/media_metrics_provider.h"
+#include "base/logging.h"
#include "media/mojo/services/video_decode_stats_recorder.h"
#include "media/mojo/services/watch_time_recorder.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
@@ -16,8 +17,9 @@ constexpr char kInvalidInitialize[] = "Initialize() was not called correctly.";
static uint64_t g_player_id = 0;
-MediaMetricsProvider::MediaMetricsProvider(VideoDecodePerfHistory* perf_history)
- : player_id_(g_player_id++), perf_history_(perf_history) {}
+MediaMetricsProvider::MediaMetricsProvider(
+ VideoDecodePerfHistory::SaveCallback save_cb)
+ : player_id_(g_player_id++), save_cb_(save_cb) {}
MediaMetricsProvider::~MediaMetricsProvider() {
// UKM may be unavailable in content_shell or other non-chrome/ builds; it
@@ -49,10 +51,11 @@ MediaMetricsProvider::~MediaMetricsProvider() {
}
// static
-void MediaMetricsProvider::Create(VideoDecodePerfHistory* perf_history,
+void MediaMetricsProvider::Create(VideoDecodePerfHistory::SaveCallback save_cb,
mojom::MediaMetricsProviderRequest request) {
- mojo::MakeStrongBinding(std::make_unique<MediaMetricsProvider>(perf_history),
- std::move(request));
+ mojo::MakeStrongBinding(
+ std::make_unique<MediaMetricsProvider>(std::move(save_cb)),
+ std::move(request));
}
void MediaMetricsProvider::Initialize(bool is_mse,
@@ -118,9 +121,14 @@ void MediaMetricsProvider::AcquireVideoDecodeStatsRecorder(
return;
}
+ if (save_cb_.is_null()) {
+ DVLOG(3) << __func__ << " Ignoring request, SaveCallback is null";
+ return;
+ }
+
mojo::MakeStrongBinding(
std::make_unique<VideoDecodeStatsRecorder>(
- untrusted_top_origin_, is_top_frame_, player_id_, perf_history_),
+ untrusted_top_origin_, is_top_frame_, player_id_, save_cb_),
std::move(request));
}
diff --git a/chromium/media/mojo/services/media_metrics_provider.h b/chromium/media/mojo/services/media_metrics_provider.h
index a11dcadce30..a870c583c45 100644
--- a/chromium/media/mojo/services/media_metrics_provider.h
+++ b/chromium/media/mojo/services/media_metrics_provider.h
@@ -11,6 +11,7 @@
#include "media/base/timestamp_constants.h"
#include "media/mojo/interfaces/media_metrics_provider.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
+#include "media/mojo/services/video_decode_perf_history.h"
#include "url/origin.h"
namespace media {
@@ -20,12 +21,12 @@ class VideoDecodePerfHistory;
class MEDIA_MOJO_EXPORT MediaMetricsProvider
: public mojom::MediaMetricsProvider {
public:
- explicit MediaMetricsProvider(VideoDecodePerfHistory* perf_history);
+ explicit MediaMetricsProvider(VideoDecodePerfHistory::SaveCallback save_cb);
~MediaMetricsProvider() override;
// Creates a MediaMetricsProvider, |perf_history| may be nullptr if perf
// history database recording is disabled.
- static void Create(VideoDecodePerfHistory* perf_history,
+ static void Create(VideoDecodePerfHistory::SaveCallback save_cb,
mojom::MediaMetricsProviderRequest request);
private:
@@ -62,7 +63,7 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
base::TimeDelta time_to_first_frame_ = kNoTimestamp;
base::TimeDelta time_to_play_ready_ = kNoTimestamp;
- VideoDecodePerfHistory* const perf_history_;
+ const VideoDecodePerfHistory::SaveCallback save_cb_;
DISALLOW_COPY_AND_ASSIGN(MediaMetricsProvider);
};
diff --git a/chromium/media/mojo/services/media_metrics_provider_unittest.cc b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
index 7b2a6a91756..62e43286386 100644
--- a/chromium/media/mojo/services/media_metrics_provider_unittest.cc
+++ b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
@@ -30,7 +30,8 @@ class MediaMetricsProviderTest : public testing::Test {
~MediaMetricsProviderTest() override { base::RunLoop().RunUntilIdle(); }
void Initialize(bool is_mse, bool is_top_frame, const std::string& origin) {
- MediaMetricsProvider::Create(nullptr, mojo::MakeRequest(&provider_));
+ MediaMetricsProvider::Create(VideoDecodePerfHistory::SaveCallback(),
+ mojo::MakeRequest(&provider_));
provider_->Initialize(is_mse, is_top_frame,
url::Origin::Create(GURL(origin)));
}
diff --git a/chromium/media/mojo/services/media_service_factory.h b/chromium/media/mojo/services/media_service_factory.h
index 8dd062587e7..3a35524fbee 100644
--- a/chromium/media/mojo/services/media_service_factory.h
+++ b/chromium/media/mojo/services/media_service_factory.h
@@ -10,8 +10,8 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/base/android_overlay_mojo_factory.h"
#include "media/cdm/cdm_proxy.h"
#include "media/mojo/services/media_mojo_export.h"
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream.cc b/chromium/media/mojo/services/mojo_audio_input_stream.cc
index 3762dbb608c..87518ab4c70 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream.cc
+++ b/chromium/media/mojo/services/mojo_audio_input_stream.cc
@@ -85,16 +85,14 @@ void MojoAudioInputStream::OnStreamCreated(
return;
}
- mojo::ScopedSharedBufferHandle buffer_handle =
- mojo::WrapReadOnlySharedMemoryRegion(std::move(shared_memory_region));
mojo::ScopedHandle socket_handle =
mojo::WrapPlatformFile(foreign_socket->Release());
- DCHECK(buffer_handle.is_valid());
DCHECK(socket_handle.is_valid());
base::ResetAndReturn(&stream_created_callback_)
- .Run({base::in_place, std::move(buffer_handle), std::move(socket_handle)},
+ .Run({base::in_place, std::move(shared_memory_region),
+ std::move(socket_handle)},
initially_muted);
}
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream.h b/chromium/media/mojo/services/mojo_audio_input_stream.h
index 26c4af1ef56..291d280d2e2 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream.h
+++ b/chromium/media/mojo/services/mojo_audio_input_stream.h
@@ -24,7 +24,7 @@ class MEDIA_MOJO_EXPORT MojoAudioInputStream
public AudioInputDelegate::EventHandler {
public:
using StreamCreatedCallback =
- base::OnceCallback<void(mojom::AudioDataPipePtr, bool)>;
+ base::OnceCallback<void(mojom::ReadOnlyAudioDataPipePtr, bool)>;
using CreateDelegateCallback =
base::OnceCallback<std::unique_ptr<AudioInputDelegate>(
AudioInputDelegate::EventHandler*)>;
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc b/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
index a193f4fc665..2984bbf4a74 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
@@ -97,8 +97,9 @@ class MockClient : public mojom::AudioInputStreamClient {
public:
MockClient() = default;
- void Initialized(mojom::AudioDataPipePtr data_pipe, bool initially_muted) {
- ASSERT_TRUE(data_pipe->shared_memory.is_valid());
+ void Initialized(mojom::ReadOnlyAudioDataPipePtr data_pipe,
+ bool initially_muted) {
+ ASSERT_TRUE(data_pipe->shared_memory.IsValid());
ASSERT_TRUE(data_pipe->socket.is_valid());
base::PlatformFile fd;
@@ -106,8 +107,7 @@ class MockClient : public mojom::AudioInputStreamClient {
socket_ = std::make_unique<base::CancelableSyncSocket>(fd);
EXPECT_NE(socket_->handle(), base::CancelableSyncSocket::kInvalidHandle);
- region_ = mojo::UnwrapReadOnlySharedMemoryRegion(
- std::move(data_pipe->shared_memory));
+ region_ = std::move(data_pipe->shared_memory);
EXPECT_TRUE(region_.IsValid());
GotNotification(initially_muted);
@@ -131,7 +131,8 @@ std::unique_ptr<AudioInputDelegate> CreateNoDelegate(
return nullptr;
}
-void NotCalled(mojom::AudioDataPipePtr data_pipe, bool initially_muted) {
+void NotCalled(mojom::ReadOnlyAudioDataPipePtr data_pipe,
+ bool initially_muted) {
EXPECT_TRUE(false) << "The StreamCreated callback was called despite the "
"test expecting it not to.";
}
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream.cc b/chromium/media/mojo/services/mojo_audio_output_stream.cc
index 5cdfb6ca5fd..1e44a819cf0 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream.cc
@@ -8,7 +8,7 @@
#include <utility>
#include "base/callback_helpers.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/sync_socket.h"
#include "mojo/public/cpp/system/platform_handle.h"
@@ -74,12 +74,9 @@ void MojoAudioOutputStream::OnStreamCreated(
return;
}
- mojo::ScopedSharedBufferHandle buffer_handle =
- mojo::WrapUnsafeSharedMemoryRegion(std::move(shared_memory_region));
mojo::ScopedHandle socket_handle =
mojo::WrapPlatformFile(foreign_socket->Release());
- DCHECK(buffer_handle.is_valid());
DCHECK(socket_handle.is_valid());
mojom::AudioOutputStreamPtr stream;
@@ -89,7 +86,7 @@ void MojoAudioOutputStream::OnStreamCreated(
&MojoAudioOutputStream::StreamConnectionLost, base::Unretained(this)));
std::move(stream_created_callback_)
- .Run(std::move(stream), {base::in_place, std::move(buffer_handle),
+ .Run(std::move(stream), {base::in_place, std::move(shared_memory_region),
std::move(socket_handle)});
}
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream.h b/chromium/media/mojo/services/mojo_audio_output_stream.h
index 31c1fcf5eef..1556ca59032 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream.h
+++ b/chromium/media/mojo/services/mojo_audio_output_stream.h
@@ -24,7 +24,7 @@ class MEDIA_MOJO_EXPORT MojoAudioOutputStream
public:
using StreamCreatedCallback =
base::OnceCallback<void(mojom::AudioOutputStreamPtr,
- media::mojom::AudioDataPipePtr)>;
+ media::mojom::ReadWriteAudioDataPipePtr)>;
using CreateDelegateCallback =
base::OnceCallback<std::unique_ptr<AudioOutputDelegate>(
AudioOutputDelegate::EventHandler*)>;
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
index e559976433e..c0e1dfe0f85 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
@@ -12,7 +12,7 @@
#include "build/build_config.h"
#include "media/audio/audio_output_delegate.h"
#include "media/base/audio_parameters.h"
-#include "mojo/edk/embedder/embedder.h"
+#include "mojo/core/embedder/embedder.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -68,7 +68,7 @@ std::unique_ptr<AudioOutputDelegate> CreateFakeDelegate(
TEST(MojoAudioOutputStreamProviderTest, AcquireTwice_BadMessage) {
base::MessageLoop loop;
bool got_bad_message = false;
- mojo::edk::SetDefaultProcessErrorCallback(
+ mojo::core::SetDefaultProcessErrorCallback(
base::BindRepeating([](bool* got_bad_message,
const std::string& s) { *got_bad_message = true; },
&got_bad_message));
@@ -96,14 +96,15 @@ TEST(MojoAudioOutputStreamProviderTest, AcquireTwice_BadMessage) {
EXPECT_TRUE(got_bad_message);
Mock::VerifyAndClear(&deleter);
- mojo::edk::SetDefaultProcessErrorCallback(mojo::edk::ProcessErrorCallback());
+ mojo::core::SetDefaultProcessErrorCallback(
+ mojo::core::ProcessErrorCallback());
}
TEST(MojoAudioOutputStreamProviderTest,
Bitstream_BadMessageOnNonAndoirdPlatforms) {
base::MessageLoop loop;
bool got_bad_message = false;
- mojo::edk::SetDefaultProcessErrorCallback(
+ mojo::core::SetDefaultProcessErrorCallback(
base::BindRepeating([](bool* got_bad_message,
const std::string& s) { *got_bad_message = true; },
&got_bad_message));
@@ -135,7 +136,8 @@ TEST(MojoAudioOutputStreamProviderTest,
EXPECT_TRUE(got_bad_message);
Mock::VerifyAndClear(&deleter);
#endif
- mojo::edk::SetDefaultProcessErrorCallback(mojo::edk::ProcessErrorCallback());
+ mojo::core::SetDefaultProcessErrorCallback(
+ mojo::core::ProcessErrorCallback());
}
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
index 508e845d35f..1206cc08e41 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
@@ -6,7 +6,7 @@
#include <utility>
-#include "base/memory/shared_memory.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/sync_socket.h"
@@ -96,8 +96,8 @@ class MockClient {
public:
MockClient() = default;
- void Initialize(mojom::AudioDataPipePtr data_pipe) {
- ASSERT_TRUE(data_pipe->shared_memory.is_valid());
+ void Initialize(mojom::ReadWriteAudioDataPipePtr data_pipe) {
+ ASSERT_TRUE(data_pipe->shared_memory.IsValid());
ASSERT_TRUE(data_pipe->socket.is_valid());
base::PlatformFile fd;
@@ -105,17 +105,7 @@ class MockClient {
socket_ = std::make_unique<base::CancelableSyncSocket>(fd);
EXPECT_NE(socket_->handle(), base::CancelableSyncSocket::kInvalidHandle);
- size_t memory_length;
- base::SharedMemoryHandle shmem_handle;
- mojo::UnwrappedSharedMemoryHandleProtection protection;
- EXPECT_EQ(mojo::UnwrapSharedMemoryHandle(
- std::move(data_pipe->shared_memory), &shmem_handle,
- &memory_length, &protection),
- MOJO_RESULT_OK);
- EXPECT_EQ(protection,
- mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite);
- buffer_ = std::make_unique<base::SharedMemory>(shmem_handle,
- false /* read_only */);
+ shared_memory_region_ = std::move(data_pipe->shared_memory);
GotNotification();
}
@@ -123,7 +113,7 @@ class MockClient {
MOCK_METHOD0(GotNotification, void());
private:
- std::unique_ptr<base::SharedMemory> buffer_;
+ base::UnsafeSharedMemoryRegion shared_memory_region_;
std::unique_ptr<base::CancelableSyncSocket> socket_;
};
@@ -132,7 +122,7 @@ std::unique_ptr<AudioOutputDelegate> CreateNoDelegate(
return nullptr;
}
-void NotCalled(mojom::AudioOutputStreamPtr, mojom::AudioDataPipePtr) {
+void NotCalled(mojom::AudioOutputStreamPtr, mojom::ReadWriteAudioDataPipePtr) {
ADD_FAILURE() << "The StreamCreated callback was called despite the test "
"expecting it not to.";
}
@@ -159,7 +149,7 @@ class MojoAudioOutputStreamTest : public Test {
protected:
void CreatedStream(mojom::AudioOutputStreamPtr stream,
- mojom::AudioDataPipePtr data_pipe) {
+ mojom::ReadWriteAudioDataPipePtr data_pipe) {
EXPECT_EQ(mojo::FuseMessagePipes(pending_stream_request_.PassMessagePipe(),
stream.PassInterface().PassHandle()),
MOJO_RESULT_OK);
diff --git a/chromium/media/mojo/services/mojo_cdm_allocator.cc b/chromium/media/mojo/services/mojo_cdm_allocator.cc
index a98504ac131..67df94cff45 100644
--- a/chromium/media/mojo/services/mojo_cdm_allocator.cc
+++ b/chromium/media/mojo/services/mojo_cdm_allocator.cc
@@ -149,13 +149,10 @@ class MojoCdmVideoFrame : public VideoFrameImpl {
Stride(kYPlane), Stride(kUPlane), Stride(kVPlane),
base::TimeDelta::FromMicroseconds(Timestamp()));
- // If |frame| is not created something is wrong with the video frame data
- // returned by the CDM. Catch it here rather than returning a null frame
- // to the renderer.
- // TODO(crbug.com/829443). Monitor crashes to see if this happens.
- CHECK(frame);
-
- frame->SetMojoSharedBufferDoneCB(mojo_shared_buffer_done_cb_);
+ // |frame| could fail to be created if the memory can't be mapped into
+ // this address space.
+ if (frame)
+ frame->SetMojoSharedBufferDoneCB(mojo_shared_buffer_done_cb_);
return frame;
}
diff --git a/chromium/media/mojo/services/mojo_cdm_proxy.cc b/chromium/media/mojo/services/mojo_cdm_proxy.cc
index d5161b5940e..95a1e6ee49a 100644
--- a/chromium/media/mojo/services/mojo_cdm_proxy.cc
+++ b/chromium/media/mojo/services/mojo_cdm_proxy.cc
@@ -43,9 +43,8 @@ cdm::CdmProxyClient::Protocol ToCdmProtocol(CdmProxy::Protocol protocol) {
switch (protocol) {
case CdmProxy::Protocol::kNone:
return cdm::CdmProxyClient::Protocol::kNone;
- case CdmProxy::Protocol::kIntelConvergedSecurityAndManageabilityEngine:
- return cdm::CdmProxyClient::Protocol::
- kIntelConvergedSecurityAndManageabilityEngine;
+ case CdmProxy::Protocol::kIntel:
+ return cdm::CdmProxyClient::Protocol::kIntel;
}
NOTREACHED() << "Unexpected protocol: " << static_cast<int32_t>(protocol);
diff --git a/chromium/media/mojo/services/mojo_cdm_service_context.cc b/chromium/media/mojo/services/mojo_cdm_service_context.cc
index 49e71b5e111..394e33bf36a 100644
--- a/chromium/media/mojo/services/mojo_cdm_service_context.cc
+++ b/chromium/media/mojo/services/mojo_cdm_service_context.cc
@@ -5,6 +5,7 @@
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "base/logging.h"
+#include "media/base/callback_registry.h"
#include "media/base/cdm_context.h"
#include "media/base/content_decryption_module.h"
#include "media/cdm/cdm_context_ref_impl.h"
@@ -38,6 +39,13 @@ class CdmProxyContextRef : public CdmContextRef, public CdmContext {
private:
// CdmContext implementation.
+ std::unique_ptr<CallbackRegistration> RegisterNewKeyCB(
+ base::RepeatingClosure new_key_cb) final {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ return cdm_context_ ? cdm_context_->RegisterNewKeyCB(std::move(new_key_cb))
+ : nullptr;
+ }
+
Decryptor* GetDecryptor() final {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
return cdm_context_ ? cdm_context_->GetDecryptor() : nullptr;
diff --git a/chromium/media/mojo/services/mojo_media_client.cc b/chromium/media/mojo/services/mojo_media_client.cc
index 83ef569ff33..04b7a7227ac 100644
--- a/chromium/media/mojo/services/mojo_media_client.cc
+++ b/chromium/media/mojo/services/mojo_media_client.cc
@@ -38,6 +38,7 @@ std::unique_ptr<VideoDecoder> MojoMediaClient::CreateVideoDecoder(
}
std::unique_ptr<Renderer> MojoMediaClient::CreateRenderer(
+ service_manager::mojom::InterfaceProvider* host_interfaces,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
const std::string& audio_device_id) {
diff --git a/chromium/media/mojo/services/mojo_media_client.h b/chromium/media/mojo/services/mojo_media_client.h
index 47c8b9712d6..fbbd81929ec 100644
--- a/chromium/media/mojo/services/mojo_media_client.h
+++ b/chromium/media/mojo/services/mojo_media_client.h
@@ -64,6 +64,7 @@ class MEDIA_MOJO_EXPORT MojoMediaClient {
// Returns the Renderer to be used by MojoRendererService.
// TODO(hubbe): Find out whether we should pass in |target_color_space| here.
virtual std::unique_ptr<Renderer> CreateRenderer(
+ service_manager::mojom::InterfaceProvider* host_interfaces,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
const std::string& audio_device_id);
diff --git a/chromium/media/mojo/services/mojo_media_log.cc b/chromium/media/mojo/services/mojo_media_log.cc
index 30941d4bab8..3d14f274928 100644
--- a/chromium/media/mojo/services/mojo_media_log.cc
+++ b/chromium/media/mojo/services/mojo_media_log.cc
@@ -5,12 +5,16 @@
#include "media/mojo/services/mojo_media_log.h"
#include "base/logging.h"
+#include "base/threading/sequenced_task_runner_handle.h"
namespace media {
-MojoMediaLog::MojoMediaLog(
- scoped_refptr<mojom::ThreadSafeMediaLogAssociatedPtr> remote_media_log)
- : remote_media_log_(std::move(remote_media_log)) {
+MojoMediaLog::MojoMediaLog(mojom::MediaLogAssociatedPtrInfo remote_media_log,
+ scoped_refptr<base::SequencedTaskRunner> task_runner)
+ : remote_media_log_(std::move(remote_media_log)),
+ task_runner_(std::move(task_runner)),
+ weak_ptr_factory_(this) {
+ weak_this_ = weak_ptr_factory_.GetWeakPtr();
DVLOG(1) << __func__;
}
@@ -21,7 +25,27 @@ MojoMediaLog::~MojoMediaLog() {
void MojoMediaLog::AddEvent(std::unique_ptr<MediaLogEvent> event) {
DVLOG(1) << __func__;
DCHECK(event);
- (**remote_media_log_).AddEvent(*event);
+
+ // Don't post unless we need to. Otherwise, we can order a log entry after
+ // our own destruction. While safe, this loses the message. This can happen,
+ // for example, when we're logging why a VideoDecoder failed to initialize.
+ // It will be destroyed synchronously when Initialize returns.
+ //
+ // Also, we post here, so this is the base case. :)
+ if (task_runner_->RunsTasksInCurrentSequence()) {
+ remote_media_log_->AddEvent(*event);
+ return;
+ }
+
+ // From other threads, it's okay to post without worrying about losing a
+ // message. This is because any message that's causally related to the object
+ // (and thus MediaLog) being destroyed hopefully posts the result back to the
+ // same sequence as |task_runner_| after we do. Of course, async destruction
+ // (e.g., the renderer destroys a MojoVideoDecoder) can still lose messages,
+ // but that's really a race.
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&MojoMediaLog::AddEvent, weak_this_, std::move(event)));
}
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_media_log.h b/chromium/media/mojo/services/mojo_media_log.h
index 78e526f359a..ee9347bbd3c 100644
--- a/chromium/media/mojo/services/mojo_media_log.h
+++ b/chromium/media/mojo/services/mojo_media_log.h
@@ -9,6 +9,8 @@
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
+#include "base/memory/weak_ptr.h"
+#include "base/sequenced_task_runner.h"
#include "media/base/media_log.h"
#include "media/mojo/interfaces/media_log.mojom.h"
@@ -17,15 +19,23 @@ namespace media {
class MojoMediaLog final : public MediaLog {
public:
// TODO(sandersd): Template on Ptr type to support non-associated.
- explicit MojoMediaLog(
- scoped_refptr<mojom::ThreadSafeMediaLogAssociatedPtr> remote_media_log);
+ explicit MojoMediaLog(mojom::MediaLogAssociatedPtrInfo remote_media_log,
+ scoped_refptr<base::SequencedTaskRunner> task_runner);
~MojoMediaLog() final;
- // MediaLog implementation.
+ // MediaLog implementation. May be called from any thread, but will only
+ // use |remote_media_log_| on |task_runner_|.
void AddEvent(std::unique_ptr<MediaLogEvent> event) override;
private:
- scoped_refptr<mojom::ThreadSafeMediaLogAssociatedPtr> remote_media_log_;
+ mojom::MediaLogAssociatedPtr remote_media_log_;
+
+ // The mojo service thread on which we'll access |remote_media_log_|.
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
+
+ base::WeakPtr<MojoMediaLog> weak_this_;
+
+ base::WeakPtrFactory<MojoMediaLog> weak_ptr_factory_;
DISALLOW_COPY_AND_ASSIGN(MojoMediaLog);
};
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.cc b/chromium/media/mojo/services/mojo_video_decoder_service.cc
index ccfd012bcbc..f5738360fb2 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.cc
@@ -136,9 +136,11 @@ void MojoVideoDecoderService::Construct(
client_.Bind(std::move(client));
- media_log_ = std::make_unique<MojoMediaLog>(
- mojom::ThreadSafeMediaLogAssociatedPtr::Create(
- std::move(media_log), base::ThreadTaskRunnerHandle::Get()));
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner =
+ base::ThreadTaskRunnerHandle::Get();
+
+ media_log_ =
+ std::make_unique<MojoMediaLog>(std::move(media_log), task_runner);
video_frame_handle_releaser_ =
mojo::MakeStrongBinding(std::make_unique<VideoFrameHandleReleaserImpl>(),
@@ -148,8 +150,7 @@ void MojoVideoDecoderService::Construct(
new MojoDecoderBufferReader(std::move(decoder_buffer_pipe)));
decoder_ = mojo_media_client_->CreateVideoDecoder(
- base::ThreadTaskRunnerHandle::Get(), media_log_.get(),
- std::move(command_buffer_id),
+ task_runner, media_log_.get(), std::move(command_buffer_id),
base::Bind(&MojoVideoDecoderService::OnDecoderRequestedOverlayInfo,
weak_this_),
target_color_space);
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_service.cc b/chromium/media/mojo/services/mojo_video_encode_accelerator_service.cc
index 836a947413e..ba9174585e0 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_service.cc
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_service.cc
@@ -153,14 +153,17 @@ void MojoVideoEncodeAcceleratorService::UseOutputBitstreamBuffer(
}
void MojoVideoEncodeAcceleratorService::RequestEncodingParametersChange(
- uint32_t bitrate,
+ const media::VideoBitrateAllocation& bitrate_allocation,
uint32_t framerate) {
- DVLOG(2) << __func__ << " bitrate=" << bitrate << " framerate=" << framerate;
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!encoder_)
return;
- encoder_->RequestEncodingParametersChange(bitrate, framerate);
+
+ DVLOG(2) << __func__ << " bitrate=" << bitrate_allocation.GetSumBps()
+ << " framerate=" << framerate;
+
+ encoder_->RequestEncodingParametersChange(bitrate_allocation, framerate);
}
void MojoVideoEncodeAcceleratorService::RequireBitstreamBuffers(
@@ -183,18 +186,15 @@ void MojoVideoEncodeAcceleratorService::RequireBitstreamBuffers(
void MojoVideoEncodeAcceleratorService::BitstreamBufferReady(
int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) {
+ const media::BitstreamBufferMetadata& metadata) {
DVLOG(2) << __func__ << " bitstream_buffer_id=" << bitstream_buffer_id
- << ", payload_size=" << payload_size
- << "B, key_frame=" << key_frame;
+ << ", payload_size=" << metadata.payload_size_bytes
+ << "B, key_frame=" << metadata.key_frame;
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!vea_client_)
return;
- vea_client_->BitstreamBufferReady(bitstream_buffer_id, payload_size,
- key_frame, timestamp);
+ vea_client_->BitstreamBufferReady(bitstream_buffer_id, metadata);
}
void MojoVideoEncodeAcceleratorService::NotifyError(
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h b/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
index 8c6991f7aa3..0efdae6eee3 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
@@ -63,8 +63,9 @@ class MEDIA_MOJO_EXPORT MojoVideoEncodeAcceleratorService
EncodeCallback callback) override;
void UseOutputBitstreamBuffer(int32_t bitstream_buffer_id,
mojo::ScopedSharedBufferHandle buffer) override;
- void RequestEncodingParametersChange(uint32_t bitrate,
- uint32_t framerate) override;
+ void RequestEncodingParametersChange(
+ const media::VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) override;
private:
friend class MojoVideoEncodeAcceleratorIntegrationTest;
@@ -74,10 +75,9 @@ class MEDIA_MOJO_EXPORT MojoVideoEncodeAcceleratorService
void RequireBitstreamBuffers(unsigned int input_count,
const gfx::Size& input_coded_size,
size_t output_buffer_size) override;
- void BitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) override;
+ void BitstreamBufferReady(
+ int32_t bitstream_buffer_id,
+ const media::BitstreamBufferMetadata& metadata) override;
void NotifyError(::media::VideoEncodeAccelerator::Error error) override;
const CreateAndInitializeVideoEncodeAcceleratorCallback create_vea_callback_;
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc b/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
index 85975bbe122..ffd09434753 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
@@ -6,7 +6,7 @@
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
-#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_preferences.h"
#include "media/mojo/interfaces/video_encode_accelerator.mojom.h"
#include "media/mojo/services/mojo_video_encode_accelerator_service.h"
#include "media/video/fake_video_encode_accelerator.h"
@@ -52,8 +52,8 @@ class MockMojoVideoEncodeAcceleratorClient
MOCK_METHOD3(RequireBitstreamBuffers,
void(uint32_t, const gfx::Size&, uint32_t));
- MOCK_METHOD4(BitstreamBufferReady,
- void(int32_t, uint32_t, bool, base::TimeDelta));
+ MOCK_METHOD2(BitstreamBufferReady,
+ void(int32_t, const media::BitstreamBufferMetadata&));
MOCK_METHOD1(NotifyError, void(VideoEncodeAccelerator::Error));
private:
@@ -158,7 +158,7 @@ TEST_F(MojoVideoEncodeAcceleratorServiceTest, EncodeOneFrame) {
{
const auto video_frame = VideoFrame::CreateBlackFrame(kInputVisibleSize);
EXPECT_CALL(*mock_mojo_vea_client(),
- BitstreamBufferReady(kBitstreamBufferId, _, _, _));
+ BitstreamBufferReady(kBitstreamBufferId, _));
mojo_vea_service()->Encode(video_frame, true /* is_keyframe */,
base::DoNothing());
@@ -173,12 +173,48 @@ TEST_F(MojoVideoEncodeAcceleratorServiceTest, EncodingParametersChange) {
const uint32_t kNewBitrate = 123123u;
const uint32_t kNewFramerate = 321321u;
- mojo_vea_service()->RequestEncodingParametersChange(kNewBitrate,
+ VideoBitrateAllocation bitrate_allocation;
+ bitrate_allocation.SetBitrate(0, 0, kNewBitrate);
+ mojo_vea_service()->RequestEncodingParametersChange(bitrate_allocation,
kNewFramerate);
base::RunLoop().RunUntilIdle();
ASSERT_TRUE(fake_vea());
- EXPECT_EQ(kNewBitrate, fake_vea()->stored_bitrates().front());
+ VideoBitrateAllocation expected_allocation;
+ expected_allocation.SetBitrate(0, 0, kNewBitrate);
+ EXPECT_EQ(expected_allocation,
+ fake_vea()->stored_bitrate_allocations().back());
+}
+
+// Tests that a RequestEncodingParametersChange() ripples through correctly.
+TEST_F(MojoVideoEncodeAcceleratorServiceTest,
+ EncodingParametersWithBitrateAllocation) {
+ CreateMojoVideoEncodeAccelerator();
+ BindAndInitialize();
+
+ const uint32_t kNewFramerate = 321321u;
+ const size_t kMaxNumBitrates = VideoBitrateAllocation::kMaxSpatialLayers *
+ VideoBitrateAllocation::kMaxTemporalLayers;
+
+ // Verify translation of VideoBitrateAllocation into vector of bitrates for
+ // everything from empty array up to max number of layers.
+ VideoBitrateAllocation bitrate_allocation;
+ for (size_t i = 0; i <= kMaxNumBitrates; ++i) {
+ if (i > 0) {
+ int layer_bitrate = i * 1000;
+ const size_t si = (i - 1) / VideoBitrateAllocation::kMaxTemporalLayers;
+ const size_t ti = (i - 1) % VideoBitrateAllocation::kMaxTemporalLayers;
+ bitrate_allocation.SetBitrate(si, ti, layer_bitrate);
+ }
+
+ mojo_vea_service()->RequestEncodingParametersChange(bitrate_allocation,
+ kNewFramerate);
+ base::RunLoop().RunUntilIdle();
+
+ ASSERT_TRUE(fake_vea());
+ EXPECT_EQ(bitrate_allocation,
+ fake_vea()->stored_bitrate_allocations().back());
+ }
}
// This test verifies that MojoVEA::Initialize() fails with an invalid |client|.
@@ -278,7 +314,9 @@ TEST_F(MojoVideoEncodeAcceleratorServiceTest, CallsBeforeInitializeAreIgnored) {
{
const uint32_t kNewBitrate = 123123u;
const uint32_t kNewFramerate = 321321u;
- mojo_vea_service()->RequestEncodingParametersChange(kNewBitrate,
+ media::VideoBitrateAllocation bitrate_allocation;
+ bitrate_allocation.SetBitrate(0, 0, kNewBitrate);
+ mojo_vea_service()->RequestEncodingParametersChange(bitrate_allocation,
kNewFramerate);
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/mojo/services/test_mojo_media_client.cc b/chromium/media/mojo/services/test_mojo_media_client.cc
index 6c3ce3d4427..144c0b2d67d 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.cc
+++ b/chromium/media/mojo/services/test_mojo_media_client.cc
@@ -57,6 +57,7 @@ void TestMojoMediaClient::Initialize(
}
std::unique_ptr<Renderer> TestMojoMediaClient::CreateRenderer(
+ service_manager::mojom::InterfaceProvider* host_interfaces,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
const std::string& /* audio_device_id */) {
diff --git a/chromium/media/mojo/services/test_mojo_media_client.h b/chromium/media/mojo/services/test_mojo_media_client.h
index d5adbeb790e..ac517bbd764 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.h
+++ b/chromium/media/mojo/services/test_mojo_media_client.h
@@ -30,6 +30,7 @@ class TestMojoMediaClient : public MojoMediaClient {
// MojoMediaClient implementation.
void Initialize(service_manager::Connector* connector) final;
std::unique_ptr<Renderer> CreateRenderer(
+ service_manager::mojom::InterfaceProvider* host_interfaces,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
const std::string& audio_device_id) final;
diff --git a/chromium/media/mojo/services/video_decode_perf_history.cc b/chromium/media/mojo/services/video_decode_perf_history.cc
index f074b4ed4cb..3ee0ae8d1f2 100644
--- a/chromium/media/mojo/services/video_decode_perf_history.cc
+++ b/chromium/media/mojo/services/video_decode_perf_history.cc
@@ -9,6 +9,7 @@
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/strings/stringprintf.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/base/video_codecs.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "services/metrics/public/cpp/ukm_builders.h"
@@ -111,7 +112,7 @@ void VideoDecodePerfHistory::AssessStats(
// this will be janky.
// No stats? Lets be optimistic.
- if (!stats) {
+ if (!stats || stats->frames_decoded == 0) {
*is_power_efficient = true;
*is_smooth = true;
return;
@@ -144,7 +145,7 @@ void VideoDecodePerfHistory::OnGotStatsForRequest(
AssessStats(stats.get(), &is_smooth, &is_power_efficient);
- if (stats) {
+ if (stats && stats->frames_decoded) {
DCHECK(database_success);
percent_dropped =
static_cast<double>(stats->frames_dropped) / stats->frames_decoded;
@@ -169,6 +170,11 @@ void VideoDecodePerfHistory::OnGotStatsForRequest(
std::move(got_info_cb).Run(is_smooth, is_power_efficient);
}
+VideoDecodePerfHistory::SaveCallback VideoDecodePerfHistory::GetSaveCallback() {
+ return base::BindRepeating(&VideoDecodePerfHistory::SavePerfRecord,
+ weak_ptr_factory_.GetWeakPtr());
+}
+
void VideoDecodePerfHistory::SavePerfRecord(
const url::Origin& untrusted_top_frame_origin,
bool is_top_frame,
@@ -362,4 +368,27 @@ void VideoDecodePerfHistory::OnClearedHistory(base::OnceClosure clear_done_cb) {
std::move(clear_done_cb).Run();
}
+void VideoDecodePerfHistory::GetVideoDecodeStatsDB(GetCB get_db_cb) {
+ DVLOG(3) << __func__;
+ DCHECK(get_db_cb);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ if (db_init_status_ == FAILED) {
+ std::move(get_db_cb).Run(nullptr);
+ return;
+ }
+
+ // Defer this request until the DB is initialized.
+ if (db_init_status_ != COMPLETE) {
+ init_deferred_api_calls_.push_back(
+ base::BindOnce(&VideoDecodePerfHistory::GetVideoDecodeStatsDB,
+ weak_ptr_factory_.GetWeakPtr(), std::move(get_db_cb)));
+ InitDatabase();
+ return;
+ }
+
+ // DB is already initialized. BindToCurrentLoop to avoid reentrancy.
+ std::move(BindToCurrentLoop(std::move(get_db_cb))).Run(db_.get());
+}
+
} // namespace media
diff --git a/chromium/media/mojo/services/video_decode_perf_history.h b/chromium/media/mojo/services/video_decode_perf_history.h
index 8fe4f9efb96..5ec8691dce4 100644
--- a/chromium/media/mojo/services/video_decode_perf_history.h
+++ b/chromium/media/mojo/services/video_decode_perf_history.h
@@ -15,6 +15,7 @@
#include "base/supports_user_data.h"
#include "media/base/video_codecs.h"
#include "media/capabilities/video_decode_stats_db.h"
+#include "media/capabilities/video_decode_stats_db_provider.h"
#include "media/mojo/interfaces/video_decode_perf_history.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/binding_set.h"
@@ -45,6 +46,7 @@ namespace media {
// sequence.
class MEDIA_MOJO_EXPORT VideoDecodePerfHistory
: public mojom::VideoDecodePerfHistory,
+ public VideoDecodeStatsDBProvider,
public base::SupportsUserData::Data {
public:
explicit VideoDecodePerfHistory(
@@ -59,20 +61,27 @@ class MEDIA_MOJO_EXPORT VideoDecodePerfHistory
void GetPerfInfo(mojom::PredictionFeaturesPtr features,
GetPerfInfoCallback got_info_cb) override;
- // Save a record of the given performance stats for the described stream.
- // Saving is generally fire-and-forget, but |save_done_cb| may be optionally
+ // Provides a callback for saving a stats record for the described stream.
+ // This callback will silently fail if called after |this| is destroyed.
+ // Saving is generally fire-and-forget, but |save_done_cb| may be provided
// for tests to know the save is complete.
- void SavePerfRecord(const url::Origin& untrusted_top_frame_origin,
- bool is_top_frame,
- mojom::PredictionFeatures features,
- mojom::PredictionTargets targets,
- uint64_t player_id,
- base::OnceClosure save_done_cb = base::OnceClosure());
+ using SaveCallback = base::RepeatingCallback<void(
+ const url::Origin& untrusted_top_frame_origin,
+ bool is_top_frame,
+ mojom::PredictionFeatures features,
+ mojom::PredictionTargets targets,
+ uint64_t player_id,
+ base::OnceClosure save_done_cb)>;
+ SaveCallback GetSaveCallback();
// Clear all history from the underlying database. Run |clear_done_cb| when
// complete.
void ClearHistory(base::OnceClosure clear_done_cb);
+ // From VideoDecodeStatsDBProvider. |cb| receives a pointer to the
+ // *initialized* VideoDecodeStatsDB, or null in case of error.
+ void GetVideoDecodeStatsDB(GetCB cb) override;
+
private:
friend class VideoDecodePerfHistoryTest;
@@ -101,6 +110,14 @@ class MEDIA_MOJO_EXPORT VideoDecodePerfHistory
// Callback from |db_->Initialize()|.
void OnDatabaseInit(bool success);
+ // Initiate saving of the provided record. See GetSaveCallback().
+ void SavePerfRecord(const url::Origin& untrusted_top_frame_origin,
+ bool is_top_frame,
+ mojom::PredictionFeatures features,
+ mojom::PredictionTargets targets,
+ uint64_t player_id,
+ base::OnceClosure save_done_cb);
+
// Internal callback for database queries made from GetPerfInfo() (mojo API).
// Assesses performance from database stats and passes results to
// |got_info_cb|.
diff --git a/chromium/media/mojo/services/video_decode_perf_history_unittest.cc b/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
index fa36b55ae79..f0b7f728c2d 100644
--- a/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
+++ b/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
@@ -9,6 +9,7 @@
#include "base/run_loop.h"
#include "base/strings/stringprintf.h"
#include "base/task_scheduler/post_task.h"
+#include "base/test/bind_test_util.h"
#include "base/test/scoped_task_environment.h"
#include "components/ukm/test_ukm_recorder.h"
#include "media/capabilities/video_decode_stats_db.h"
@@ -20,6 +21,9 @@
#include "url/origin.h"
using UkmEntry = ukm::builders::Media_VideoDecodePerfRecord;
+using testing::Eq;
+using testing::IsNull;
+using testing::_;
namespace {
@@ -158,6 +162,8 @@ class VideoDecodePerfHistoryTest : public testing::Test {
// the operation has completed. See CompleteDBInitOnClearedHistory().
MOCK_METHOD0(MockOnClearedHistory, void());
+ MOCK_METHOD1(MockGetVideoDecodeStatsDBCB, void(VideoDecodeStatsDB* db));
+
mojom::PredictionFeatures MakeFeatures(VideoCodecProfile profile,
gfx::Size video_size,
int frames_per_sec) {
@@ -187,6 +193,20 @@ class VideoDecodePerfHistoryTest : public testing::Test {
return targets;
}
+ void SavePerfRecord(const url::Origin& untrusted_top_frame_origin,
+ bool is_top_frame,
+ mojom::PredictionFeatures features,
+ mojom::PredictionTargets targets,
+ uint64_t player_id) {
+ // Null saved done CB. Save is verified separately via GetPerfInfo() after
+ // save completes.
+ base::OnceClosure save_done_cb;
+
+ perf_history_->GetSaveCallback().Run(untrusted_top_frame_origin,
+ is_top_frame, features, targets,
+ player_id, std::move(save_done_cb));
+ }
+
protected:
using VideoDescKey = VideoDecodeStatsDB::VideoDescKey;
using DecodeStatsEntry = VideoDecodeStatsDB::DecodeStatsEntry;
@@ -238,18 +258,16 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetPerfInfo_Smooth) {
kFramesDecoded * kMaxSmoothDroppedFramesPercent + 1;
// Add the entries.
- perf_history_->SavePerfRecord(
- kOrigin, kIsTopFrame,
- MakeFeatures(kKnownProfile, kKownSize, kSmoothFrameRate),
- MakeTargets(kFramesDecoded, kSmoothFramesDropped,
- kNotPowerEfficientFramesDecoded),
- kPlayerId);
- perf_history_->SavePerfRecord(
- kOrigin, kIsTopFrame,
- MakeFeatures(kKnownProfile, kKownSize, kNotSmoothFrameRate),
- MakeTargets(kFramesDecoded, kNotSmoothFramesDropped,
- kNotPowerEfficientFramesDecoded),
- kPlayerId);
+ SavePerfRecord(kOrigin, kIsTopFrame,
+ MakeFeatures(kKnownProfile, kKownSize, kSmoothFrameRate),
+ MakeTargets(kFramesDecoded, kSmoothFramesDropped,
+ kNotPowerEfficientFramesDecoded),
+ kPlayerId);
+ SavePerfRecord(kOrigin, kIsTopFrame,
+ MakeFeatures(kKnownProfile, kKownSize, kNotSmoothFrameRate),
+ MakeTargets(kFramesDecoded, kNotSmoothFramesDropped,
+ kNotPowerEfficientFramesDecoded),
+ kPlayerId);
// Verify perf history returns is_smooth = true for the smooth entry.
EXPECT_CALL(*this, MockGetPerfInfoCB(kIsSmooth, kIsNotPowerEfficient));
@@ -279,9 +297,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetPerfInfo_Smooth) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- base::RunLoop run_loop;
- base::PostTask(FROM_HERE, base::BindOnce(run_loop.QuitWhenIdleClosure()));
- run_loop.Run();
+ scoped_task_environment_.RunUntilIdle();
}
}
@@ -318,19 +334,19 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetPerfInfo_PowerEfficient) {
kFramesDecoded * kMaxSmoothDroppedFramesPercent + 1;
// Add the entries.
- perf_history_->SavePerfRecord(
+ SavePerfRecord(
kOrigin, kIsTopFrame,
MakeFeatures(kPowerEfficientProfile, kKownSize, kSmoothFrameRate),
MakeTargets(kFramesDecoded, kSmoothFramesDropped,
kPowerEfficientFramesDecoded),
kPlayerId);
- perf_history_->SavePerfRecord(
+ SavePerfRecord(
kOrigin, kIsTopFrame,
MakeFeatures(kNotPowerEfficientProfile, kKownSize, kSmoothFrameRate),
MakeTargets(kFramesDecoded, kSmoothFramesDropped,
kNotPowerEfficientFramesDecoded),
kPlayerId);
- perf_history_->SavePerfRecord(
+ SavePerfRecord(
kOrigin, kIsTopFrame,
MakeFeatures(kPowerEfficientProfile, kKownSize, kNotSmoothFrameRate),
MakeTargets(kFramesDecoded, kNotSmoothFramesDropped,
@@ -373,9 +389,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetPerfInfo_PowerEfficient) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- base::RunLoop run_loop;
- base::PostTask(FROM_HERE, base::BindOnce(run_loop.QuitWhenIdleClosure()));
- run_loop.Run();
+ scoped_task_environment_.RunUntilIdle();
}
}
@@ -402,9 +416,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetPerfInfo_FailedInitialize) {
GetFakeDB()->CompleteInitialize(false);
// Allow initialize-deferred API calls to complete.
- base::RunLoop run_loop;
- base::PostTask(FROM_HERE, base::BindOnce(run_loop.QuitWhenIdleClosure()));
- run_loop.Run();
+ scoped_task_environment_.RunUntilIdle();
}
}
@@ -426,7 +438,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, AppendAndDestroyStats) {
const int kFramesDecoded = 1000;
const int kManyFramesDropped = kFramesDecoded / 2;
const int kFramesPowerEfficient = kFramesDecoded;
- perf_history_->SavePerfRecord(
+ SavePerfRecord(
kOrigin, kIsTopFrame, MakeFeatures(kProfile, kSize, kFrameRate),
MakeTargets(kFramesDecoded, kManyFramesDropped, kFramesPowerEfficient),
kPlayerId);
@@ -459,9 +471,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, AppendAndDestroyStats) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- base::RunLoop run_loop;
- base::PostTask(FROM_HERE, base::BindOnce(run_loop.QuitWhenIdleClosure()));
- run_loop.Run();
+ scoped_task_environment_.RunUntilIdle();
}
const auto& entries = test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
@@ -482,6 +492,70 @@ TEST_P(VideoDecodePerfHistoryParamTest, AppendAndDestroyStats) {
}
}
+TEST_P(VideoDecodePerfHistoryParamTest, GetVideoDecodeStatsDB) {
+ // NOTE: The when the DB initialization is deferred, All EXPECT_CALLs are then
+ // delayed until we db_->CompleteInitialize(). testing::InSequence enforces
+ // that EXPECT_CALLs arrive in top-to-bottom order.
+ bool defer_initialize = GetParam();
+ testing::InSequence dummy;
+
+ // Complete initialization in advance of API calls when not asked to defer.
+ if (!defer_initialize)
+ PreInitializeDB(/* success */ true);
+
+ // Request a pointer to VideoDecodeStatsDB and verify the callback.
+ EXPECT_CALL(*this, MockGetVideoDecodeStatsDBCB(_))
+ .WillOnce([&](const auto* db_ptr) {
+ // Not able to simply use a matcher because the DB does not exist at the
+ // time we setup the EXPECT_CALL.
+ EXPECT_EQ(GetFakeDB(), db_ptr);
+ });
+
+ perf_history_->GetVideoDecodeStatsDB(
+ base::BindOnce(&VideoDecodePerfHistoryTest::MockGetVideoDecodeStatsDBCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+
+ // Complete successful deferred DB initialization (see comment at top of test)
+ if (defer_initialize) {
+ GetFakeDB()->CompleteInitialize(true);
+
+ // Allow initialize-deferred API calls to complete.
+ scoped_task_environment_.RunUntilIdle();
+ }
+}
+
+TEST_P(VideoDecodePerfHistoryParamTest,
+ GetVideoDecodeStatsDB_FailedInitialize) {
+ // NOTE: The when the DB initialization is deferred, All EXPECT_CALLs are then
+ // delayed until we db_->CompleteInitialize(). testing::InSequence enforces
+ // that EXPECT_CALLs arrive in top-to-bottom order.
+ bool defer_initialize = GetParam();
+ testing::InSequence dummy;
+
+ // Complete initialization in advance of API calls when not asked to defer.
+ if (!defer_initialize)
+ PreInitializeDB(/* success */ false);
+
+ // Request a pointer to VideoDecodeStatsDB and verify the callback provides
+ // a nullptr due to failed initialization.
+ EXPECT_CALL(*this, MockGetVideoDecodeStatsDBCB(IsNull()));
+ perf_history_->GetVideoDecodeStatsDB(
+ base::BindOnce(&VideoDecodePerfHistoryTest::MockGetVideoDecodeStatsDBCB,
+ base::Unretained(this)));
+
+ scoped_task_environment_.RunUntilIdle();
+
+ // Complete failed deferred DB initialization (see comment at top of test)
+ if (defer_initialize) {
+ GetFakeDB()->CompleteInitialize(false);
+
+ // Allow initialize-deferred API calls to complete.
+ scoped_task_environment_.RunUntilIdle();
+ }
+}
+
INSTANTIATE_TEST_CASE_P(VaryDBInitTiming,
VideoDecodePerfHistoryParamTest,
::testing::Values(true, false));
@@ -509,7 +583,7 @@ TEST_F(VideoDecodePerfHistoryTest, AppendWhileDestroying) {
// With DB reinitialization still pending, save a record that indicates
// NOT smooth performance.
- perf_history_->SavePerfRecord(
+ SavePerfRecord(
kOrigin, kIsTopFrame, MakeFeatures(kProfile, kSize, kFrameRate),
MakeTargets(kFramesDecoded, kManyFramesDropped, kFramesPowerEfficient),
kPlayerId);
@@ -527,9 +601,7 @@ TEST_F(VideoDecodePerfHistoryTest, AppendWhileDestroying) {
GetFakeDB()->CompleteInitialize(/* success */ true);
// Allow initialize-deferred API calls to complete.
- base::RunLoop run_loop;
- base::PostTask(FROM_HERE, base::BindOnce(run_loop.QuitWhenIdleClosure()));
- run_loop.Run();
+ scoped_task_environment_.RunUntilIdle();
}
} // namespace media
diff --git a/chromium/media/mojo/services/video_decode_stats_recorder.cc b/chromium/media/mojo/services/video_decode_stats_recorder.cc
index f6866c0ad62..46f12e729c5 100644
--- a/chromium/media/mojo/services/video_decode_stats_recorder.cc
+++ b/chromium/media/mojo/services/video_decode_stats_recorder.cc
@@ -5,7 +5,6 @@
#include "media/mojo/services/video_decode_stats_recorder.h"
#include "base/memory/ptr_util.h"
-#include "media/mojo/services/video_decode_perf_history.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "base/logging.h"
@@ -16,11 +15,15 @@ VideoDecodeStatsRecorder::VideoDecodeStatsRecorder(
const url::Origin& untrusted_top_frame_origin,
bool is_top_frame,
uint64_t player_id,
- VideoDecodePerfHistory* perf_history)
+ VideoDecodePerfHistory::SaveCallback save_cb)
: untrusted_top_frame_origin_(untrusted_top_frame_origin),
is_top_frame_(is_top_frame),
- perf_history_(perf_history),
+ save_cb_(std::move(save_cb)),
player_id_(player_id) {
+ // Only bother to make the recorder when able to save stats. Checking here
+ // instead of silently failing below.
+ CHECK(!save_cb_.is_null());
+
DVLOG(2) << __func__
<< " untrusted_top_frame_origin:" << untrusted_top_frame_origin
<< " is_top_frame:" << is_top_frame;
@@ -68,7 +71,7 @@ void VideoDecodeStatsRecorder::UpdateRecord(
void VideoDecodeStatsRecorder::FinalizeRecord() {
if (features_.profile == VIDEO_CODEC_PROFILE_UNKNOWN ||
- targets_.frames_decoded == 0 || !perf_history_) {
+ targets_.frames_decoded == 0) {
return;
}
@@ -80,8 +83,10 @@ void VideoDecodeStatsRecorder::FinalizeRecord() {
<< " power efficient decoded:"
<< targets_.frames_decoded_power_efficient;
- perf_history_->SavePerfRecord(untrusted_top_frame_origin_, is_top_frame_,
- features_, targets_, player_id_);
+ // Final argument is an empty save-done-callback. No action to take if save
+ // fails (DB already records UMAs on failure). Callback mainly used by tests.
+ save_cb_.Run(untrusted_top_frame_origin_, is_top_frame_, features_, targets_,
+ player_id_, base::OnceClosure());
}
} // namespace media
diff --git a/chromium/media/mojo/services/video_decode_stats_recorder.h b/chromium/media/mojo/services/video_decode_stats_recorder.h
index 8445aeddc77..e432d4cb48c 100644
--- a/chromium/media/mojo/services/video_decode_stats_recorder.h
+++ b/chromium/media/mojo/services/video_decode_stats_recorder.h
@@ -12,13 +12,12 @@
#include "media/base/video_codecs.h"
#include "media/mojo/interfaces/video_decode_stats_recorder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
+#include "media/mojo/services/video_decode_perf_history.h"
#include "services/service_manager/public/cpp/bind_source_info.h"
#include "url/gurl.h"
namespace media {
-class VideoDecodePerfHistory;
-
// See mojom::VideoDecodeStatsRecorder for documentation.
class MEDIA_MOJO_EXPORT VideoDecodeStatsRecorder
: public mojom::VideoDecodeStatsRecorder {
@@ -29,7 +28,7 @@ class MEDIA_MOJO_EXPORT VideoDecodeStatsRecorder
VideoDecodeStatsRecorder(const url::Origin& untrusted_top_frame_origin,
bool is_top_frame,
uint64_t player_id,
- VideoDecodePerfHistory* perf_history);
+ VideoDecodePerfHistory::SaveCallback save_cb);
~VideoDecodeStatsRecorder() override;
// mojom::VideoDecodeStatsRecorder implementation:
@@ -43,7 +42,7 @@ class MEDIA_MOJO_EXPORT VideoDecodeStatsRecorder
const url::Origin untrusted_top_frame_origin_;
const bool is_top_frame_;
- VideoDecodePerfHistory* const perf_history_;
+ const VideoDecodePerfHistory::SaveCallback save_cb_;
const uint64_t player_id_;
mojom::PredictionFeatures features_;
diff --git a/chromium/media/mojo/services/watch_time_recorder.cc b/chromium/media/mojo/services/watch_time_recorder.cc
index e34b0e3aeb9..de615f91cfa 100644
--- a/chromium/media/mojo/services/watch_time_recorder.cc
+++ b/chromium/media/mojo/services/watch_time_recorder.cc
@@ -5,6 +5,7 @@
#include "media/mojo/services/watch_time_recorder.h"
#include <algorithm>
+#include <cmath>
#include "base/hash.h"
#include "base/metrics/histogram_functions.h"
@@ -115,6 +116,15 @@ static void RecordRebuffersCount(base::StringPiece key, int underflow_count) {
base::UmaHistogramCounts100(key.as_string(), underflow_count);
}
+WatchTimeRecorder::WatchTimeUkmRecord::WatchTimeUkmRecord(
+ mojom::SecondaryPlaybackPropertiesPtr properties)
+ : secondary_properties(std::move(properties)) {}
+
+WatchTimeRecorder::WatchTimeUkmRecord::WatchTimeUkmRecord(
+ WatchTimeUkmRecord&& record) = default;
+
+WatchTimeRecorder::WatchTimeUkmRecord::~WatchTimeUkmRecord() = default;
+
WatchTimeRecorder::WatchTimeRecorder(mojom::PlaybackPropertiesPtr properties,
const url::Origin& untrusted_top_origin,
bool is_top_frame,
@@ -184,7 +194,8 @@ void WatchTimeRecorder::FinalizeWatchTime(
}
// At finalize, update the aggregate entry.
- aggregate_watch_time_info_[kv.first] += kv.second;
+ if (!ukm_records_.empty())
+ ukm_records_.back().aggregate_watch_time_info[kv.first] += kv.second;
}
// If we're not finalizing everything, we're done after removing keys.
@@ -213,7 +224,8 @@ void WatchTimeRecorder::FinalizeWatchTime(
}
// Ensure values are cleared in case the reporter is reused.
- total_underflow_count_ += underflow_count_;
+ if (!ukm_records_.empty())
+ ukm_records_.back().total_underflow_count += underflow_count_;
underflow_count_ = 0;
watch_time_info_.clear();
}
@@ -222,14 +234,76 @@ void WatchTimeRecorder::OnError(PipelineStatus status) {
pipeline_status_ = status;
}
-void WatchTimeRecorder::SetAudioDecoderName(const std::string& name) {
- DCHECK(audio_decoder_name_.empty());
- audio_decoder_name_ = name;
-}
+void WatchTimeRecorder::UpdateSecondaryProperties(
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties) {
+ bool last_record_was_unfinalized = false;
+ if (!ukm_records_.empty()) {
+ auto& last_record = ukm_records_.back();
+
+ // Skip unchanged property updates.
+ if (secondary_properties->Equals(*last_record.secondary_properties))
+ return;
+
+ // If a property just changes from an unknown to a known value, allow the
+ // update without creating a whole new record.
+ if (last_record.secondary_properties->audio_codec == kUnknownAudioCodec ||
+ last_record.secondary_properties->video_codec == kUnknownVideoCodec ||
+ last_record.secondary_properties->audio_decoder_name.empty() ||
+ last_record.secondary_properties->video_decoder_name.empty()) {
+ auto temp_props = last_record.secondary_properties.Clone();
+ if (last_record.secondary_properties->audio_codec == kUnknownAudioCodec)
+ temp_props->audio_codec = secondary_properties->audio_codec;
+ if (last_record.secondary_properties->video_codec == kUnknownVideoCodec)
+ temp_props->video_codec = secondary_properties->video_codec;
+ if (last_record.secondary_properties->audio_decoder_name.empty()) {
+ temp_props->audio_decoder_name =
+ secondary_properties->audio_decoder_name;
+ }
+ if (last_record.secondary_properties->video_decoder_name.empty()) {
+ temp_props->video_decoder_name =
+ secondary_properties->video_decoder_name;
+ }
+ if (temp_props->Equals(*secondary_properties)) {
+ last_record.secondary_properties = std::move(temp_props);
+ return;
+ }
+ }
-void WatchTimeRecorder::SetVideoDecoderName(const std::string& name) {
- DCHECK(video_decoder_name_.empty());
- video_decoder_name_ = name;
+ // Flush any existing watch time for the current UKM record. The client is
+ // responsible for ensuring recent watch time has been reported before
+ // updating the secondary properties.
+ for (auto& kv : watch_time_info_)
+ last_record.aggregate_watch_time_info[kv.first] += kv.second;
+ last_record.total_underflow_count += underflow_count_;
+
+ // If we flushed any watch time or underflow counts which hadn't been
+ // finalized we'll need to ensure the eventual Finalize() correctly accounts
+ // for those values at the time of the secondary property update.
+ last_record_was_unfinalized = !watch_time_info_.empty() || underflow_count_;
+ }
+ ukm_records_.emplace_back(std::move(secondary_properties));
+
+ // We're still in the middle of ongoing watch time updates. So offset the
+ // future records by their current values; this is done by setting the initial
+ // value of each unfinalized record to the negative of its current value.
+ //
+ // These values will be made positive by the next Finalize() call; which is
+ // guaranteed to be called at least one more time; either at destruction or by
+ // the client. This ensures we report the correct amount of watch time that
+ // has elapsed since the secondary properties were updated.
+ //
+ // E.g., consider the case where there's a pending watch time entry for
+ // kAudioAll=10s and the next RecordWatchTime() call would be kAudioAll=25s.
+ // Without offsetting, if UpdateSecondaryProperties() is called before the
+ // next RecordWatchTime() we'll end up recording kAudioAll=25s as the amount
+ // of watch time for the new set of secondary properties, which isn't correct.
+ // We instead want to report kAudioAll = 25s - 10s = 15s.
+ if (last_record_was_unfinalized) {
+ auto& last_record = ukm_records_.back();
+ last_record.total_underflow_count = -underflow_count_;
+ for (auto& kv : watch_time_info_)
+ last_record.aggregate_watch_time_info[kv.first] = -kv.second;
+ }
}
void WatchTimeRecorder::SetAutoplayInitiated(bool value) {
@@ -237,6 +311,10 @@ void WatchTimeRecorder::SetAutoplayInitiated(bool value) {
autoplay_initiated_ = value;
}
+void WatchTimeRecorder::OnDurationChanged(base::TimeDelta duration) {
+ duration_ = duration;
+}
+
void WatchTimeRecorder::UpdateUnderflowCount(int32_t count) {
underflow_count_ = count;
}
@@ -249,108 +327,136 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
if (!ukm_recorder)
return;
- const int32_t source_id = ukm_recorder->GetNewSourceID();
-
- // TODO(crbug.com/787209): Stop getting origin from the renderer.
- ukm_recorder->UpdateSourceURL(source_id, untrusted_top_origin_.GetURL());
- ukm::builders::Media_BasicPlayback builder(source_id);
-
- builder.SetIsTopFrame(is_top_frame_);
- builder.SetIsBackground(properties_->is_background);
- builder.SetIsMuted(properties_->is_muted);
- builder.SetPlayerID(player_id_);
-
- bool recorded_all_metric = false;
- for (auto& kv : aggregate_watch_time_info_) {
- if (kv.first == WatchTimeKey::kAudioAll ||
- kv.first == WatchTimeKey::kAudioBackgroundAll ||
- kv.first == WatchTimeKey::kAudioVideoAll ||
- kv.first == WatchTimeKey::kAudioVideoMutedAll ||
- kv.first == WatchTimeKey::kAudioVideoBackgroundAll ||
- kv.first == WatchTimeKey::kVideoAll ||
- kv.first == WatchTimeKey::kVideoBackgroundAll) {
- // Only one of these keys should be present.
- DCHECK(!recorded_all_metric);
- recorded_all_metric = true;
-
- builder.SetWatchTime(kv.second.InMilliseconds());
- if (total_underflow_count_) {
- builder.SetMeanTimeBetweenRebuffers(
- (kv.second / total_underflow_count_).InMilliseconds());
- }
- } else if (kv.first == WatchTimeKey::kAudioAc ||
- kv.first == WatchTimeKey::kAudioBackgroundAc ||
- kv.first == WatchTimeKey::kAudioVideoAc ||
- kv.first == WatchTimeKey::kAudioVideoMutedAc ||
- kv.first == WatchTimeKey::kAudioVideoBackgroundAc ||
- kv.first == WatchTimeKey::kVideoAc ||
- kv.first == WatchTimeKey::kVideoBackgroundAc) {
- builder.SetWatchTime_AC(kv.second.InMilliseconds());
- } else if (kv.first == WatchTimeKey::kAudioBattery ||
- kv.first == WatchTimeKey::kAudioBackgroundBattery ||
- kv.first == WatchTimeKey::kAudioVideoBattery ||
- kv.first == WatchTimeKey::kAudioVideoMutedBattery ||
- kv.first == WatchTimeKey::kAudioVideoBackgroundBattery ||
- kv.first == WatchTimeKey::kVideoBattery ||
- kv.first == WatchTimeKey::kVideoBackgroundBattery) {
- builder.SetWatchTime_Battery(kv.second.InMilliseconds());
- } else if (kv.first == WatchTimeKey::kAudioNativeControlsOn ||
- kv.first == WatchTimeKey::kAudioVideoNativeControlsOn ||
- kv.first == WatchTimeKey::kAudioVideoMutedNativeControlsOn ||
- kv.first == WatchTimeKey::kVideoNativeControlsOn) {
- builder.SetWatchTime_NativeControlsOn(kv.second.InMilliseconds());
- } else if (kv.first == WatchTimeKey::kAudioNativeControlsOff ||
- kv.first == WatchTimeKey::kAudioVideoNativeControlsOff ||
- kv.first == WatchTimeKey::kAudioVideoMutedNativeControlsOff ||
- kv.first == WatchTimeKey::kVideoNativeControlsOff) {
- builder.SetWatchTime_NativeControlsOff(kv.second.InMilliseconds());
- } else if (kv.first == WatchTimeKey::kAudioVideoDisplayFullscreen ||
- kv.first == WatchTimeKey::kAudioVideoMutedDisplayFullscreen ||
- kv.first == WatchTimeKey::kVideoDisplayFullscreen) {
- builder.SetWatchTime_DisplayFullscreen(kv.second.InMilliseconds());
- } else if (kv.first == WatchTimeKey::kAudioVideoDisplayInline ||
- kv.first == WatchTimeKey::kAudioVideoMutedDisplayInline ||
- kv.first == WatchTimeKey::kVideoDisplayInline) {
- builder.SetWatchTime_DisplayInline(kv.second.InMilliseconds());
- } else if (kv.first == WatchTimeKey::kAudioVideoDisplayPictureInPicture ||
- kv.first ==
- WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture ||
- kv.first == WatchTimeKey::kVideoDisplayPictureInPicture) {
- builder.SetWatchTime_DisplayPictureInPicture(kv.second.InMilliseconds());
+ // Round duration to the most significant digit in milliseconds for privacy.
+ base::Optional<uint64_t> clamped_duration_ms;
+ if (duration_ != kNoTimestamp && duration_ != kInfiniteDuration) {
+ clamped_duration_ms = duration_.InMilliseconds();
+ if (duration_ > base::TimeDelta::FromSeconds(1)) {
+ // Turns 54321 => 10000.
+ const uint64_t base =
+ std::pow(10, static_cast<uint64_t>(std::log10(*clamped_duration_ms)));
+ // Turns 54321 => 4321.
+ const uint64_t modulus = *clamped_duration_ms % base;
+ // Turns 54321 => 50000 and 55321 => 60000
+ clamped_duration_ms =
+ *clamped_duration_ms - modulus + (modulus < base / 2 ? 0 : base);
}
}
- // See note in mojom::PlaybackProperties about why we have both of these.
- builder.SetAudioCodec(properties_->audio_codec);
- builder.SetVideoCodec(properties_->video_codec);
- builder.SetHasAudio(properties_->has_audio);
- builder.SetHasVideo(properties_->has_video);
+ for (auto& ukm_record : ukm_records_) {
+ const int32_t source_id = ukm_recorder->GetNewSourceID();
+
+ // TODO(crbug.com/787209): Stop getting origin from the renderer.
+ ukm_recorder->UpdateSourceURL(source_id, untrusted_top_origin_.GetURL());
+ ukm::builders::Media_BasicPlayback builder(source_id);
+
+ builder.SetIsTopFrame(is_top_frame_);
+ builder.SetIsBackground(properties_->is_background);
+ builder.SetIsMuted(properties_->is_muted);
+ builder.SetPlayerID(player_id_);
+ if (clamped_duration_ms.has_value())
+ builder.SetDuration(*clamped_duration_ms);
+
+ bool recorded_all_metric = false;
+ for (auto& kv : ukm_record.aggregate_watch_time_info) {
+ DCHECK_GE(kv.second, base::TimeDelta());
+
+ if (kv.first == WatchTimeKey::kAudioAll ||
+ kv.first == WatchTimeKey::kAudioBackgroundAll ||
+ kv.first == WatchTimeKey::kAudioVideoAll ||
+ kv.first == WatchTimeKey::kAudioVideoMutedAll ||
+ kv.first == WatchTimeKey::kAudioVideoBackgroundAll ||
+ kv.first == WatchTimeKey::kVideoAll ||
+ kv.first == WatchTimeKey::kVideoBackgroundAll) {
+ // Only one of these keys should be present.
+ DCHECK(!recorded_all_metric);
+ recorded_all_metric = true;
+
+ builder.SetWatchTime(kv.second.InMilliseconds());
+ if (ukm_record.total_underflow_count) {
+ builder.SetMeanTimeBetweenRebuffers(
+ (kv.second / ukm_record.total_underflow_count).InMilliseconds());
+ }
+ } else if (kv.first == WatchTimeKey::kAudioAc ||
+ kv.first == WatchTimeKey::kAudioBackgroundAc ||
+ kv.first == WatchTimeKey::kAudioVideoAc ||
+ kv.first == WatchTimeKey::kAudioVideoMutedAc ||
+ kv.first == WatchTimeKey::kAudioVideoBackgroundAc ||
+ kv.first == WatchTimeKey::kVideoAc ||
+ kv.first == WatchTimeKey::kVideoBackgroundAc) {
+ builder.SetWatchTime_AC(kv.second.InMilliseconds());
+ } else if (kv.first == WatchTimeKey::kAudioBattery ||
+ kv.first == WatchTimeKey::kAudioBackgroundBattery ||
+ kv.first == WatchTimeKey::kAudioVideoBattery ||
+ kv.first == WatchTimeKey::kAudioVideoMutedBattery ||
+ kv.first == WatchTimeKey::kAudioVideoBackgroundBattery ||
+ kv.first == WatchTimeKey::kVideoBattery ||
+ kv.first == WatchTimeKey::kVideoBackgroundBattery) {
+ builder.SetWatchTime_Battery(kv.second.InMilliseconds());
+ } else if (kv.first == WatchTimeKey::kAudioNativeControlsOn ||
+ kv.first == WatchTimeKey::kAudioVideoNativeControlsOn ||
+ kv.first == WatchTimeKey::kAudioVideoMutedNativeControlsOn ||
+ kv.first == WatchTimeKey::kVideoNativeControlsOn) {
+ builder.SetWatchTime_NativeControlsOn(kv.second.InMilliseconds());
+ } else if (kv.first == WatchTimeKey::kAudioNativeControlsOff ||
+ kv.first == WatchTimeKey::kAudioVideoNativeControlsOff ||
+ kv.first == WatchTimeKey::kAudioVideoMutedNativeControlsOff ||
+ kv.first == WatchTimeKey::kVideoNativeControlsOff) {
+ builder.SetWatchTime_NativeControlsOff(kv.second.InMilliseconds());
+ } else if (kv.first == WatchTimeKey::kAudioVideoDisplayFullscreen ||
+ kv.first == WatchTimeKey::kAudioVideoMutedDisplayFullscreen ||
+ kv.first == WatchTimeKey::kVideoDisplayFullscreen) {
+ builder.SetWatchTime_DisplayFullscreen(kv.second.InMilliseconds());
+ } else if (kv.first == WatchTimeKey::kAudioVideoDisplayInline ||
+ kv.first == WatchTimeKey::kAudioVideoMutedDisplayInline ||
+ kv.first == WatchTimeKey::kVideoDisplayInline) {
+ builder.SetWatchTime_DisplayInline(kv.second.InMilliseconds());
+ } else if (kv.first == WatchTimeKey::kAudioVideoDisplayPictureInPicture ||
+ kv.first ==
+ WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture ||
+ kv.first == WatchTimeKey::kVideoDisplayPictureInPicture) {
+ builder.SetWatchTime_DisplayPictureInPicture(
+ kv.second.InMilliseconds());
+ }
+ }
- // We convert decoder names to a hash and then translate that hash to a zero
- // valued enum to avoid burdening the rest of the decoder code base. This was
- // the simplest and most effective solution for the following reasons:
- //
- // - We can't report hashes to UKM since the privacy team worries they may
- // end up as hashes of user data.
- // - Given that decoders are defined and implemented all over the code base
- // it's unwieldly to have a single location which defines all decoder names.
- // - Due to the above, no single media/ location has access to all names.
- //
- builder.SetAudioDecoderName(
- static_cast<int64_t>(ConvertAudioDecoderNameToEnum(audio_decoder_name_)));
- builder.SetVideoDecoderName(
- static_cast<int64_t>(ConvertVideoDecoderNameToEnum(video_decoder_name_)));
-
- builder.SetIsEME(properties_->is_eme);
- builder.SetIsMSE(properties_->is_mse);
- builder.SetLastPipelineStatus(pipeline_status_);
- builder.SetRebuffersCount(total_underflow_count_);
- builder.SetVideoNaturalWidth(properties_->natural_size.width());
- builder.SetVideoNaturalHeight(properties_->natural_size.height());
- builder.SetAutoplayInitiated(autoplay_initiated_.value_or(false));
- builder.Record(ukm_recorder);
-
- aggregate_watch_time_info_.clear();
+ // See note in mojom::PlaybackProperties about why we have both of these.
+ builder.SetAudioCodec(ukm_record.secondary_properties->audio_codec);
+ builder.SetVideoCodec(ukm_record.secondary_properties->video_codec);
+ builder.SetHasAudio(properties_->has_audio);
+ builder.SetHasVideo(properties_->has_video);
+
+ // We convert decoder names to a hash and then translate that hash to a zero
+ // valued enum to avoid burdening the rest of the decoder code base. This
+ // was the simplest and most effective solution for the following reasons:
+ //
+ // - We can't report hashes to UKM since the privacy team worries they may
+ // end up as hashes of user data.
+ // - Given that decoders are defined and implemented all over the code base
+ // it's unwieldly to have a single location which defines all decoder
+ // names.
+ // - Due to the above, no single media/ location has access to all names.
+ //
+ builder.SetAudioDecoderName(
+ static_cast<int64_t>(ConvertAudioDecoderNameToEnum(
+ ukm_record.secondary_properties->audio_decoder_name)));
+ builder.SetVideoDecoderName(
+ static_cast<int64_t>(ConvertVideoDecoderNameToEnum(
+ ukm_record.secondary_properties->video_decoder_name)));
+
+ builder.SetIsEME(properties_->is_eme);
+ builder.SetIsMSE(properties_->is_mse);
+ builder.SetLastPipelineStatus(pipeline_status_);
+ builder.SetRebuffersCount(ukm_record.total_underflow_count);
+ builder.SetVideoNaturalWidth(
+ ukm_record.secondary_properties->natural_size.width());
+ builder.SetVideoNaturalHeight(
+ ukm_record.secondary_properties->natural_size.height());
+ builder.SetAutoplayInitiated(autoplay_initiated_.value_or(false));
+ builder.Record(ukm_recorder);
+ }
+
+ ukm_records_.clear();
}
WatchTimeRecorder::ExtendedMetricsKeyMap::ExtendedMetricsKeyMap(
diff --git a/chromium/media/mojo/services/watch_time_recorder.h b/chromium/media/mojo/services/watch_time_recorder.h
index 82a7b670594..95d7f067c23 100644
--- a/chromium/media/mojo/services/watch_time_recorder.h
+++ b/chromium/media/mojo/services/watch_time_recorder.h
@@ -34,10 +34,10 @@ class MEDIA_MOJO_EXPORT WatchTimeRecorder : public mojom::WatchTimeRecorder {
void FinalizeWatchTime(
const std::vector<WatchTimeKey>& watch_time_keys) override;
void OnError(PipelineStatus status) override;
- void SetAudioDecoderName(const std::string& name) override;
- void SetVideoDecoderName(const std::string& name) override;
+ void UpdateSecondaryProperties(
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties) override;
void SetAutoplayInitiated(bool value) override;
-
+ void OnDurationChanged(base::TimeDelta duration) override;
void UpdateUnderflowCount(int32_t count) override;
private:
@@ -77,17 +77,30 @@ class MEDIA_MOJO_EXPORT WatchTimeRecorder : public mojom::WatchTimeRecorder {
using WatchTimeInfo = base::flat_map<WatchTimeKey, base::TimeDelta>;
WatchTimeInfo watch_time_info_;
- // Sum of all watch time data since the last complete finalize.
- WatchTimeInfo aggregate_watch_time_info_;
+ // Aggregate record of all watch time for a given set of secondary properties.
+ struct WatchTimeUkmRecord {
+ explicit WatchTimeUkmRecord(
+ mojom::SecondaryPlaybackPropertiesPtr properties);
+ WatchTimeUkmRecord(WatchTimeUkmRecord&& record);
+ ~WatchTimeUkmRecord();
+
+ // Properties for this segment of UKM watch time.
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties;
+
+ // Sum of all watch time data since the last complete finalize.
+ WatchTimeInfo aggregate_watch_time_info;
+
+ // Total underflow count for this segment of UKM watch time.
+ int total_underflow_count = 0;
+ };
+
+ // List of all watch time segments. A new entry is added for every secondary
+ // property update.
+ std::vector<WatchTimeUkmRecord> ukm_records_;
int underflow_count_ = 0;
- int total_underflow_count_ = 0;
PipelineStatus pipeline_status_ = PIPELINE_OK;
-
- // Decoder name associated with this recorder. Reported to UKM as a
- // base::PersistentHash().
- std::string audio_decoder_name_;
- std::string video_decoder_name_;
+ base::TimeDelta duration_ = kNoTimestamp;
base::Optional<bool> autoplay_initiated_;
diff --git a/chromium/media/mojo/services/watch_time_recorder_unittest.cc b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
index a9e2e0a428f..560e47db194 100644
--- a/chromium/media/mojo/services/watch_time_recorder_unittest.cc
+++ b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
@@ -12,7 +12,7 @@
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
-#include "base/test/histogram_tester.h"
+#include "base/test/metrics/histogram_tester.h"
#include "base/test/test_message_loop.h"
#include "base/threading/thread_task_runner_handle.h"
#include "components/ukm/test_ukm_recorder.h"
@@ -52,7 +52,8 @@ class WatchTimeRecorderTest : public testing::Test {
kDiscardedWatchTimeAudioVideoMse,
kDiscardedWatchTimeAudioVideoEme}) {
ResetMetricRecorders();
- MediaMetricsProvider::Create(nullptr, mojo::MakeRequest(&provider_));
+ MediaMetricsProvider::Create(VideoDecodePerfHistory::SaveCallback(),
+ mojo::MakeRequest(&provider_));
}
~WatchTimeRecorderTest() override { base::RunLoop().RunUntilIdle(); }
@@ -69,8 +70,7 @@ class WatchTimeRecorderTest : public testing::Test {
bool is_mse,
bool is_encrypted) {
Initialize(mojom::PlaybackProperties::New(
- kUnknownAudioCodec, kUnknownVideoCodec, has_audio, has_video, false,
- false, is_mse, is_encrypted, false, gfx::Size(800, 600)));
+ has_audio, has_video, false, false, is_mse, is_encrypted, false));
}
void ExpectWatchTime(const std::vector<base::StringPiece>& keys,
@@ -172,6 +172,9 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
// Values for |is_background| and |is_muted| don't matter in this test since
// they don't prevent the muted or background keys from being recorded.
Initialize(true, false, true, true);
+ wtr_->UpdateSecondaryProperties(mojom::SecondaryPlaybackProperties::New(
+ kCodecAAC, kCodecH264, "", "", gfx::Size(800, 600)));
+
wtr_->RecordWatchTime(WatchTimeKey::kWatchTimeKeyMax, kWatchTime1);
wtr_->RecordWatchTime(key, kWatchTime1);
wtr_->RecordWatchTime(key, kWatchTime2);
@@ -352,6 +355,8 @@ TEST_F(WatchTimeRecorderTest, TestRebufferingMetrics) {
TEST_F(WatchTimeRecorderTest, TestDiscardMetrics) {
Initialize(true, false, true, true);
+ wtr_->UpdateSecondaryProperties(mojom::SecondaryPlaybackProperties::New(
+ kCodecAAC, kCodecH264, "", "", gfx::Size(800, 600)));
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(5);
for (auto key : computation_keys_)
@@ -384,9 +389,12 @@ TEST_F(WatchTimeRecorderTest, TestDiscardMetrics) {
TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- kCodecAAC, kCodecH264, true, true, false, false, false, false, false,
- gfx::Size(800, 600));
+ true, true, false, false, false, false, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
+ mojom::SecondaryPlaybackProperties::New(kCodecAAC, kCodecH264, "", "",
+ gfx::Size(800, 600));
Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
// Verify that UKM is reported along with the watch time.
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(4);
@@ -419,8 +427,8 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
@@ -428,9 +436,9 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
- properties->natural_size.width());
+ secondary_properties->natural_size.width());
EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
- properties->natural_size.height());
+ secondary_properties->natural_size.height());
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 0);
EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 0);
@@ -450,9 +458,12 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- kCodecAAC, kCodecH264, true, true, false, false, false, false, false,
- gfx::Size(800, 600));
+ true, true, false, false, false, false, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
+ mojom::SecondaryPlaybackProperties::New(kCodecAAC, kCodecH264, "", "",
+ gfx::Size(800, 600));
Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
// Finalize everything. UKM is only recorded at destruction, so this should do
// nothing.
@@ -482,8 +493,8 @@ TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
@@ -491,9 +502,9 @@ TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
- properties->natural_size.width());
+ secondary_properties->natural_size.width());
EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
- properties->natural_size.height());
+ secondary_properties->natural_size.height());
EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 0);
EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 0);
EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
@@ -513,9 +524,12 @@ TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- kCodecAAC, kCodecH264, true, true, false, false, false, false, false,
- gfx::Size(800, 600));
+ true, true, false, false, false, false, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
+ mojom::SecondaryPlaybackProperties::New(kCodecAAC, kCodecH264, "", "",
+ gfx::Size(800, 600));
Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(4);
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
@@ -530,8 +544,8 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
@@ -539,9 +553,9 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
- properties->natural_size.width());
+ secondary_properties->natural_size.width());
EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
- properties->natural_size.height());
+ secondary_properties->natural_size.height());
EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 0);
EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 0);
@@ -560,9 +574,12 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- kCodecOpus, kCodecVP9, true, true, false, false, true, true, false,
- gfx::Size(800, 600));
+ true, true, false, false, true, true, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
+ mojom::SecondaryPlaybackProperties::New(kCodecOpus, kCodecVP9, "", "",
+ gfx::Size(800, 600));
Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
const base::TimeDelta kWatchTime2 = kWatchTime * 2;
@@ -586,13 +603,14 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
wtr_->UpdateUnderflowCount(3);
wtr_->OnError(PIPELINE_ERROR_DECODE);
- const std::string kAudioDecoderName = "MojoAudioDecoder";
- const std::string kVideoDecoderName = "MojoVideoDecoder";
- wtr_->SetAudioDecoderName(kAudioDecoderName);
- wtr_->SetVideoDecoderName(kVideoDecoderName);
+ secondary_properties->audio_decoder_name = "MojoAudioDecoder";
+ secondary_properties->video_decoder_name = "MojoVideoDecoder";
+ wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
wtr_->SetAutoplayInitiated(true);
+ wtr_->OnDurationChanged(base::TimeDelta::FromSeconds(9500));
+
wtr_.reset();
base::RunLoop().RunUntilIdle();
@@ -621,10 +639,13 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 2);
EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 5);
+ // Duration should be rounded up.
+ EXPECT_UKM(UkmEntry::kDurationName, 10000000);
+
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
@@ -632,18 +653,21 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_ERROR_DECODE);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 3);
EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
- properties->natural_size.width());
+ secondary_properties->natural_size.width());
EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
- properties->natural_size.height());
+ secondary_properties->natural_size.height());
EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, true);
}
}
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- kCodecAAC, kCodecH264, true, true, true, true, false, false, false,
- gfx::Size(800, 600));
+ true, true, true, true, false, false, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
+ mojom::SecondaryPlaybackProperties::New(kCodecAAC, kCodecH264, "", "",
+ gfx::Size(800, 600));
Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoBackgroundAll, kWatchTime);
@@ -658,8 +682,108 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
- EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
- EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
+ EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
+ EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
+ EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties->natural_size.height());
+ EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 0);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 0);
+ EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
+
+ EXPECT_NO_UKM(UkmEntry::kDurationName);
+ EXPECT_NO_UKM(UkmEntry::kMeanTimeBetweenRebuffersName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_ACName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_BatteryName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_NativeControlsOnName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_NativeControlsOffName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_DisplayFullscreenName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_DisplayInlineName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_DisplayPictureInPictureName);
+ }
+}
+
+TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDuration) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ true, true, false, false, false, false, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
+ mojom::SecondaryPlaybackProperties::New(kCodecAAC, kCodecH264, "", "",
+ gfx::Size(800, 600));
+ Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
+
+ wtr_->OnDurationChanged(base::TimeDelta::FromSeconds(12345));
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ const auto& entries = test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(1u, entries.size());
+ for (const auto* entry : entries) {
+ test_recorder_->ExpectEntrySourceHasUrl(entry, GURL(kTestOrigin));
+
+ EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
+ EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
+ EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
+ EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
+ EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties->natural_size.height());
+ EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 0);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 0);
+ EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
+
+ // Duration should be rounded to the most significant digit.
+ EXPECT_UKM(UkmEntry::kDurationName, 10000000);
+
+ EXPECT_NO_UKM(UkmEntry::kMeanTimeBetweenRebuffersName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_ACName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_BatteryName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_NativeControlsOnName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_NativeControlsOffName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_DisplayFullscreenName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_DisplayInlineName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTime_DisplayPictureInPictureName);
+ }
+}
+
+TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDurationInfinite) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ true, true, false, false, false, false, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
+ mojom::SecondaryPlaybackProperties::New(kCodecAAC, kCodecH264, "", "",
+ gfx::Size(800, 600));
+ Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
+
+ wtr_->OnDurationChanged(kInfiniteDuration);
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ const auto& entries = test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(1u, entries.size());
+ for (const auto* entry : entries) {
+ test_recorder_->ExpectEntrySourceHasUrl(entry, GURL(kTestOrigin));
+
+ EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
@@ -667,14 +791,17 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
- properties->natural_size.width());
+ secondary_properties->natural_size.width());
EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
- properties->natural_size.height());
+ secondary_properties->natural_size.height());
EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 0);
EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 0);
EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
+ // Duration should be unrecorded when infinite.
+ EXPECT_NO_UKM(UkmEntry::kDurationName);
+ EXPECT_NO_UKM(UkmEntry::kWatchTimeName);
EXPECT_NO_UKM(UkmEntry::kMeanTimeBetweenRebuffersName);
EXPECT_NO_UKM(UkmEntry::kWatchTime_ACName);
EXPECT_NO_UKM(UkmEntry::kWatchTime_BatteryName);
@@ -686,6 +813,313 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
}
}
+// Might happen due to timing issues, so ensure no crashes.
+TEST_F(WatchTimeRecorderTest, NoSecondaryProperties) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ true, true, false, false, true, true, false);
+ Initialize(properties.Clone());
+
+ constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
+ wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+ const auto& entries = test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(0u, entries.size());
+}
+
+TEST_F(WatchTimeRecorderTest, SingleSecondaryPropertiesUnknownToKnown) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ true, true, false, false, true, true, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
+ mojom::SecondaryPlaybackProperties::New(
+ kUnknownAudioCodec, kUnknownVideoCodec, "", "", gfx::Size(800, 600));
+ Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
+
+ constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
+ wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
+
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
+ mojom::SecondaryPlaybackProperties::New(
+ kCodecAAC, kCodecH264, "FFmpegAudioDecoder", "FFmpegVideoDecoder",
+ gfx::Size(800, 600));
+ wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
+
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ // Since we only transitioned unknown values to known values, there should be
+ // only a single UKM entry.
+ const auto& entries = test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(1u, entries.size());
+ for (const auto* entry : entries) {
+ test_recorder_->ExpectEntrySourceHasUrl(entry, GURL(kTestOrigin));
+ EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
+ EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
+ EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
+ EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
+ EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
+ EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
+ EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
+ EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 1);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 2);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties2->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties2->natural_size.height());
+ EXPECT_NO_UKM(UkmEntry::kDurationName);
+ }
+}
+
+TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ true, true, false, false, true, true, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
+ mojom::SecondaryPlaybackProperties::New(
+ kCodecOpus, kCodecVP9, "MojoAudioDecoder", "MojoVideoDecoder",
+ gfx::Size(400, 300));
+ Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
+
+ constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(54);
+ const int kUnderflowCount1 = 2;
+ wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime1);
+ wtr_->UpdateUnderflowCount(kUnderflowCount1);
+
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
+ mojom::SecondaryPlaybackProperties::New(
+ kCodecAAC, kCodecH264, "FFmpegAudioDecoder", "FFmpegVideoDecoder",
+ gfx::Size(800, 600));
+ wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
+
+ constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
+ const int kUnderflowCount2 = 3;
+
+ // Watch time and underflow counts continue to accumulate during property
+ // changes, so we report the sum here instead of just kWatchTime2.
+ wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll,
+ kWatchTime1 + kWatchTime2);
+ wtr_->UpdateUnderflowCount(kUnderflowCount1 + kUnderflowCount2);
+ wtr_->OnError(PIPELINE_ERROR_DECODE);
+ wtr_->OnDurationChanged(base::TimeDelta::FromSeconds(5125));
+
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ // All records should have the following:
+ const auto& entries = test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(2u, entries.size());
+ for (const auto* entry : entries) {
+ test_recorder_->ExpectEntrySourceHasUrl(entry, GURL(kTestOrigin));
+ EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
+ EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
+ EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
+ EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
+ EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
+ EXPECT_UKM(UkmEntry::kDurationName, 5000000);
+ EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
+
+ // All records inherit the final pipeline status code.
+ EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_ERROR_DECODE);
+ }
+
+ // The first record should have...
+ auto* entry = entries[0];
+ EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime1.InMilliseconds());
+ EXPECT_UKM(UkmEntry::kMeanTimeBetweenRebuffersName,
+ kWatchTime1.InMilliseconds() / kUnderflowCount1);
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 2);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 5);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, kUnderflowCount1);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties1->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties1->natural_size.height());
+
+ // The second record should have...
+ entry = entries[1];
+ EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime2.InMilliseconds());
+ EXPECT_UKM(UkmEntry::kMeanTimeBetweenRebuffersName,
+ kWatchTime2.InMilliseconds() / kUnderflowCount2);
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 1);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 2);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, kUnderflowCount2);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties2->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties2->natural_size.height());
+}
+
+TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ true, true, false, false, true, true, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
+ mojom::SecondaryPlaybackProperties::New(
+ kCodecOpus, kCodecVP9, "MojoAudioDecoder", "MojoVideoDecoder",
+ gfx::Size(400, 300));
+ Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
+
+ constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(54);
+ const int kUnderflowCount1 = 2;
+ wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime1);
+ wtr_->UpdateUnderflowCount(kUnderflowCount1);
+
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
+ mojom::SecondaryPlaybackProperties::New(
+ kCodecAAC, kCodecH264, "FFmpegAudioDecoder", "FFmpegVideoDecoder",
+ gfx::Size(800, 600));
+ wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
+
+ // Don't record any watch time to the new record, it should report zero watch
+ // time upon destruction. This ensures there's always a Finalize to prevent
+ // UKM was receiving negative values from the previous unfinalized record.
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ // All records should have the following:
+ const auto& entries = test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(2u, entries.size());
+ for (const auto* entry : entries) {
+ test_recorder_->ExpectEntrySourceHasUrl(entry, GURL(kTestOrigin));
+ EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
+ EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
+ EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
+ EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
+ EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
+ EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
+ EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
+ EXPECT_NO_UKM(UkmEntry::kDurationName);
+ }
+
+ // The first record should have...
+ auto* entry = entries[0];
+ EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime1.InMilliseconds());
+ EXPECT_UKM(UkmEntry::kMeanTimeBetweenRebuffersName,
+ kWatchTime1.InMilliseconds() / kUnderflowCount1);
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 2);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 5);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, kUnderflowCount1);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties1->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties1->natural_size.height());
+
+ // The second record should have...
+ entry = entries[1];
+ EXPECT_UKM(UkmEntry::kWatchTimeName, 0);
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 1);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 2);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties2->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties2->natural_size.height());
+}
+
+TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ true, true, false, false, true, true, false);
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
+ mojom::SecondaryPlaybackProperties::New(
+ kCodecOpus, kCodecVP9, "MojoAudioDecoder", "MojoVideoDecoder",
+ gfx::Size(400, 300));
+ Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
+
+ constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(54);
+ const int kUnderflowCount1 = 2;
+ wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime1);
+ wtr_->UpdateUnderflowCount(kUnderflowCount1);
+
+ // Force a finalize here so that the there is no unfinalized watch time at the
+ // time of the secondary property update.
+ wtr_->FinalizeWatchTime({});
+
+ mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
+ mojom::SecondaryPlaybackProperties::New(
+ kCodecAAC, kCodecH264, "FFmpegAudioDecoder", "FFmpegVideoDecoder",
+ gfx::Size(800, 600));
+ wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
+
+ constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
+ const int kUnderflowCount2 = 3;
+
+ wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime2);
+ wtr_->UpdateUnderflowCount(kUnderflowCount2);
+ wtr_->OnError(PIPELINE_ERROR_DECODE);
+
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ // All records should have the following:
+ const auto& entries = test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(2u, entries.size());
+ for (const auto* entry : entries) {
+ test_recorder_->ExpectEntrySourceHasUrl(entry, GURL(kTestOrigin));
+ EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
+ EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
+ EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
+ EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
+ EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
+ EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
+ EXPECT_NO_UKM(UkmEntry::kDurationName);
+
+ // All records inherit the final pipeline status code.
+ EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_ERROR_DECODE);
+ }
+
+ // The first record should have...
+ auto* entry = entries[0];
+ EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime1.InMilliseconds());
+ EXPECT_UKM(UkmEntry::kMeanTimeBetweenRebuffersName,
+ kWatchTime1.InMilliseconds() / kUnderflowCount1);
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 2);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 5);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, kUnderflowCount1);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties1->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties1->video_codec);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties1->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties1->natural_size.height());
+
+ // The second record should have...
+ entry = entries[1];
+ EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime2.InMilliseconds());
+ EXPECT_UKM(UkmEntry::kMeanTimeBetweenRebuffersName,
+ kWatchTime2.InMilliseconds() / kUnderflowCount2);
+ EXPECT_UKM(UkmEntry::kAudioDecoderNameName, 1);
+ EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 2);
+ EXPECT_UKM(UkmEntry::kRebuffersCountName, kUnderflowCount2);
+ EXPECT_UKM(UkmEntry::kAudioCodecName, secondary_properties2->audio_codec);
+ EXPECT_UKM(UkmEntry::kVideoCodecName, secondary_properties2->video_codec);
+ EXPECT_UKM(UkmEntry::kVideoNaturalWidthName,
+ secondary_properties2->natural_size.width());
+ EXPECT_UKM(UkmEntry::kVideoNaturalHeightName,
+ secondary_properties2->natural_size.height());
+}
+
#undef EXPECT_UKM
#undef EXPECT_NO_UKM
#undef EXPECT_HAS_UKM
diff --git a/chromium/media/remoting/proto_enum_utils.cc b/chromium/media/remoting/proto_enum_utils.cc
index e16c112c1f0..9e654203cbb 100644
--- a/chromium/media/remoting/proto_enum_utils.cc
+++ b/chromium/media/remoting/proto_enum_utils.cc
@@ -285,7 +285,9 @@ base::Optional<VideoCodecProfile> ToMediaVideoCodecProfile(
CASE_RETURN_OTHER(DOLBYVISION_PROFILE5);
CASE_RETURN_OTHER(DOLBYVISION_PROFILE7);
CASE_RETURN_OTHER(THEORAPROFILE_ANY);
- CASE_RETURN_OTHER(AV1PROFILE_PROFILE0);
+ CASE_RETURN_OTHER(AV1PROFILE_PROFILE_MAIN);
+ CASE_RETURN_OTHER(AV1PROFILE_PROFILE_HIGH);
+ CASE_RETURN_OTHER(AV1PROFILE_PROFILE_PRO);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -320,7 +322,9 @@ ToProtoVideoDecoderConfigProfile(VideoCodecProfile value) {
CASE_RETURN_OTHER(DOLBYVISION_PROFILE5);
CASE_RETURN_OTHER(DOLBYVISION_PROFILE7);
CASE_RETURN_OTHER(THEORAPROFILE_ANY);
- CASE_RETURN_OTHER(AV1PROFILE_PROFILE0);
+ CASE_RETURN_OTHER(AV1PROFILE_PROFILE_MAIN);
+ CASE_RETURN_OTHER(AV1PROFILE_PROFILE_HIGH);
+ CASE_RETURN_OTHER(AV1PROFILE_PROFILE_PRO);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -540,9 +544,9 @@ base::Optional<CdmSessionType> ToCdmSessionType(pb::CdmSessionType value) {
using OriginType = pb::CdmSessionType;
using OtherType = CdmSessionType;
switch (value) {
- CASE_RETURN_OTHER(TEMPORARY_SESSION);
- CASE_RETURN_OTHER(PERSISTENT_LICENSE_SESSION);
- CASE_RETURN_OTHER(PERSISTENT_RELEASE_MESSAGE_SESSION);
+ CASE_RETURN_OTHER(kTemporary);
+ CASE_RETURN_OTHER(kPersistentLicense);
+ CASE_RETURN_OTHER(kPersistentUsageRecord);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -551,9 +555,9 @@ base::Optional<pb::CdmSessionType> ToProtoCdmSessionType(CdmSessionType value) {
using OriginType = CdmSessionType;
using OtherType = pb::CdmSessionType;
switch (value) {
- CASE_RETURN_OTHER(TEMPORARY_SESSION);
- CASE_RETURN_OTHER(PERSISTENT_LICENSE_SESSION);
- CASE_RETURN_OTHER(PERSISTENT_RELEASE_MESSAGE_SESSION);
+ CASE_RETURN_OTHER(kTemporary);
+ CASE_RETURN_OTHER(kPersistentLicense);
+ CASE_RETURN_OTHER(kPersistentUsageRecord);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
diff --git a/chromium/media/remoting/renderer_controller_unittest.cc b/chromium/media/remoting/renderer_controller_unittest.cc
index a7336a3377b..2628b0d5b27 100644
--- a/chromium/media/remoting/renderer_controller_unittest.cc
+++ b/chromium/media/remoting/renderer_controller_unittest.cc
@@ -138,10 +138,7 @@ class RendererControllerTest : public ::testing::Test,
decoded_frames_ = frame_rate * kDelayedStartDuration.InSeconds();
clock_.Advance(kDelayedStartDuration);
RunUntilIdle();
- const base::Closure callback =
- controller_->delayed_start_stability_timer_.user_task();
- callback.Run();
- controller_->delayed_start_stability_timer_.Stop();
+ controller_->delayed_start_stability_timer_.FireNow();
}
void ExpectInDelayedStart() const {
diff --git a/chromium/media/remoting/rpc.proto b/chromium/media/remoting/rpc.proto
index 28936962cd8..579c21adff6 100644
--- a/chromium/media/remoting/rpc.proto
+++ b/chromium/media/remoting/rpc.proto
@@ -186,7 +186,9 @@ message VideoDecoderConfig {
DOLBYVISION_PROFILE5 = 21;
DOLBYVISION_PROFILE7 = 22;
THEORAPROFILE_ANY = 23;
- AV1PROFILE_PROFILE0 = 24;
+ AV1PROFILE_PROFILE_MAIN = 24;
+ AV1PROFILE_PROFILE_HIGH = 25;
+ AV1PROFILE_PROFILE_PRO = 26;
};
// Proto version of media::VideoPixelFormat.
@@ -318,9 +320,9 @@ enum CdmMessageType {
// Proto version of media::CdmSessionType.
// NEVER change these numbers or re-use old ones; only add new ones.
enum CdmSessionType {
- TEMPORARY_SESSION = 0;
- PERSISTENT_LICENSE_SESSION = 1;
- PERSISTENT_RELEASE_MESSAGE_SESSION = 2;
+ kTemporary = 0;
+ kPersistentLicense = 1;
+ kPersistentUsageRecord = 2;
};
message RendererInitialize {
diff --git a/chromium/media/renderers/BUILD.gn b/chromium/media/renderers/BUILD.gn
index b68e8f0b2c7..9a0852784d1 100644
--- a/chromium/media/renderers/BUILD.gn
+++ b/chromium/media/renderers/BUILD.gn
@@ -28,11 +28,15 @@ source_set("renderers") {
"video_overlay_factory.h",
"video_renderer_impl.cc",
"video_renderer_impl.h",
+ "video_resource_updater.cc",
+ "video_resource_updater.h",
]
deps = [
"//base",
+ "//cc/base", # For MathUtil.
"//cc/paint",
+ "//components/viz/client",
"//gpu/command_buffer/client:gles2_interface",
"//gpu/command_buffer/common",
"//media/base",
@@ -44,6 +48,7 @@ source_set("renderers") {
"//ui/gfx:geometry_skia",
"//ui/gfx:memory_buffer",
"//ui/gfx/geometry",
+ "//ui/gl",
]
configs += [
@@ -61,12 +66,15 @@ source_set("unit_tests") {
"paint_canvas_video_renderer_unittest.cc",
"renderer_impl_unittest.cc",
"video_renderer_impl_unittest.cc",
+ "video_resource_updater_unittest.cc",
]
configs += [ "//media:media_config" ]
deps = [
"//base",
"//base/test:test_support",
"//cc/paint",
+ "//components/viz/client",
+ "//components/viz/test:test_support",
"//gpu:test_support",
"//gpu/command_buffer/client:gles2_interface",
"//gpu/command_buffer/common",
diff --git a/chromium/media/renderers/DEPS b/chromium/media/renderers/DEPS
new file mode 100644
index 00000000000..1eea6262bb9
--- /dev/null
+++ b/chromium/media/renderers/DEPS
@@ -0,0 +1,14 @@
+# Do NOT add net/ or ui/base without a great reason, they're huge!
+include_rules = [
+ "+cc/base/math_util.h",
+ "+components/viz/client",
+ "+components/viz/common",
+ "+third_party/khronos/GLES2",
+ "+ul/gl/gl_enums.h",
+]
+
+specific_include_rules = {
+ ".*_unittest.cc": [
+ "+components/viz/test",
+ ],
+}
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index 2737cded7ac..d16f3008ba4 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -294,8 +294,8 @@ void AudioRendererImpl::DoFlush_Locked() {
DCHECK_EQ(state_, kFlushed);
ended_timestamp_ = kInfiniteDuration;
- audio_buffer_stream_->Reset(base::Bind(&AudioRendererImpl::ResetDecoderDone,
- weak_factory_.GetWeakPtr()));
+ audio_buffer_stream_->Reset(base::BindOnce(
+ &AudioRendererImpl::ResetDecoderDone, weak_factory_.GetWeakPtr()));
}
void AudioRendererImpl::ResetDecoderDone() {
@@ -517,6 +517,9 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
sample_rate, preferred_buffer_size));
}
+ audio_parameters_.set_effects(audio_parameters_.effects() |
+ ::media::AudioParameters::MULTIZONE);
+
audio_parameters_.set_latency_tag(AudioLatency::LATENCY_PLAYBACK);
last_decoded_channel_layout_ =
@@ -530,12 +533,14 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
new AudioClock(base::TimeDelta(), audio_parameters_.sample_rate()));
audio_buffer_stream_->Initialize(
- stream, base::Bind(&AudioRendererImpl::OnAudioBufferStreamInitialized,
- weak_factory_.GetWeakPtr()),
- cdm_context, base::Bind(&AudioRendererImpl::OnStatisticsUpdate,
- weak_factory_.GetWeakPtr()),
- base::Bind(&AudioRendererImpl::OnWaitingForDecryptionKey,
- weak_factory_.GetWeakPtr()));
+ stream,
+ base::BindOnce(&AudioRendererImpl::OnAudioBufferStreamInitialized,
+ weak_factory_.GetWeakPtr()),
+ cdm_context,
+ base::BindRepeating(&AudioRendererImpl::OnStatisticsUpdate,
+ weak_factory_.GetWeakPtr()),
+ base::BindRepeating(&AudioRendererImpl::OnWaitingForDecryptionKey,
+ weak_factory_.GetWeakPtr()));
}
void AudioRendererImpl::OnAudioBufferStreamInitialized(bool success) {
@@ -820,8 +825,8 @@ void AudioRendererImpl::AttemptRead_Locked() {
return;
pending_read_ = true;
- audio_buffer_stream_->Read(base::Bind(&AudioRendererImpl::DecodedAudioReady,
- weak_factory_.GetWeakPtr()));
+ audio_buffer_stream_->Read(base::BindOnce(
+ &AudioRendererImpl::DecodedAudioReady, weak_factory_.GetWeakPtr()));
}
bool AudioRendererImpl::CanRead_Locked() {
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index 7e9f62666be..febd8eb68d1 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -908,7 +908,6 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
gpu::gles2::GLES2Interface* canvas_gl = context_3d.gl;
gpu::MailboxHolder mailbox_holder;
mailbox_holder.texture_target = texture_info.fTarget;
- canvas_gl->GenMailboxCHROMIUM(mailbox_holder.mailbox.name);
canvas_gl->ProduceTextureDirectCHROMIUM(texture_info.fID,
mailbox_holder.mailbox.name);
diff --git a/chromium/media/renderers/video_overlay_factory.cc b/chromium/media/renderers/video_overlay_factory.cc
index bc9432ccc1e..9f5a5a4a0ab 100644
--- a/chromium/media/renderers/video_overlay_factory.cc
+++ b/chromium/media/renderers/video_overlay_factory.cc
@@ -36,7 +36,6 @@ class VideoOverlayFactory::Texture {
gl->BindTexture(GL_TEXTURE_2D, texture_id_);
gl->BindTexImage2DCHROMIUM(GL_TEXTURE_2D, image_id_);
- gl->GenMailboxCHROMIUM(mailbox_.name);
gl->ProduceTextureDirectCHROMIUM(texture_id_, mailbox_.name);
gl->GenSyncTokenCHROMIUM(sync_token_.GetData());
diff --git a/chromium/media/renderers/video_renderer_impl.cc b/chromium/media/renderers/video_renderer_impl.cc
index 42598c8427e..997a94916b4 100644
--- a/chromium/media/renderers/video_renderer_impl.cc
+++ b/chromium/media/renderers/video_renderer_impl.cc
@@ -176,8 +176,8 @@ void VideoRendererImpl::Flush(const base::Closure& callback) {
gpu_memory_buffer_pool_->Abort();
frame_callback_weak_factory_.InvalidateWeakPtrs();
video_frame_stream_->Reset(
- base::Bind(&VideoRendererImpl::OnVideoFrameStreamResetDone,
- weak_factory_.GetWeakPtr()));
+ base::BindOnce(&VideoRendererImpl::OnVideoFrameStreamResetDone,
+ weak_factory_.GetWeakPtr()));
// To avoid unnecessary work by VDAs, only delete queued frames after
// resetting |video_frame_stream_|. If this is done in the opposite order VDAs
@@ -252,12 +252,14 @@ void VideoRendererImpl::Initialize(
DCHECK(current_decoder_config_.IsValidConfig());
video_frame_stream_->Initialize(
- stream, base::Bind(&VideoRendererImpl::OnVideoFrameStreamInitialized,
- weak_factory_.GetWeakPtr()),
- cdm_context, base::Bind(&VideoRendererImpl::OnStatisticsUpdate,
- weak_factory_.GetWeakPtr()),
- base::Bind(&VideoRendererImpl::OnWaitingForDecryptionKey,
- weak_factory_.GetWeakPtr()));
+ stream,
+ base::BindOnce(&VideoRendererImpl::OnVideoFrameStreamInitialized,
+ weak_factory_.GetWeakPtr()),
+ cdm_context,
+ base::BindRepeating(&VideoRendererImpl::OnStatisticsUpdate,
+ weak_factory_.GetWeakPtr()),
+ base::BindRepeating(&VideoRendererImpl::OnWaitingForDecryptionKey,
+ weak_factory_.GetWeakPtr()));
}
scoped_refptr<VideoFrame> VideoRendererImpl::Render(
@@ -632,8 +634,8 @@ void VideoRendererImpl::AttemptRead_Locked() {
case kPlaying:
pending_read_ = true;
video_frame_stream_->Read(
- base::BindRepeating(&VideoRendererImpl::FrameReady,
- frame_callback_weak_factory_.GetWeakPtr()));
+ base::BindOnce(&VideoRendererImpl::FrameReady,
+ frame_callback_weak_factory_.GetWeakPtr()));
return;
case kUninitialized:
case kInitializing:
diff --git a/chromium/media/renderers/video_resource_updater.cc b/chromium/media/renderers/video_resource_updater.cc
new file mode 100644
index 00000000000..952c9a7bb9a
--- /dev/null
+++ b/chromium/media/renderers/video_resource_updater.cc
@@ -0,0 +1,1154 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/renderers/video_resource_updater.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <string>
+
+#include "base/atomic_sequence_num.h"
+#include "base/bind.h"
+#include "base/bit_cast.h"
+#include "base/memory/shared_memory.h"
+#include "base/strings/stringprintf.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "base/trace_event/memory_dump_manager.h"
+#include "base/trace_event/process_memory_dump.h"
+#include "base/trace_event/trace_event.h"
+#include "build/build_config.h"
+#include "cc/base/math_util.h"
+#include "cc/paint/skia_paint_canvas.h"
+#include "components/viz/client/client_resource_provider.h"
+#include "components/viz/client/shared_bitmap_reporter.h"
+#include "components/viz/common/gpu/context_provider.h"
+#include "components/viz/common/gpu/texture_allocation.h"
+#include "components/viz/common/quads/render_pass.h"
+#include "components/viz/common/quads/stream_video_draw_quad.h"
+#include "components/viz/common/quads/texture_draw_quad.h"
+#include "components/viz/common/quads/yuv_video_draw_quad.h"
+#include "components/viz/common/resources/bitmap_allocation.h"
+#include "components/viz/common/resources/resource_sizes.h"
+#include "gpu/GLES2/gl2extchromium.h"
+#include "gpu/command_buffer/client/context_support.h"
+#include "gpu/command_buffer/client/gles2_interface.h"
+#include "media/base/video_frame.h"
+#include "media/renderers/paint_canvas_video_renderer.h"
+#include "media/video/half_float_maker.h"
+#include "third_party/khronos/GLES2/gl2.h"
+#include "third_party/khronos/GLES2/gl2ext.h"
+#include "third_party/libyuv/include/libyuv.h"
+#include "third_party/skia/include/core/SkCanvas.h"
+#include "ui/gfx/geometry/size_conversions.h"
+#include "ui/gfx/skia_util.h"
+#include "ui/gl/gl_enums.h"
+#include "ui/gl/trace_util.h"
+
+namespace media {
+namespace {
+
+// Generates process-unique IDs to use for tracing video resources.
+base::AtomicSequenceNumber g_next_video_resource_updater_id;
+
+VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
+ VideoPixelFormat format,
+ GLuint target,
+ int num_textures,
+ gfx::BufferFormat* buffer_format,
+ bool use_stream_video_draw_quad) {
+ *buffer_format = gfx::BufferFormat::RGBA_8888;
+ switch (format) {
+ case PIXEL_FORMAT_ARGB:
+ case PIXEL_FORMAT_XRGB:
+ case PIXEL_FORMAT_RGB32:
+ case PIXEL_FORMAT_UYVY:
+ switch (target) {
+ case GL_TEXTURE_EXTERNAL_OES:
+ if (use_stream_video_draw_quad)
+ return VideoFrameResourceType::STREAM_TEXTURE;
+ FALLTHROUGH;
+ case GL_TEXTURE_2D:
+ return (format == PIXEL_FORMAT_XRGB)
+ ? VideoFrameResourceType::RGB
+ : VideoFrameResourceType::RGBA_PREMULTIPLIED;
+ case GL_TEXTURE_RECTANGLE_ARB:
+ return VideoFrameResourceType::RGB;
+ default:
+ NOTREACHED();
+ break;
+ }
+ break;
+ case PIXEL_FORMAT_I420:
+ return VideoFrameResourceType::YUV;
+ case PIXEL_FORMAT_NV12:
+ DCHECK(target == GL_TEXTURE_EXTERNAL_OES || target == GL_TEXTURE_2D ||
+ target == GL_TEXTURE_RECTANGLE_ARB)
+ << "Unsupported target " << gl::GLEnums::GetStringEnum(target);
+ // Single plane textures can be sampled as RGB.
+ if (num_textures > 1)
+ return VideoFrameResourceType::YUV;
+
+ *buffer_format = gfx::BufferFormat::YUV_420_BIPLANAR;
+ return VideoFrameResourceType::RGB;
+ case PIXEL_FORMAT_YV12:
+ case PIXEL_FORMAT_I422:
+ case PIXEL_FORMAT_I444:
+ case PIXEL_FORMAT_I420A:
+ case PIXEL_FORMAT_NV21:
+ case PIXEL_FORMAT_YUY2:
+ case PIXEL_FORMAT_RGB24:
+ case PIXEL_FORMAT_MJPEG:
+ case PIXEL_FORMAT_MT21:
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV420P10:
+ case PIXEL_FORMAT_YUV422P10:
+ case PIXEL_FORMAT_YUV444P10:
+ case PIXEL_FORMAT_YUV420P12:
+ case PIXEL_FORMAT_YUV422P12:
+ case PIXEL_FORMAT_YUV444P12:
+ case PIXEL_FORMAT_Y16:
+ case PIXEL_FORMAT_UNKNOWN:
+ break;
+ }
+ return VideoFrameResourceType::NONE;
+}
+
+class SyncTokenClientImpl : public VideoFrame::SyncTokenClient {
+ public:
+ SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl, gpu::SyncToken sync_token)
+ : gl_(gl), sync_token_(sync_token) {}
+ ~SyncTokenClientImpl() override = default;
+
+ void GenerateSyncToken(gpu::SyncToken* sync_token) override {
+ if (sync_token_.HasData()) {
+ *sync_token = sync_token_;
+ } else {
+ gl_->GenSyncTokenCHROMIUM(sync_token->GetData());
+ }
+ }
+
+ void WaitSyncToken(const gpu::SyncToken& sync_token) override {
+ if (sync_token.HasData()) {
+ gl_->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
+ if (sync_token_.HasData() && sync_token_ != sync_token) {
+ gl_->WaitSyncTokenCHROMIUM(sync_token_.GetConstData());
+ sync_token_.Clear();
+ }
+ }
+ }
+
+ private:
+ gpu::gles2::GLES2Interface* gl_;
+ gpu::SyncToken sync_token_;
+ DISALLOW_COPY_AND_ASSIGN(SyncTokenClientImpl);
+};
+
+// Sync tokens passed downstream to the compositor can be unverified.
+void GenerateCompositorSyncToken(gpu::gles2::GLES2Interface* gl,
+ gpu::SyncToken* sync_token) {
+ gl->GenUnverifiedSyncTokenCHROMIUM(sync_token->GetData());
+}
+
+// For frames that we receive in software format, determine the dimensions of
+// each plane in the frame.
+gfx::Size SoftwarePlaneDimension(VideoFrame* input_frame,
+ bool software_compositor,
+ size_t plane_index) {
+ gfx::Size coded_size = input_frame->coded_size();
+ if (software_compositor)
+ return coded_size;
+
+ int plane_width = VideoFrame::Columns(plane_index, input_frame->format(),
+ coded_size.width());
+ int plane_height =
+ VideoFrame::Rows(plane_index, input_frame->format(), coded_size.height());
+ return gfx::Size(plane_width, plane_height);
+}
+
+} // namespace
+
+VideoFrameExternalResources::VideoFrameExternalResources() = default;
+VideoFrameExternalResources::~VideoFrameExternalResources() = default;
+
+VideoFrameExternalResources::VideoFrameExternalResources(
+ VideoFrameExternalResources&& other) = default;
+VideoFrameExternalResources& VideoFrameExternalResources::operator=(
+ VideoFrameExternalResources&& other) = default;
+
+// Resource for a video plane allocated and owned by VideoResourceUpdater. There
+// can be multiple plane resources for each video frame, depending on the
+// format. These will be reused when possible.
+class VideoResourceUpdater::PlaneResource {
+ public:
+ PlaneResource(uint32_t plane_resource_id,
+ const gfx::Size& resource_size,
+ viz::ResourceFormat resource_format,
+ bool is_software)
+ : plane_resource_id_(plane_resource_id),
+ resource_size_(resource_size),
+ resource_format_(resource_format),
+ is_software_(is_software) {}
+ virtual ~PlaneResource() = default;
+
+ // Casts |this| to SoftwarePlaneResource for software compositing.
+ SoftwarePlaneResource* AsSoftware();
+
+ // Casts |this| to HardwarePlaneResource for GPU compositing.
+ HardwarePlaneResource* AsHardware();
+
+ // Returns true if this resource matches the unique identifiers of another
+ // VideoFrame resource.
+ bool Matches(int unique_frame_id, size_t plane_index) {
+ return has_unique_frame_id_and_plane_index_ &&
+ unique_frame_id_ == unique_frame_id && plane_index_ == plane_index;
+ }
+
+ // Sets the unique identifiers for this resource, may only be called when
+ // there is a single reference to the resource (i.e. |ref_count_| == 1).
+ void SetUniqueId(int unique_frame_id, size_t plane_index) {
+ DCHECK_EQ(ref_count_, 1);
+ plane_index_ = plane_index;
+ unique_frame_id_ = unique_frame_id;
+ has_unique_frame_id_and_plane_index_ = true;
+ }
+
+ // Accessors for resource identifiers provided at construction time.
+ uint32_t plane_resource_id() const { return plane_resource_id_; }
+ const gfx::Size& resource_size() const { return resource_size_; }
+ viz::ResourceFormat resource_format() const { return resource_format_; }
+
+ // Various methods for managing references. See |ref_count_| for details.
+ void add_ref() { ++ref_count_; }
+ void remove_ref() { --ref_count_; }
+ void clear_refs() { ref_count_ = 0; }
+ bool has_refs() const { return ref_count_ != 0; }
+
+ private:
+ const uint32_t plane_resource_id_;
+ const gfx::Size resource_size_;
+ const viz::ResourceFormat resource_format_;
+ const bool is_software_;
+
+ // The number of times this resource has been imported vs number of times this
+ // resource has returned.
+ int ref_count_ = 0;
+
+ // These two members are used for identifying the data stored in this
+ // resource; they uniquely identify a VideoFrame plane.
+ int unique_frame_id_ = 0;
+ size_t plane_index_ = 0u;
+ // Indicates if the above two members have been set or not.
+ bool has_unique_frame_id_and_plane_index_ = false;
+
+ DISALLOW_COPY_AND_ASSIGN(PlaneResource);
+};
+
+class VideoResourceUpdater::SoftwarePlaneResource
+ : public VideoResourceUpdater::PlaneResource {
+ public:
+ SoftwarePlaneResource(uint32_t plane_resource_id,
+ const gfx::Size& size,
+ viz::SharedBitmapReporter* shared_bitmap_reporter)
+ : PlaneResource(plane_resource_id,
+ size,
+ viz::ResourceFormat::RGBA_8888,
+ /*is_software=*/true),
+ shared_bitmap_reporter_(shared_bitmap_reporter),
+ shared_bitmap_id_(viz::SharedBitmap::GenerateId()) {
+ DCHECK(shared_bitmap_reporter_);
+
+ // Allocate SharedMemory and notify display compositor of the allocation.
+ shared_memory_ = viz::bitmap_allocation::AllocateMappedBitmap(
+ resource_size(), viz::ResourceFormat::RGBA_8888);
+ mojo::ScopedSharedBufferHandle handle =
+ viz::bitmap_allocation::DuplicateAndCloseMappedBitmap(
+ shared_memory_.get(), resource_size(),
+ viz::ResourceFormat::RGBA_8888);
+ shared_bitmap_reporter_->DidAllocateSharedBitmap(std::move(handle),
+ shared_bitmap_id_);
+ }
+ ~SoftwarePlaneResource() override {
+ shared_bitmap_reporter_->DidDeleteSharedBitmap(shared_bitmap_id_);
+ }
+
+ const viz::SharedBitmapId& shared_bitmap_id() const {
+ return shared_bitmap_id_;
+ }
+ void* pixels() { return shared_memory_->memory(); }
+
+ // Returns a memory dump GUID consistent across processes.
+ base::UnguessableToken GetSharedMemoryGuid() const {
+ return shared_memory_->mapped_id();
+ }
+
+ private:
+ viz::SharedBitmapReporter* const shared_bitmap_reporter_;
+ const viz::SharedBitmapId shared_bitmap_id_;
+ std::unique_ptr<base::SharedMemory> shared_memory_;
+
+ DISALLOW_COPY_AND_ASSIGN(SoftwarePlaneResource);
+};
+
+class VideoResourceUpdater::HardwarePlaneResource
+ : public VideoResourceUpdater::PlaneResource {
+ public:
+ HardwarePlaneResource(uint32_t plane_resource_id,
+ const gfx::Size& size,
+ viz::ResourceFormat format,
+ viz::ContextProvider* context_provider,
+ viz::TextureAllocation allocation)
+ : PlaneResource(plane_resource_id, size, format, /*is_software=*/false),
+ context_provider_(context_provider),
+ allocation_(std::move(allocation)) {
+ DCHECK(context_provider_);
+ context_provider_->ContextGL()->ProduceTextureDirectCHROMIUM(
+ allocation_.texture_id, mailbox_.name);
+ }
+ ~HardwarePlaneResource() override {
+ context_provider_->ContextGL()->DeleteTextures(1, &allocation_.texture_id);
+ }
+
+ const gpu::Mailbox& mailbox() const { return mailbox_; }
+ GLuint texture_id() const { return allocation_.texture_id; }
+ GLenum texture_target() const { return allocation_.texture_target; }
+ bool overlay_candidate() const { return allocation_.overlay_candidate; }
+
+ private:
+ viz::ContextProvider* const context_provider_;
+ gpu::Mailbox mailbox_;
+ const viz::TextureAllocation allocation_;
+
+ DISALLOW_COPY_AND_ASSIGN(HardwarePlaneResource);
+};
+
+VideoResourceUpdater::SoftwarePlaneResource*
+VideoResourceUpdater::PlaneResource::AsSoftware() {
+ DCHECK(is_software_);
+ return static_cast<SoftwarePlaneResource*>(this);
+}
+
+VideoResourceUpdater::HardwarePlaneResource*
+VideoResourceUpdater::PlaneResource::AsHardware() {
+ DCHECK(!is_software_);
+ return static_cast<HardwarePlaneResource*>(this);
+}
+
+VideoResourceUpdater::VideoResourceUpdater(
+ viz::ContextProvider* context_provider,
+ viz::SharedBitmapReporter* shared_bitmap_reporter,
+ viz::ClientResourceProvider* resource_provider,
+ bool use_stream_video_draw_quad,
+ bool use_gpu_memory_buffer_resources,
+ bool use_r16_texture,
+ int max_resource_size)
+ : context_provider_(context_provider),
+ shared_bitmap_reporter_(shared_bitmap_reporter),
+ resource_provider_(resource_provider),
+ use_stream_video_draw_quad_(use_stream_video_draw_quad),
+ use_gpu_memory_buffer_resources_(use_gpu_memory_buffer_resources),
+ use_r16_texture_(use_r16_texture),
+ max_resource_size_(max_resource_size),
+ tracing_id_(g_next_video_resource_updater_id.GetNext()),
+ weak_ptr_factory_(this) {
+ DCHECK(context_provider_ || shared_bitmap_reporter_);
+
+ base::trace_event::MemoryDumpManager::GetInstance()->RegisterDumpProvider(
+ this, "media::VideoResourceUpdater", base::ThreadTaskRunnerHandle::Get());
+}
+
+VideoResourceUpdater::~VideoResourceUpdater() {
+ base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider(
+ this);
+}
+
+void VideoResourceUpdater::ObtainFrameResources(
+ scoped_refptr<VideoFrame> video_frame) {
+ VideoFrameExternalResources external_resources =
+ CreateExternalResourcesFromVideoFrame(video_frame);
+ frame_resource_type_ = external_resources.type;
+
+ if (external_resources.type == VideoFrameResourceType::YUV) {
+ frame_resource_offset_ = external_resources.offset;
+ frame_resource_multiplier_ = external_resources.multiplier;
+ frame_bits_per_channel_ = external_resources.bits_per_channel;
+ }
+
+ DCHECK_EQ(external_resources.resources.size(),
+ external_resources.release_callbacks.size());
+ for (size_t i = 0; i < external_resources.resources.size(); ++i) {
+ viz::ResourceId resource_id = resource_provider_->ImportResource(
+ external_resources.resources[i],
+ viz::SingleReleaseCallback::Create(
+ std::move(external_resources.release_callbacks[i])));
+ frame_resources_.push_back(
+ {resource_id, external_resources.resources[i].size});
+ }
+ TRACE_EVENT_INSTANT1("media", "VideoResourceUpdater::ObtainFrameResources",
+ TRACE_EVENT_SCOPE_THREAD, "Timestamp",
+ video_frame->timestamp().InMicroseconds());
+}
+
+void VideoResourceUpdater::ReleaseFrameResources() {
+ for (auto& frame_resource : frame_resources_)
+ resource_provider_->RemoveImportedResource(frame_resource.id);
+ frame_resources_.clear();
+}
+
+void VideoResourceUpdater::AppendQuads(viz::RenderPass* render_pass,
+ scoped_refptr<VideoFrame> frame,
+ gfx::Transform transform,
+ gfx::Size rotated_size,
+ gfx::Rect visible_layer_rect,
+ gfx::Rect clip_rect,
+ bool is_clipped,
+ bool contents_opaque,
+ float draw_opacity,
+ int sorting_context_id,
+ gfx::Rect visible_quad_rect) {
+ DCHECK(frame.get());
+
+ viz::SharedQuadState* shared_quad_state =
+ render_pass->CreateAndAppendSharedQuadState();
+ gfx::Rect rotated_size_rect(rotated_size);
+ shared_quad_state->SetAll(
+ transform, rotated_size_rect, visible_layer_rect, clip_rect, is_clipped,
+ contents_opaque, draw_opacity, SkBlendMode::kSrcOver, sorting_context_id);
+
+ gfx::Rect quad_rect(rotated_size);
+ gfx::Rect visible_rect = frame->visible_rect();
+ bool needs_blending = !contents_opaque;
+ gfx::Size coded_size = frame->coded_size();
+
+ const float tex_width_scale =
+ static_cast<float>(visible_rect.width()) / coded_size.width();
+ const float tex_height_scale =
+ static_cast<float>(visible_rect.height()) / coded_size.height();
+
+ switch (frame_resource_type_) {
+ case VideoFrameResourceType::YUV: {
+ const gfx::Size ya_tex_size = coded_size;
+
+ int u_width = VideoFrame::Columns(VideoFrame::kUPlane, frame->format(),
+ coded_size.width());
+ int u_height = VideoFrame::Rows(VideoFrame::kUPlane, frame->format(),
+ coded_size.height());
+ gfx::Size uv_tex_size(u_width, u_height);
+
+ if (frame->HasTextures()) {
+ if (frame->format() == PIXEL_FORMAT_NV12) {
+ DCHECK_EQ(2u, frame_resources_.size());
+ } else {
+ DCHECK_EQ(PIXEL_FORMAT_I420, frame->format());
+ DCHECK_EQ(3u,
+ frame_resources_.size()); // Alpha is not supported yet.
+ }
+ } else {
+ DCHECK_GE(frame_resources_.size(), 3u);
+ DCHECK(frame_resources_.size() <= 3 ||
+ ya_tex_size == VideoFrame::PlaneSize(frame->format(),
+ VideoFrame::kAPlane,
+ coded_size));
+ }
+
+ // Compute the UV sub-sampling factor based on the ratio between
+ // |ya_tex_size| and |uv_tex_size|.
+ float uv_subsampling_factor_x =
+ static_cast<float>(ya_tex_size.width()) / uv_tex_size.width();
+ float uv_subsampling_factor_y =
+ static_cast<float>(ya_tex_size.height()) / uv_tex_size.height();
+ gfx::RectF ya_tex_coord_rect(visible_rect);
+ gfx::RectF uv_tex_coord_rect(
+ visible_rect.x() / uv_subsampling_factor_x,
+ visible_rect.y() / uv_subsampling_factor_y,
+ visible_rect.width() / uv_subsampling_factor_x,
+ visible_rect.height() / uv_subsampling_factor_y);
+
+ auto* yuv_video_quad =
+ render_pass->CreateAndAppendDrawQuad<viz::YUVVideoDrawQuad>();
+ yuv_video_quad->SetNew(
+ shared_quad_state, quad_rect, visible_quad_rect, needs_blending,
+ ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size, uv_tex_size,
+ frame_resources_[0].id, frame_resources_[1].id,
+ frame_resources_.size() > 2 ? frame_resources_[2].id
+ : frame_resources_[1].id,
+ frame_resources_.size() > 3 ? frame_resources_[3].id : 0,
+ frame->ColorSpace(), frame_resource_offset_,
+ frame_resource_multiplier_, frame_bits_per_channel_);
+ yuv_video_quad->require_overlay =
+ frame->metadata()->IsTrue(VideoFrameMetadata::REQUIRE_OVERLAY);
+ yuv_video_quad->is_protected_video =
+ frame->metadata()->IsTrue(VideoFrameMetadata::PROTECTED_VIDEO);
+
+ for (viz::ResourceId resource_id : yuv_video_quad->resources) {
+ resource_provider_->ValidateResource(resource_id);
+ }
+ break;
+ }
+ case VideoFrameResourceType::RGBA:
+ case VideoFrameResourceType::RGBA_PREMULTIPLIED:
+ case VideoFrameResourceType::RGB: {
+ DCHECK_EQ(frame_resources_.size(), 1u);
+ if (frame_resources_.size() < 1u)
+ break;
+ bool premultiplied_alpha =
+ frame_resource_type_ == VideoFrameResourceType::RGBA_PREMULTIPLIED;
+ gfx::PointF uv_top_left(0.f, 0.f);
+ gfx::PointF uv_bottom_right(tex_width_scale, tex_height_scale);
+ float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f};
+ bool flipped = false;
+ bool nearest_neighbor = false;
+ auto* texture_quad =
+ render_pass->CreateAndAppendDrawQuad<viz::TextureDrawQuad>();
+ texture_quad->SetNew(shared_quad_state, quad_rect, visible_quad_rect,
+ needs_blending, frame_resources_[0].id,
+ premultiplied_alpha, uv_top_left, uv_bottom_right,
+ SK_ColorTRANSPARENT, opacity, flipped,
+ nearest_neighbor, false);
+ texture_quad->set_resource_size_in_pixels(coded_size);
+ for (viz::ResourceId resource_id : texture_quad->resources) {
+ resource_provider_->ValidateResource(resource_id);
+ }
+ break;
+ }
+ case VideoFrameResourceType::STREAM_TEXTURE: {
+ DCHECK_EQ(frame_resources_.size(), 1u);
+ if (frame_resources_.size() < 1u)
+ break;
+ gfx::Transform scale;
+ scale.Scale(tex_width_scale, tex_height_scale);
+ auto* stream_video_quad =
+ render_pass->CreateAndAppendDrawQuad<viz::StreamVideoDrawQuad>();
+ stream_video_quad->SetNew(shared_quad_state, quad_rect, visible_quad_rect,
+ needs_blending, frame_resources_[0].id,
+ frame_resources_[0].size_in_pixels, scale);
+ for (viz::ResourceId resource_id : stream_video_quad->resources) {
+ resource_provider_->ValidateResource(resource_id);
+ }
+ break;
+ }
+ case VideoFrameResourceType::NONE:
+ NOTIMPLEMENTED();
+ break;
+ }
+}
+
+VideoFrameExternalResources
+VideoResourceUpdater::CreateExternalResourcesFromVideoFrame(
+ scoped_refptr<VideoFrame> video_frame) {
+ if (video_frame->format() == PIXEL_FORMAT_UNKNOWN)
+ return VideoFrameExternalResources();
+ DCHECK(video_frame->HasTextures() || video_frame->IsMappable());
+ if (video_frame->HasTextures())
+ return CreateForHardwarePlanes(std::move(video_frame));
+ else
+ return CreateForSoftwarePlanes(std::move(video_frame));
+}
+
+viz::ResourceFormat VideoResourceUpdater::YuvResourceFormat(
+ int bits_per_channel) {
+ DCHECK(context_provider_);
+ const auto& caps = context_provider_->ContextCapabilities();
+ if (caps.disable_one_component_textures)
+ return viz::RGBA_8888;
+ if (bits_per_channel <= 8)
+ return caps.texture_rg ? viz::RED_8 : viz::LUMINANCE_8;
+ if (use_r16_texture_ && caps.texture_norm16)
+ return viz::R16_EXT;
+ if (caps.texture_half_float_linear)
+ return viz::LUMINANCE_F16;
+ return viz::LUMINANCE_8;
+}
+
+VideoResourceUpdater::PlaneResource*
+VideoResourceUpdater::RecycleOrAllocateResource(
+ const gfx::Size& resource_size,
+ viz::ResourceFormat resource_format,
+ const gfx::ColorSpace& color_space,
+ int unique_id,
+ int plane_index) {
+ PlaneResource* recyclable_resource = nullptr;
+ for (auto& resource : all_resources_) {
+ // If the plane index is valid (positive, or 0, meaning all planes)
+ // then we are allowed to return a referenced resource that already
+ // contains the right frame data. It's safe to reuse it even if
+ // resource_provider_ holds some references to it, because those
+ // references are read-only.
+ if (plane_index != -1 && resource->Matches(unique_id, plane_index)) {
+ DCHECK(resource->resource_size() == resource_size);
+ DCHECK(resource->resource_format() == resource_format);
+ return resource.get();
+ }
+
+ // Otherwise check whether this is an unreferenced resource of the right
+ // format that we can recycle. Remember it, but don't return immediately,
+ // because we still want to find any reusable resources.
+ const bool in_use = resource->has_refs();
+
+ if (!in_use && resource->resource_size() == resource_size &&
+ resource->resource_format() == resource_format) {
+ recyclable_resource = resource.get();
+ }
+ }
+
+ if (recyclable_resource)
+ return recyclable_resource;
+
+ // There was nothing available to reuse or recycle. Allocate a new resource.
+ return AllocateResource(resource_size, resource_format, color_space);
+}
+
+VideoResourceUpdater::PlaneResource* VideoResourceUpdater::AllocateResource(
+ const gfx::Size& plane_size,
+ viz::ResourceFormat format,
+ const gfx::ColorSpace& color_space) {
+ const uint32_t plane_resource_id = next_plane_resource_id_++;
+
+ if (software_compositor()) {
+ DCHECK_EQ(format, viz::ResourceFormat::RGBA_8888);
+
+ all_resources_.push_back(std::make_unique<SoftwarePlaneResource>(
+ plane_resource_id, plane_size, shared_bitmap_reporter_));
+ } else {
+ // Video textures get composited into the display frame, the GPU doesn't
+ // draw to them directly.
+ constexpr bool kForFrameBufferAttachment = false;
+
+ viz::TextureAllocation alloc = viz::TextureAllocation::MakeTextureId(
+ context_provider_->ContextGL(),
+ context_provider_->ContextCapabilities(), format,
+ use_gpu_memory_buffer_resources_, kForFrameBufferAttachment);
+ viz::TextureAllocation::AllocateStorage(
+ context_provider_->ContextGL(),
+ context_provider_->ContextCapabilities(), format, plane_size, alloc,
+ color_space);
+
+ all_resources_.push_back(std::make_unique<HardwarePlaneResource>(
+ plane_resource_id, plane_size, format, context_provider_,
+ std::move(alloc)));
+ }
+ return all_resources_.back().get();
+}
+
+void VideoResourceUpdater::CopyHardwarePlane(
+ VideoFrame* video_frame,
+ const gfx::ColorSpace& resource_color_space,
+ const gpu::MailboxHolder& mailbox_holder,
+ VideoFrameExternalResources* external_resources) {
+ const gfx::Size output_plane_resource_size = video_frame->coded_size();
+ // The copy needs to be a direct transfer of pixel data, so we use an RGBA8
+ // target to avoid loss of precision or dropping any alpha component.
+ constexpr viz::ResourceFormat copy_resource_format =
+ viz::ResourceFormat::RGBA_8888;
+
+ const int no_unique_id = 0;
+ const int no_plane_index = -1; // Do not recycle referenced textures.
+ PlaneResource* plane_resource = RecycleOrAllocateResource(
+ output_plane_resource_size, copy_resource_format, resource_color_space,
+ no_unique_id, no_plane_index);
+ HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
+ hardware_resource->add_ref();
+
+ DCHECK_EQ(hardware_resource->texture_target(),
+ static_cast<GLenum>(GL_TEXTURE_2D));
+
+ gpu::gles2::GLES2Interface* gl = context_provider_->ContextGL();
+
+ gl->WaitSyncTokenCHROMIUM(mailbox_holder.sync_token.GetConstData());
+ uint32_t src_texture_id =
+ gl->CreateAndConsumeTextureCHROMIUM(mailbox_holder.mailbox.name);
+ gl->CopySubTextureCHROMIUM(
+ src_texture_id, 0, GL_TEXTURE_2D, hardware_resource->texture_id(), 0, 0,
+ 0, 0, 0, output_plane_resource_size.width(),
+ output_plane_resource_size.height(), false, false, false);
+ gl->DeleteTextures(1, &src_texture_id);
+
+ // Pass an empty sync token to force generation of a new sync token.
+ SyncTokenClientImpl client(gl, gpu::SyncToken());
+ gpu::SyncToken sync_token = video_frame->UpdateReleaseSyncToken(&client);
+
+ auto transferable_resource = viz::TransferableResource::MakeGL(
+ hardware_resource->mailbox(), GL_LINEAR, GL_TEXTURE_2D, sync_token);
+ transferable_resource.color_space = resource_color_space;
+ transferable_resource.format = copy_resource_format;
+ external_resources->resources.push_back(std::move(transferable_resource));
+
+ external_resources->release_callbacks.push_back(base::BindOnce(
+ &VideoResourceUpdater::RecycleResource, weak_ptr_factory_.GetWeakPtr(),
+ hardware_resource->plane_resource_id()));
+}
+
+VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
+ scoped_refptr<VideoFrame> video_frame) {
+ TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForHardwarePlanes");
+ DCHECK(video_frame->HasTextures());
+ if (!context_provider_)
+ return VideoFrameExternalResources();
+
+ VideoFrameExternalResources external_resources;
+ gfx::ColorSpace resource_color_space = video_frame->ColorSpace();
+
+ bool copy_required =
+ video_frame->metadata()->IsTrue(VideoFrameMetadata::COPY_REQUIRED);
+
+ GLuint target = video_frame->mailbox_holder(0).texture_target;
+ // If |copy_required| then we will copy into a GL_TEXTURE_2D target.
+ if (copy_required)
+ target = GL_TEXTURE_2D;
+
+ gfx::BufferFormat buffer_format;
+ external_resources.type = ExternalResourceTypeForHardwarePlanes(
+ video_frame->format(), target, video_frame->NumTextures(), &buffer_format,
+ use_stream_video_draw_quad_);
+
+ if (external_resources.type == VideoFrameResourceType::NONE) {
+ DLOG(ERROR) << "Unsupported Texture format"
+ << VideoPixelFormatToString(video_frame->format());
+ return external_resources;
+ }
+ if (external_resources.type == VideoFrameResourceType::RGB ||
+ external_resources.type == VideoFrameResourceType::RGBA ||
+ external_resources.type == VideoFrameResourceType::RGBA_PREMULTIPLIED) {
+ resource_color_space = resource_color_space.GetAsFullRangeRGB();
+ }
+
+ const size_t num_textures = video_frame->NumTextures();
+ for (size_t i = 0; i < num_textures; ++i) {
+ const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(i);
+ if (mailbox_holder.mailbox.IsZero())
+ break;
+
+ if (copy_required) {
+ CopyHardwarePlane(video_frame.get(), resource_color_space, mailbox_holder,
+ &external_resources);
+ } else {
+ auto transfer_resource = viz::TransferableResource::MakeGLOverlay(
+ mailbox_holder.mailbox, GL_LINEAR, mailbox_holder.texture_target,
+ mailbox_holder.sync_token, video_frame->coded_size(),
+ video_frame->metadata()->IsTrue(VideoFrameMetadata::ALLOW_OVERLAY));
+ transfer_resource.color_space = resource_color_space;
+ transfer_resource.read_lock_fences_enabled =
+ video_frame->metadata()->IsTrue(
+ VideoFrameMetadata::READ_LOCK_FENCES_ENABLED);
+ transfer_resource.format = viz::GetResourceFormat(buffer_format);
+
+#if defined(OS_ANDROID)
+ transfer_resource.is_backed_by_surface_texture =
+ video_frame->metadata()->IsTrue(VideoFrameMetadata::TEXTURE_OWNER);
+ transfer_resource.wants_promotion_hint = video_frame->metadata()->IsTrue(
+ VideoFrameMetadata::WANTS_PROMOTION_HINT);
+#endif
+ external_resources.resources.push_back(std::move(transfer_resource));
+ external_resources.release_callbacks.push_back(
+ base::BindOnce(&VideoResourceUpdater::ReturnTexture,
+ weak_ptr_factory_.GetWeakPtr(), video_frame));
+ }
+ }
+ return external_resources;
+}
+
+VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
+ scoped_refptr<VideoFrame> video_frame) {
+ TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes");
+ const VideoPixelFormat input_frame_format = video_frame->format();
+
+ size_t bits_per_channel = video_frame->BitDepth();
+
+ // Only YUV and Y16 software video frames are supported.
+ DCHECK(IsYuvPlanar(input_frame_format) ||
+ input_frame_format == PIXEL_FORMAT_Y16);
+
+ viz::ResourceFormat output_resource_format;
+ gfx::ColorSpace output_color_space = video_frame->ColorSpace();
+ if (input_frame_format == PIXEL_FORMAT_Y16) {
+ // Unable to display directly as yuv planes so convert it to RGBA for
+ // compositing.
+ output_resource_format = viz::RGBA_8888;
+ output_color_space = output_color_space.GetAsFullRangeRGB();
+ } else if (!software_compositor()) {
+ // Can be composited directly from yuv planes.
+ output_resource_format = YuvResourceFormat(bits_per_channel);
+ }
+
+ // If GPU compositing is enabled, but the output resource format
+ // returned by the resource provider is viz::RGBA_8888, then a GPU driver
+ // bug workaround requires that YUV frames must be converted to RGB
+ // before texture upload.
+ bool texture_needs_rgb_conversion =
+ !software_compositor() &&
+ output_resource_format == viz::ResourceFormat::RGBA_8888;
+
+ size_t output_plane_count = VideoFrame::NumPlanes(input_frame_format);
+
+ // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB
+ // conversion here. That involves an extra copy of each frame to a bitmap.
+ // Obviously, this is suboptimal and should be addressed once ubercompositor
+ // starts shaping up.
+ if (software_compositor() || texture_needs_rgb_conversion) {
+ output_resource_format = viz::RGBA_8888;
+ output_plane_count = 1;
+ bits_per_channel = 8;
+
+ // The YUV to RGB conversion will be performed when we convert
+ // from single-channel textures to an RGBA texture via
+ // ConvertVideoFrameToRGBPixels below.
+ output_color_space = output_color_space.GetAsFullRangeRGB();
+ }
+
+ std::vector<gfx::Size> outplane_plane_sizes;
+ outplane_plane_sizes.reserve(output_plane_count);
+ for (size_t i = 0; i < output_plane_count; ++i) {
+ outplane_plane_sizes.push_back(
+ SoftwarePlaneDimension(video_frame.get(), software_compositor(), i));
+ const gfx::Size& output_plane_resource_size = outplane_plane_sizes.back();
+ if (output_plane_resource_size.IsEmpty() ||
+ output_plane_resource_size.width() > max_resource_size_ ||
+ output_plane_resource_size.height() > max_resource_size_) {
+ // This output plane has invalid geometry so return an empty external
+ // resources.
+ return VideoFrameExternalResources();
+ }
+ }
+
+ // Delete recycled resources that are the wrong format or wrong size.
+ auto can_delete_resource_fn =
+ [output_resource_format,
+ &outplane_plane_sizes](const std::unique_ptr<PlaneResource>& resource) {
+ // Resources that are still being used can't be deleted.
+ if (resource->has_refs())
+ return false;
+
+ return resource->resource_format() != output_resource_format ||
+ !base::ContainsValue(outplane_plane_sizes,
+ resource->resource_size());
+ };
+ base::EraseIf(all_resources_, can_delete_resource_fn);
+
+ // Recycle or allocate resources for each video plane.
+ std::vector<PlaneResource*> plane_resources;
+ plane_resources.reserve(output_plane_count);
+ for (size_t i = 0; i < output_plane_count; ++i) {
+ plane_resources.push_back(RecycleOrAllocateResource(
+ outplane_plane_sizes[i], output_resource_format, output_color_space,
+ video_frame->unique_id(), i));
+ plane_resources.back()->add_ref();
+ }
+
+ VideoFrameExternalResources external_resources;
+
+ external_resources.bits_per_channel = bits_per_channel;
+
+ if (software_compositor() || texture_needs_rgb_conversion) {
+ DCHECK_EQ(plane_resources.size(), 1u);
+ PlaneResource* plane_resource = plane_resources[0];
+ DCHECK_EQ(plane_resource->resource_format(), viz::RGBA_8888);
+
+ if (!plane_resource->Matches(video_frame->unique_id(), 0)) {
+ // We need to transfer data from |video_frame| to the plane resource.
+ if (software_compositor()) {
+ if (!video_renderer_)
+ video_renderer_ = std::make_unique<PaintCanvasVideoRenderer>();
+
+ SoftwarePlaneResource* software_resource = plane_resource->AsSoftware();
+
+ // We know the format is RGBA_8888 from check above.
+ SkImageInfo info = SkImageInfo::MakeN32Premul(
+ gfx::SizeToSkISize(software_resource->resource_size()));
+
+ SkBitmap sk_bitmap;
+ sk_bitmap.installPixels(info, software_resource->pixels(),
+ info.minRowBytes());
+ cc::SkiaPaintCanvas canvas(sk_bitmap);
+
+ // This is software path, so canvas and video_frame are always backed
+ // by software.
+ video_renderer_->Copy(video_frame, &canvas, Context3D());
+ } else {
+ HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
+ size_t bytes_per_row = viz::ResourceSizes::CheckedWidthInBytes<size_t>(
+ video_frame->coded_size().width(), viz::ResourceFormat::RGBA_8888);
+ size_t needed_size = bytes_per_row * video_frame->coded_size().height();
+ if (upload_pixels_size_ < needed_size) {
+ // Free the existing data first so that the memory can be reused,
+ // if possible. Note that the new array is purposely not initialized.
+ upload_pixels_.reset();
+ upload_pixels_.reset(new uint8_t[needed_size]);
+ upload_pixels_size_ = needed_size;
+ }
+
+ PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
+ video_frame.get(), upload_pixels_.get(), bytes_per_row);
+
+ // Copy pixels into texture.
+ auto* gl = context_provider_->ContextGL();
+ gl->BindTexture(hardware_resource->texture_target(),
+ hardware_resource->texture_id());
+ const gfx::Size& plane_size = hardware_resource->resource_size();
+ gl->TexSubImage2D(
+ hardware_resource->texture_target(), 0, 0, 0, plane_size.width(),
+ plane_size.height(), GLDataFormat(viz::ResourceFormat::RGBA_8888),
+ GLDataType(viz::ResourceFormat::RGBA_8888), upload_pixels_.get());
+ }
+ plane_resource->SetUniqueId(video_frame->unique_id(), 0);
+ }
+
+ viz::TransferableResource transferable_resource;
+ if (software_compositor()) {
+ SoftwarePlaneResource* software_resource = plane_resource->AsSoftware();
+ external_resources.type = VideoFrameResourceType::RGBA_PREMULTIPLIED;
+ transferable_resource = viz::TransferableResource::MakeSoftware(
+ software_resource->shared_bitmap_id(),
+ software_resource->resource_size(),
+ plane_resource->resource_format());
+ } else {
+ HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
+ external_resources.type = VideoFrameResourceType::RGBA;
+ gpu::SyncToken sync_token;
+ GenerateCompositorSyncToken(context_provider_->ContextGL(), &sync_token);
+ transferable_resource = viz::TransferableResource::MakeGLOverlay(
+ hardware_resource->mailbox(), GL_LINEAR,
+ hardware_resource->texture_target(), sync_token,
+ hardware_resource->resource_size(),
+ hardware_resource->overlay_candidate());
+ }
+
+ transferable_resource.color_space = output_color_space;
+ transferable_resource.format = viz::ResourceFormat::RGBA_8888;
+ external_resources.resources.push_back(std::move(transferable_resource));
+ external_resources.release_callbacks.push_back(base::BindOnce(
+ &VideoResourceUpdater::RecycleResource, weak_ptr_factory_.GetWeakPtr(),
+ plane_resource->plane_resource_id()));
+
+ return external_resources;
+ }
+
+ const viz::ResourceFormat yuv_resource_format =
+ YuvResourceFormat(bits_per_channel);
+ DCHECK(yuv_resource_format == viz::LUMINANCE_F16 ||
+ yuv_resource_format == viz::R16_EXT ||
+ yuv_resource_format == viz::LUMINANCE_8 ||
+ yuv_resource_format == viz::RED_8)
+ << yuv_resource_format;
+
+ std::unique_ptr<HalfFloatMaker> half_float_maker;
+ if (yuv_resource_format == viz::LUMINANCE_F16) {
+ half_float_maker = HalfFloatMaker::NewHalfFloatMaker(bits_per_channel);
+ external_resources.offset = half_float_maker->Offset();
+ external_resources.multiplier = half_float_maker->Multiplier();
+ } else if (yuv_resource_format == viz::R16_EXT) {
+ external_resources.multiplier = 65535.0f / ((1 << bits_per_channel) - 1);
+ external_resources.offset = 0;
+ }
+
+ // We need to transfer data from |video_frame| to the plane resources.
+ for (size_t i = 0; i < plane_resources.size(); ++i) {
+ HardwarePlaneResource* plane_resource = plane_resources[i]->AsHardware();
+
+ // Skip the transfer if this |video_frame|'s plane has been processed.
+ if (plane_resource->Matches(video_frame->unique_id(), i))
+ continue;
+
+ const viz::ResourceFormat plane_resource_format =
+ plane_resource->resource_format();
+ DCHECK_EQ(plane_resource_format, yuv_resource_format);
+
+ // TODO(hubbe): Move upload code to media/.
+ // TODO(reveman): Can use GpuMemoryBuffers here to improve performance.
+
+ // |video_stride_bytes| is the width of the |video_frame| we are uploading
+ // (including non-frame data to fill in the stride).
+ const int video_stride_bytes = video_frame->stride(i);
+
+ // |resource_size_pixels| is the size of the destination resource.
+ const gfx::Size resource_size_pixels = plane_resource->resource_size();
+
+ const size_t bytes_per_row =
+ viz::ResourceSizes::CheckedWidthInBytes<size_t>(
+ resource_size_pixels.width(), plane_resource_format);
+ // Use 4-byte row alignment (OpenGL default) for upload performance.
+ // Assuming that GL_UNPACK_ALIGNMENT has not changed from default.
+ const size_t upload_image_stride =
+ cc::MathUtil::CheckedRoundUp<size_t>(bytes_per_row, 4u);
+
+ const size_t resource_bit_depth =
+ static_cast<size_t>(viz::BitsPerPixel(plane_resource_format));
+
+ // Data downshifting is needed if the resource bit depth is not enough.
+ const bool needs_bit_downshifting = bits_per_channel > resource_bit_depth;
+
+ // A copy to adjust strides is needed if those are different and both source
+ // and destination have the same bit depth.
+ const bool needs_stride_adaptation =
+ (bits_per_channel == resource_bit_depth) &&
+ (upload_image_stride != static_cast<size_t>(video_stride_bytes));
+
+ // We need to convert the incoming data if we're transferring to half float,
+ // if the need a bit downshift or if the strides need to be reconciled.
+ const bool needs_conversion = plane_resource_format == viz::LUMINANCE_F16 ||
+ needs_bit_downshifting ||
+ needs_stride_adaptation;
+
+ const uint8_t* pixels;
+ if (!needs_conversion) {
+ pixels = video_frame->data(i);
+ } else {
+ // Avoid malloc for each frame/plane if possible.
+ const size_t needed_size =
+ upload_image_stride * resource_size_pixels.height();
+ if (upload_pixels_size_ < needed_size) {
+ // Free the existing data first so that the memory can be reused,
+ // if possible. Note that the new array is purposely not initialized.
+ upload_pixels_.reset();
+ upload_pixels_.reset(new uint8_t[needed_size]);
+ upload_pixels_size_ = needed_size;
+ }
+
+ if (plane_resource_format == viz::LUMINANCE_F16) {
+ for (int row = 0; row < resource_size_pixels.height(); ++row) {
+ uint16_t* dst = reinterpret_cast<uint16_t*>(
+ &upload_pixels_[upload_image_stride * row]);
+ const uint16_t* src = reinterpret_cast<uint16_t*>(
+ video_frame->data(i) + (video_stride_bytes * row));
+ half_float_maker->MakeHalfFloats(src, bytes_per_row / 2, dst);
+ }
+ } else if (needs_bit_downshifting) {
+ DCHECK(plane_resource_format == viz::LUMINANCE_8 ||
+ plane_resource_format == viz::RED_8);
+ const int scale = 0x10000 >> (bits_per_channel - 8);
+ libyuv::Convert16To8Plane(
+ reinterpret_cast<uint16_t*>(video_frame->data(i)),
+ video_stride_bytes / 2, upload_pixels_.get(), upload_image_stride,
+ scale, bytes_per_row, resource_size_pixels.height());
+ } else {
+ // Make a copy to reconcile stride, size and format being equal.
+ DCHECK(needs_stride_adaptation);
+ DCHECK(plane_resource_format == viz::LUMINANCE_8 ||
+ plane_resource_format == viz::RED_8);
+ libyuv::CopyPlane(video_frame->data(i), video_stride_bytes,
+ upload_pixels_.get(), upload_image_stride,
+ resource_size_pixels.width(),
+ resource_size_pixels.height());
+ }
+
+ pixels = upload_pixels_.get();
+ }
+
+ // Copy pixels into texture. TexSubImage2D() is applicable because
+ // |yuv_resource_format| is LUMINANCE_F16, R16_EXT, LUMINANCE_8 or RED_8.
+ auto* gl = context_provider_->ContextGL();
+ gl->BindTexture(plane_resource->texture_target(),
+ plane_resource->texture_id());
+ DCHECK(GLSupportsFormat(plane_resource_format));
+ gl->TexSubImage2D(
+ plane_resource->texture_target(), 0, 0, 0, resource_size_pixels.width(),
+ resource_size_pixels.height(), GLDataFormat(plane_resource_format),
+ GLDataType(plane_resource_format), pixels);
+
+ plane_resource->SetUniqueId(video_frame->unique_id(), i);
+ }
+
+ // Set the sync token otherwise resource is assumed to be synchronized.
+ gpu::SyncToken sync_token;
+ GenerateCompositorSyncToken(context_provider_->ContextGL(), &sync_token);
+
+ for (size_t i = 0; i < plane_resources.size(); ++i) {
+ HardwarePlaneResource* plane_resource = plane_resources[i]->AsHardware();
+ auto transferable_resource = viz::TransferableResource::MakeGLOverlay(
+ plane_resource->mailbox(), GL_LINEAR, plane_resource->texture_target(),
+ sync_token, plane_resource->resource_size(),
+ plane_resource->overlay_candidate());
+ transferable_resource.color_space = output_color_space;
+ transferable_resource.format = output_resource_format;
+ external_resources.resources.push_back(std::move(transferable_resource));
+ external_resources.release_callbacks.push_back(base::BindOnce(
+ &VideoResourceUpdater::RecycleResource, weak_ptr_factory_.GetWeakPtr(),
+ plane_resource->plane_resource_id()));
+ }
+
+ external_resources.type = VideoFrameResourceType::YUV;
+ return external_resources;
+}
+
+void VideoResourceUpdater::ReturnTexture(
+ const scoped_refptr<VideoFrame>& video_frame,
+ const gpu::SyncToken& sync_token,
+ bool lost_resource) {
+ // TODO(dshwang): Forward to the decoder as a lost resource.
+ if (lost_resource)
+ return;
+
+ // The video frame will insert a wait on the previous release sync token.
+ SyncTokenClientImpl client(context_provider_->ContextGL(), sync_token);
+ video_frame->UpdateReleaseSyncToken(&client);
+}
+
+void VideoResourceUpdater::RecycleResource(uint32_t plane_resource_id,
+ const gpu::SyncToken& sync_token,
+ bool lost_resource) {
+ auto matches_id_fn =
+ [plane_resource_id](const std::unique_ptr<PlaneResource>& resource) {
+ return resource->plane_resource_id() == plane_resource_id;
+ };
+ auto resource_it =
+ std::find_if(all_resources_.begin(), all_resources_.end(), matches_id_fn);
+ if (resource_it == all_resources_.end())
+ return;
+
+ if (context_provider_ && sync_token.HasData()) {
+ context_provider_->ContextGL()->WaitSyncTokenCHROMIUM(
+ sync_token.GetConstData());
+ }
+
+ if (lost_resource) {
+ all_resources_.erase(resource_it);
+ } else {
+ (*resource_it)->remove_ref();
+ }
+}
+
+bool VideoResourceUpdater::OnMemoryDump(
+ const base::trace_event::MemoryDumpArgs& args,
+ base::trace_event::ProcessMemoryDump* pmd) {
+ for (auto& resource : all_resources_) {
+ std::string dump_name =
+ base::StringPrintf("cc/video_memory/updater_%d/resource_%d",
+ tracing_id_, resource->plane_resource_id());
+ base::trace_event::MemoryAllocatorDump* dump =
+ pmd->CreateAllocatorDump(dump_name);
+
+ const uint64_t total_bytes =
+ viz::ResourceSizes::UncheckedSizeInBytesAligned<uint64_t>(
+ resource->resource_size(), resource->resource_format());
+ dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
+ base::trace_event::MemoryAllocatorDump::kUnitsBytes,
+ total_bytes);
+
+ // The importance value assigned to the GUID here must be greater than the
+ // importance value assigned elsewhere so that resource ownership is
+ // attributed to VideoResourceUpdater.
+ constexpr int kImportance = 2;
+
+ // Resources are shared across processes and require a shared GUID to
+ // prevent double counting the memory.
+ if (software_compositor()) {
+ base::UnguessableToken shm_guid =
+ resource->AsSoftware()->GetSharedMemoryGuid();
+ pmd->CreateSharedMemoryOwnershipEdge(dump->guid(), shm_guid, kImportance);
+ } else {
+ base::trace_event::MemoryAllocatorDumpGuid guid =
+ gl::GetGLTextureClientGUIDForTracing(
+ context_provider_->ContextSupport()->ShareGroupTracingGUID(),
+ resource->AsHardware()->texture_id());
+ pmd->CreateSharedGlobalAllocatorDump(guid);
+ pmd->AddOwnershipEdge(dump->guid(), guid, kImportance);
+ }
+ }
+
+ return true;
+}
+
+} // namespace media
diff --git a/chromium/media/renderers/video_resource_updater.h b/chromium/media/renderers/video_resource_updater.h
new file mode 100644
index 00000000000..9ea3f503ca9
--- /dev/null
+++ b/chromium/media/renderers/video_resource_updater.h
@@ -0,0 +1,223 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_RENDERERS_VIDEO_RESOURCE_UPDATER_H_
+#define MEDIA_RENDERERS_VIDEO_RESOURCE_UPDATER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <list>
+#include <memory>
+#include <vector>
+
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/weak_ptr.h"
+#include "base/time/time.h"
+#include "base/trace_event/memory_dump_provider.h"
+#include "components/viz/common/resources/release_callback.h"
+#include "components/viz/common/resources/resource_format.h"
+#include "components/viz/common/resources/resource_id.h"
+#include "components/viz/common/resources/transferable_resource.h"
+#include "media/base/media_export.h"
+#include "ui/gfx/buffer_types.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace gfx {
+class Rect;
+class Transform;
+} // namespace gfx
+
+namespace viz {
+class ClientResourceProvider;
+class ContextProvider;
+class RenderPass;
+class SharedBitmapReporter;
+} // namespace viz
+
+namespace media {
+class PaintCanvasVideoRenderer;
+class VideoFrame;
+
+// Specifies what type of data is contained in the mailboxes, as well as how
+// many mailboxes will be present.
+enum class VideoFrameResourceType {
+ NONE,
+ YUV,
+ RGB,
+ RGBA_PREMULTIPLIED,
+ RGBA,
+ STREAM_TEXTURE,
+};
+
+class MEDIA_EXPORT VideoFrameExternalResources {
+ public:
+ VideoFrameResourceType type = VideoFrameResourceType::NONE;
+ std::vector<viz::TransferableResource> resources;
+ std::vector<viz::ReleaseCallback> release_callbacks;
+
+ // Used by hardware textures which do not return values in the 0-1 range.
+ // After a lookup, subtract offset and multiply by multiplier.
+ float offset = 0.f;
+ float multiplier = 1.f;
+ uint32_t bits_per_channel = 8;
+
+ VideoFrameExternalResources();
+ VideoFrameExternalResources(VideoFrameExternalResources&& other);
+ VideoFrameExternalResources& operator=(VideoFrameExternalResources&& other);
+ ~VideoFrameExternalResources();
+};
+
+// VideoResourceUpdater is used by the video system to produce frame content as
+// resources consumable by the display compositor.
+class MEDIA_EXPORT VideoResourceUpdater
+ : public base::trace_event::MemoryDumpProvider {
+ public:
+ // For GPU compositing |context_provider| should be provided and for software
+ // compositing |shared_bitmap_reporter| should be provided. If there is a
+ // non-null |context_provider| we assume GPU compositing.
+ VideoResourceUpdater(viz::ContextProvider* context_provider,
+ viz::SharedBitmapReporter* shared_bitmap_reporter,
+ viz::ClientResourceProvider* resource_provider,
+ bool use_stream_video_draw_quad,
+ bool use_gpu_memory_buffer_resources,
+ bool use_r16_texture,
+ int max_resource_size);
+
+ ~VideoResourceUpdater() override;
+
+ // For each CompositorFrame the following sequence is expected:
+ // 1. ObtainFrameResources(): Import resources for the next video frame with
+ // viz::ClientResourceProvider. This will reuse existing GPU or
+ // SharedMemory buffers if possible, otherwise it will allocate new ones.
+ // 2. AppendQuads(): Add DrawQuads to CompositorFrame for video.
+ // 3. ReleaseFrameResources(): After the CompositorFrame has been submitted,
+ // remove imported resources from viz::ClientResourceProvider.
+ void ObtainFrameResources(scoped_refptr<VideoFrame> video_frame);
+ void ReleaseFrameResources();
+ // Appends a quad representing |frame| to |render_pass|.
+ // At most one quad is expected to be appended, this is enforced by the users
+ // of this class (e.g: VideoFrameSubmitter). Producing only one quad will
+ // allow viz to optimize compositing when the only content changing per-frame
+ // is the video.
+ void AppendQuads(viz::RenderPass* render_pass,
+ scoped_refptr<VideoFrame> frame,
+ gfx::Transform transform,
+ gfx::Size rotated_size,
+ gfx::Rect visible_layer_rect,
+ gfx::Rect clip_rect,
+ bool is_clipped,
+ bool context_opaque,
+ float draw_opacity,
+ int sorting_context_id,
+ gfx::Rect visible_quad_rect);
+
+ // TODO(kylechar): This is only public for testing, make private.
+ VideoFrameExternalResources CreateExternalResourcesFromVideoFrame(
+ scoped_refptr<VideoFrame> video_frame);
+
+ viz::ResourceFormat YuvResourceFormat(int bits_per_channel);
+
+ private:
+ class PlaneResource;
+ class HardwarePlaneResource;
+ class SoftwarePlaneResource;
+
+ // A resource that will be embedded in a DrawQuad in the next CompositorFrame.
+ // Each video plane will correspond to one FrameResource.
+ struct FrameResource {
+ viz::ResourceId id;
+ gfx::Size size_in_pixels;
+ };
+
+ bool software_compositor() const { return context_provider_ == nullptr; }
+
+ // Obtain a resource of the right format by either recycling an
+ // unreferenced but appropriately formatted resource, or by
+ // allocating a new resource.
+ // Additionally, if the |unique_id| and |plane_index| match, then
+ // it is assumed that the resource has the right data already and will only be
+ // used for reading, and so is returned even if it is still referenced.
+ // Passing -1 for |plane_index| avoids returning referenced
+ // resources.
+ PlaneResource* RecycleOrAllocateResource(const gfx::Size& resource_size,
+ viz::ResourceFormat resource_format,
+ const gfx::ColorSpace& color_space,
+ int unique_id,
+ int plane_index);
+ PlaneResource* AllocateResource(const gfx::Size& plane_size,
+ viz::ResourceFormat format,
+ const gfx::ColorSpace& color_space);
+
+ // Create a copy of a texture-backed source video frame in a new GL_TEXTURE_2D
+ // texture. This is used when there are multiple GPU threads (Android WebView)
+ // and the source video frame texture can't be used on the output GL context.
+ // https://crbug.com/582170
+ void CopyHardwarePlane(VideoFrame* video_frame,
+ const gfx::ColorSpace& resource_color_space,
+ const gpu::MailboxHolder& mailbox_holder,
+ VideoFrameExternalResources* external_resources);
+
+ // Get resources ready to be appended into DrawQuads. This is used for GPU
+ // compositing most of the time, except for the cases mentioned in
+ // CreateForSoftwarePlanes().
+ VideoFrameExternalResources CreateForHardwarePlanes(
+ scoped_refptr<VideoFrame> video_frame);
+
+ // Get resources ready to be appended into DrawQuads. This is always used for
+ // software compositing. This is also used for GPU compositing when the input
+ // video frame has no textures.
+ VideoFrameExternalResources CreateForSoftwarePlanes(
+ scoped_refptr<VideoFrame> video_frame);
+
+ void RecycleResource(uint32_t plane_resource_id,
+ const gpu::SyncToken& sync_token,
+ bool lost_resource);
+ void ReturnTexture(const scoped_refptr<VideoFrame>& video_frame,
+ const gpu::SyncToken& sync_token,
+ bool lost_resource);
+
+ // base::trace_event::MemoryDumpProvider implementation.
+ bool OnMemoryDump(const base::trace_event::MemoryDumpArgs& args,
+ base::trace_event::ProcessMemoryDump* pmd) override;
+
+ viz::ContextProvider* const context_provider_;
+ viz::SharedBitmapReporter* const shared_bitmap_reporter_;
+ viz::ClientResourceProvider* const resource_provider_;
+ const bool use_stream_video_draw_quad_;
+ const bool use_gpu_memory_buffer_resources_;
+ // TODO(crbug.com/759456): Remove after r16 is used without the flag.
+ const bool use_r16_texture_;
+ const int max_resource_size_;
+ const int tracing_id_;
+ std::unique_ptr<PaintCanvasVideoRenderer> video_renderer_;
+ uint32_t next_plane_resource_id_ = 1;
+
+ // Temporary pixel buffer when converting between formats.
+ std::unique_ptr<uint8_t[]> upload_pixels_;
+ size_t upload_pixels_size_ = 0;
+
+ VideoFrameResourceType frame_resource_type_;
+
+ float frame_resource_offset_;
+ float frame_resource_multiplier_;
+ uint32_t frame_bits_per_channel_;
+
+ // Resources that will be placed into quads by the next call to
+ // AppendDrawQuads().
+ std::vector<FrameResource> frame_resources_;
+
+ // Resources allocated by VideoResourceUpdater. Used to recycle resources so
+ // we can reduce the number of allocations and data transfers.
+ std::vector<std::unique_ptr<PlaneResource>> all_resources_;
+
+ base::WeakPtrFactory<VideoResourceUpdater> weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoResourceUpdater);
+};
+
+} // namespace media
+
+#endif // MEDIA_RENDERERS_VIDEO_RESOURCE_UPDATER_H_
diff --git a/chromium/media/renderers/video_resource_updater_unittest.cc b/chromium/media/renderers/video_resource_updater_unittest.cc
new file mode 100644
index 00000000000..02743bd083a
--- /dev/null
+++ b/chromium/media/renderers/video_resource_updater_unittest.cc
@@ -0,0 +1,751 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/renderers/video_resource_updater.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "base/bind.h"
+#include "base/message_loop/message_loop.h"
+#include "components/viz/client/client_resource_provider.h"
+#include "components/viz/client/shared_bitmap_reporter.h"
+#include "components/viz/test/fake_output_surface.h"
+#include "components/viz/test/test_gles2_interface.h"
+#include "gpu/GLES2/gl2extchromium.h"
+#include "media/base/video_frame.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+namespace {
+
+class FakeSharedBitmapReporter : public viz::SharedBitmapReporter {
+ public:
+ FakeSharedBitmapReporter() = default;
+ ~FakeSharedBitmapReporter() override = default;
+
+ // viz::SharedBitmapReporter implementation.
+ void DidAllocateSharedBitmap(mojo::ScopedSharedBufferHandle buffer,
+ const viz::SharedBitmapId& id) override {
+ DCHECK_EQ(shared_bitmaps_.count(id), 0u);
+ shared_bitmaps_.insert(id);
+ }
+ void DidDeleteSharedBitmap(const viz::SharedBitmapId& id) override {
+ DCHECK_EQ(shared_bitmaps_.count(id), 1u);
+ shared_bitmaps_.erase(id);
+ }
+
+ const base::flat_set<viz::SharedBitmapId> shared_bitmaps() const {
+ return shared_bitmaps_;
+ }
+
+ private:
+ base::flat_set<viz::SharedBitmapId> shared_bitmaps_;
+};
+
+class UploadCounterGLES2Interface : public viz::TestGLES2Interface {
+ public:
+ void TexSubImage2D(GLenum target,
+ GLint level,
+ GLint xoffset,
+ GLint yoffset,
+ GLsizei width,
+ GLsizei height,
+ GLenum format,
+ GLenum type,
+ const void* pixels) override {
+ ++upload_count_;
+ }
+
+ void TexStorage2DEXT(GLenum target,
+ GLint levels,
+ GLuint internalformat,
+ GLint width,
+ GLint height) override {}
+
+ void GenTextures(GLsizei n, GLuint* textures) override {
+ created_texture_count_ += n;
+ viz::TestGLES2Interface::GenTextures(n, textures);
+ }
+
+ void DeleteTextures(GLsizei n, const GLuint* textures) override {
+ created_texture_count_ -= n;
+ viz::TestGLES2Interface::DeleteTextures(n, textures);
+ }
+
+ int UploadCount() { return upload_count_; }
+ void ResetUploadCount() { upload_count_ = 0; }
+
+ int TextureCreationCount() { return created_texture_count_; }
+ void ResetTextureCreationCount() { created_texture_count_ = 0; }
+
+ private:
+ int upload_count_;
+ int created_texture_count_;
+};
+
+class VideoResourceUpdaterTest : public testing::Test {
+ protected:
+ VideoResourceUpdaterTest() {
+ std::unique_ptr<UploadCounterGLES2Interface> gl(
+ new UploadCounterGLES2Interface());
+
+ gl_ = gl.get();
+ gl_->set_support_texture_storage(true);
+
+ context_provider_ = viz::TestContextProvider::Create(std::move(gl));
+ context_provider_->BindToCurrentThread();
+ }
+
+ // testing::Test implementation.
+ void SetUp() override {
+ testing::Test::SetUp();
+ resource_provider_ = std::make_unique<viz::ClientResourceProvider>(
+ /*delegated_sync_points_required=*/true);
+ }
+
+ std::unique_ptr<VideoResourceUpdater> CreateUpdaterForHardware(
+ bool use_stream_video_draw_quad = false) {
+ return std::make_unique<VideoResourceUpdater>(
+ context_provider_.get(), nullptr, resource_provider_.get(),
+ use_stream_video_draw_quad, /*use_gpu_memory_buffer_resources=*/false,
+ /*use_r16_texture=*/use_r16_texture_, /*max_resource_size=*/10000);
+ }
+
+ std::unique_ptr<VideoResourceUpdater> CreateUpdaterForSoftware() {
+ return std::make_unique<VideoResourceUpdater>(
+ nullptr, &shared_bitmap_reporter_, resource_provider_.get(),
+ /*use_stream_video_draw_quad=*/false,
+ /*use_gpu_memory_buffer_resources=*/false,
+ /*use_r16_texture=*/false,
+ /*max_resource_size=*/10000);
+ }
+
+ // Note that the number of pixels needed for |size| must be less than or equal
+ // to the number of pixels needed for size of 100x100.
+ scoped_refptr<media::VideoFrame> CreateTestYUVVideoFrame(
+ const gfx::Size& size = gfx::Size(10, 10)) {
+ constexpr int kMaxDimension = 100;
+ static uint8_t y_data[kMaxDimension * kMaxDimension] = {0};
+ static uint8_t u_data[kMaxDimension * kMaxDimension / 2] = {0};
+ static uint8_t v_data[kMaxDimension * kMaxDimension / 2] = {0};
+
+ CHECK_LE(size.width() * size.height(), kMaxDimension * kMaxDimension);
+
+ scoped_refptr<media::VideoFrame> video_frame =
+ media::VideoFrame::WrapExternalYuvData(
+ media::PIXEL_FORMAT_I422, // format
+ size, // coded_size
+ gfx::Rect(size), // visible_rect
+ size, // natural_size
+ size.width(), // y_stride
+ size.width() / 2, // u_stride
+ size.width() / 2, // v_stride
+ y_data, // y_data
+ u_data, // u_data
+ v_data, // v_data
+ base::TimeDelta()); // timestamp
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
+ scoped_refptr<media::VideoFrame> CreateWonkyTestYUVVideoFrame() {
+ const int kDimension = 10;
+ const int kYWidth = kDimension + 5;
+ const int kUWidth = (kYWidth + 1) / 2 + 200;
+ const int kVWidth = (kYWidth + 1) / 2 + 1;
+ static uint8_t y_data[kYWidth * kDimension] = {0};
+ static uint8_t u_data[kUWidth * kDimension] = {0};
+ static uint8_t v_data[kVWidth * kDimension] = {0};
+
+ scoped_refptr<media::VideoFrame> video_frame =
+ media::VideoFrame::WrapExternalYuvData(
+ media::PIXEL_FORMAT_I422, // format
+ gfx::Size(kYWidth, kDimension), // coded_size
+ gfx::Rect(2, 0, kDimension, kDimension), // visible_rect
+ gfx::Size(kDimension, kDimension), // natural_size
+ -kYWidth, // y_stride (negative)
+ kUWidth, // u_stride
+ kVWidth, // v_stride
+ y_data + kYWidth * (kDimension - 1), // y_data
+ u_data, // u_data
+ v_data, // v_data
+ base::TimeDelta()); // timestamp
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
+ scoped_refptr<media::VideoFrame> CreateTestHighBitFrame() {
+ const int kDimension = 10;
+ gfx::Size size(kDimension, kDimension);
+
+ scoped_refptr<media::VideoFrame> video_frame(media::VideoFrame::CreateFrame(
+ media::PIXEL_FORMAT_YUV420P10, size, gfx::Rect(size), size,
+ base::TimeDelta()));
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
+ void SetReleaseSyncToken(const gpu::SyncToken& sync_token) {
+ release_sync_token_ = sync_token;
+ }
+
+ scoped_refptr<media::VideoFrame> CreateTestHardwareVideoFrame(
+ media::VideoPixelFormat format,
+ unsigned target) {
+ const int kDimension = 10;
+ gfx::Size size(kDimension, kDimension);
+
+ gpu::Mailbox mailbox;
+ mailbox.name[0] = 51;
+
+ gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes] = {
+ gpu::MailboxHolder(mailbox, kMailboxSyncToken, target)};
+ scoped_refptr<media::VideoFrame> video_frame =
+ media::VideoFrame::WrapNativeTextures(
+ format, mailbox_holders,
+ base::BindOnce(&VideoResourceUpdaterTest::SetReleaseSyncToken,
+ base::Unretained(this)),
+ size, // coded_size
+ gfx::Rect(size), // visible_rect
+ size, // natural_size
+ base::TimeDelta()); // timestamp
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
+ scoped_refptr<media::VideoFrame> CreateTestRGBAHardwareVideoFrame() {
+ return CreateTestHardwareVideoFrame(media::PIXEL_FORMAT_ARGB,
+ GL_TEXTURE_2D);
+ }
+
+ scoped_refptr<media::VideoFrame> CreateTestStreamTextureHardwareVideoFrame(
+ bool needs_copy) {
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHardwareVideoFrame(
+ media::PIXEL_FORMAT_ARGB, GL_TEXTURE_EXTERNAL_OES);
+ video_frame->metadata()->SetBoolean(
+ media::VideoFrameMetadata::COPY_REQUIRED, needs_copy);
+ return video_frame;
+ }
+
+ scoped_refptr<media::VideoFrame> CreateTestYuvHardwareVideoFrame(
+ media::VideoPixelFormat format,
+ size_t num_textures,
+ unsigned target) {
+ const int kDimension = 10;
+ gfx::Size size(kDimension, kDimension);
+
+ gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes];
+ for (size_t i = 0; i < num_textures; ++i) {
+ gpu::Mailbox mailbox;
+ mailbox.name[0] = 50 + 1;
+ mailbox_holders[i] =
+ gpu::MailboxHolder(mailbox, kMailboxSyncToken, target);
+ }
+ scoped_refptr<media::VideoFrame> video_frame =
+ media::VideoFrame::WrapNativeTextures(
+ format, mailbox_holders,
+ base::BindOnce(&VideoResourceUpdaterTest::SetReleaseSyncToken,
+ base::Unretained(this)),
+ size, // coded_size
+ gfx::Rect(size), // visible_rect
+ size, // natural_size
+ base::TimeDelta()); // timestamp
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
+ static const gpu::SyncToken kMailboxSyncToken;
+
+ // VideoResourceUpdater registers as a MemoryDumpProvider, which requires
+ // a TaskRunner.
+ base::MessageLoop message_loop_;
+ UploadCounterGLES2Interface* gl_;
+ scoped_refptr<viz::TestContextProvider> context_provider_;
+ FakeSharedBitmapReporter shared_bitmap_reporter_;
+ std::unique_ptr<viz::ClientResourceProvider> resource_provider_;
+ gpu::SyncToken release_sync_token_;
+ bool use_r16_texture_ = false;
+};
+
+const gpu::SyncToken VideoResourceUpdaterTest::kMailboxSyncToken =
+ gpu::SyncToken(gpu::CommandBufferNamespace::GPU_IO,
+ gpu::CommandBufferId::FromUnsafeValue(0x123),
+ 7);
+
+TEST_F(VideoResourceUpdaterTest, SoftwareFrame) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, HighBitFrameNoF16) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+}
+
+class VideoResourceUpdaterTestWithF16 : public VideoResourceUpdaterTest {
+ public:
+ VideoResourceUpdaterTestWithF16() : VideoResourceUpdaterTest() {
+ gl_->set_support_texture_half_float_linear(true);
+ }
+};
+
+TEST_F(VideoResourceUpdaterTestWithF16, HighBitFrame) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+ EXPECT_NEAR(resources.multiplier, 2.0, 0.1);
+ EXPECT_NEAR(resources.offset, 0.5, 0.1);
+
+ // Create the resource again, to test the path where the
+ // resources are cached.
+ VideoFrameExternalResources resources2 =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources2.type);
+ EXPECT_NEAR(resources2.multiplier, 2.0, 0.1);
+ EXPECT_NEAR(resources2.offset, 0.5, 0.1);
+}
+
+class VideoResourceUpdaterTestWithR16 : public VideoResourceUpdaterTest {
+ public:
+ VideoResourceUpdaterTestWithR16() : VideoResourceUpdaterTest() {
+ use_r16_texture_ = true;
+ gl_->set_support_texture_norm16(true);
+ }
+};
+
+TEST_F(VideoResourceUpdaterTestWithR16, HighBitFrame) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+
+ // Max 10-bit values as read by a sampler.
+ double max_10bit_value = ((1 << 10) - 1) / 65535.0;
+ EXPECT_NEAR(resources.multiplier * max_10bit_value, 1.0, 0.0001);
+ EXPECT_NEAR(resources.offset, 0.0, 0.1);
+
+ // Create the resource again, to test the path where the
+ // resources are cached.
+ VideoFrameExternalResources resources2 =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources2.type);
+ EXPECT_NEAR(resources2.multiplier * max_10bit_value, 1.0, 0.0001);
+ EXPECT_NEAR(resources2.offset, 0.0, 0.1);
+}
+
+TEST_F(VideoResourceUpdaterTest, HighBitFrameSoftwareCompositor) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrame) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrameSoftwareCompositor) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, ReuseResource) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
+
+ // Allocate the resources for a YUV video frame.
+ gl_->ResetUploadCount();
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+ EXPECT_EQ(3u, resources.resources.size());
+ EXPECT_EQ(3u, resources.release_callbacks.size());
+ // Expect exactly three texture uploads, one for each plane.
+ EXPECT_EQ(3, gl_->UploadCount());
+
+ // Simulate the ResourceProvider releasing the resources back to the video
+ // updater.
+ for (auto& release_callback : resources.release_callbacks)
+ std::move(release_callback).Run(gpu::SyncToken(), false);
+
+ // Allocate resources for the same frame.
+ gl_->ResetUploadCount();
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+ EXPECT_EQ(3u, resources.resources.size());
+ EXPECT_EQ(3u, resources.release_callbacks.size());
+ // The data should be reused so expect no texture uploads.
+ EXPECT_EQ(0, gl_->UploadCount());
+}
+
+TEST_F(VideoResourceUpdaterTest, ReuseResourceNoDelete) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
+
+ // Allocate the resources for a YUV video frame.
+ gl_->ResetUploadCount();
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+ EXPECT_EQ(3u, resources.resources.size());
+ EXPECT_EQ(3u, resources.release_callbacks.size());
+ // Expect exactly three texture uploads, one for each plane.
+ EXPECT_EQ(3, gl_->UploadCount());
+
+ // Allocate resources for the same frame.
+ gl_->ResetUploadCount();
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+ EXPECT_EQ(3u, resources.resources.size());
+ EXPECT_EQ(3u, resources.release_callbacks.size());
+ // The data should be reused so expect no texture uploads.
+ EXPECT_EQ(0, gl_->UploadCount());
+}
+
+TEST_F(VideoResourceUpdaterTest, SoftwareFrameSoftwareCompositor) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, ReuseResourceSoftwareCompositor) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
+
+ // Allocate the resources for a software video frame.
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ(1u, resources.release_callbacks.size());
+ // Expect exactly one allocated shared bitmap.
+ EXPECT_EQ(1u, shared_bitmap_reporter_.shared_bitmaps().size());
+ auto shared_bitmaps_copy = shared_bitmap_reporter_.shared_bitmaps();
+
+ // Simulate the ResourceProvider releasing the resource back to the video
+ // updater.
+ std::move(resources.release_callbacks[0]).Run(gpu::SyncToken(), false);
+
+ // Allocate resources for the same frame.
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ(1u, resources.release_callbacks.size());
+
+ // Ensure that the same shared bitmap was reused.
+ EXPECT_EQ(shared_bitmap_reporter_.shared_bitmaps(), shared_bitmaps_copy);
+}
+
+TEST_F(VideoResourceUpdaterTest, ReuseResourceNoDeleteSoftwareCompositor) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
+
+ // Allocate the resources for a software video frame.
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ(1u, resources.release_callbacks.size());
+ // Expect exactly one allocated shared bitmap.
+ EXPECT_EQ(1u, shared_bitmap_reporter_.shared_bitmaps().size());
+ auto shared_bitmaps_copy = shared_bitmap_reporter_.shared_bitmaps();
+
+ // Allocate resources for the same frame.
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ(1u, resources.release_callbacks.size());
+
+ // Ensure that the same shared bitmap was reused.
+ EXPECT_EQ(shared_bitmap_reporter_.shared_bitmaps(), shared_bitmaps_copy);
+}
+
+TEST_F(VideoResourceUpdaterTest, ChangeResourceSizeSoftwareCompositor) {
+ constexpr gfx::Size kSize1(10, 10);
+ constexpr gfx::Size kSize2(20, 20);
+
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
+
+ // Allocate the resources for a software video frame.
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(
+ CreateTestYUVVideoFrame(kSize1));
+ // Expect exactly one allocated shared bitmap.
+ EXPECT_EQ(1u, shared_bitmap_reporter_.shared_bitmaps().size());
+ auto shared_bitmaps_copy = shared_bitmap_reporter_.shared_bitmaps();
+
+ // Simulate the ResourceProvider releasing the resource back to the video
+ // updater.
+ std::move(resources.release_callbacks[0]).Run(gpu::SyncToken(), false);
+
+ // Allocate resources for the next frame with a different size.
+ resources = updater->CreateExternalResourcesFromVideoFrame(
+ CreateTestYUVVideoFrame(kSize2));
+
+ // The first resource was released, so it can be reused but it's the wrong
+ // size. We should expect the first shared bitmap to be deleted and a new
+ // shared bitmap to be allocated.
+ EXPECT_EQ(1u, shared_bitmap_reporter_.shared_bitmaps().size());
+ EXPECT_NE(shared_bitmap_reporter_.shared_bitmaps(), shared_bitmaps_copy);
+}
+
+TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+
+ scoped_refptr<media::VideoFrame> video_frame =
+ CreateTestRGBAHardwareVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ(1u, resources.release_callbacks.size());
+
+ video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_I420, 3,
+ GL_TEXTURE_RECTANGLE_ARB);
+
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+ EXPECT_EQ(3u, resources.resources.size());
+ EXPECT_EQ(3u, resources.release_callbacks.size());
+ EXPECT_FALSE(resources.resources[0].read_lock_fences_enabled);
+ EXPECT_FALSE(resources.resources[1].read_lock_fences_enabled);
+ EXPECT_FALSE(resources.resources[2].read_lock_fences_enabled);
+
+ video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_I420, 3,
+ GL_TEXTURE_RECTANGLE_ARB);
+ video_frame->metadata()->SetBoolean(
+ media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED, true);
+
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_TRUE(resources.resources[0].read_lock_fences_enabled);
+ EXPECT_TRUE(resources.resources[1].read_lock_fences_enabled);
+ EXPECT_TRUE(resources.resources[2].read_lock_fences_enabled);
+}
+
+TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_StreamTexture) {
+ // Note that |use_stream_video_draw_quad| is true for this test.
+ std::unique_ptr<VideoResourceUpdater> updater =
+ CreateUpdaterForHardware(true);
+ gl_->ResetTextureCreationCount();
+ scoped_refptr<media::VideoFrame> video_frame =
+ CreateTestStreamTextureHardwareVideoFrame(false);
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::STREAM_TEXTURE, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ((GLenum)GL_TEXTURE_EXTERNAL_OES,
+ resources.resources[0].mailbox_holder.texture_target);
+ EXPECT_EQ(1u, resources.release_callbacks.size());
+ EXPECT_EQ(0, gl_->TextureCreationCount());
+
+ // A copied stream texture should return an RGBA resource in a new
+ // GL_TEXTURE_2D texture.
+ gl_->ResetTextureCreationCount();
+ video_frame = CreateTestStreamTextureHardwareVideoFrame(true);
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ((GLenum)GL_TEXTURE_2D,
+ resources.resources[0].mailbox_holder.texture_target);
+ EXPECT_EQ(1u, resources.release_callbacks.size());
+ EXPECT_EQ(1, gl_->TextureCreationCount());
+}
+
+TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_TextureQuad) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ gl_->ResetTextureCreationCount();
+ scoped_refptr<media::VideoFrame> video_frame =
+ CreateTestStreamTextureHardwareVideoFrame(false);
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ((GLenum)GL_TEXTURE_EXTERNAL_OES,
+ resources.resources[0].mailbox_holder.texture_target);
+ EXPECT_EQ(1u, resources.release_callbacks.size());
+ EXPECT_EQ(0, gl_->TextureCreationCount());
+}
+
+// Passthrough the sync token returned by the compositor if we don't have an
+// existing release sync token.
+TEST_F(VideoResourceUpdaterTest, PassReleaseSyncToken) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+
+ const gpu::SyncToken sync_token(gpu::CommandBufferNamespace::GPU_IO,
+ gpu::CommandBufferId::FromUnsafeValue(0x123),
+ 123);
+
+ {
+ scoped_refptr<media::VideoFrame> video_frame =
+ CreateTestRGBAHardwareVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+
+ ASSERT_EQ(resources.release_callbacks.size(), 1u);
+ std::move(resources.release_callbacks[0]).Run(sync_token, false);
+ }
+
+ EXPECT_EQ(release_sync_token_, sync_token);
+}
+
+// Generate new sync token because video frame has an existing sync token.
+TEST_F(VideoResourceUpdaterTest, GenerateReleaseSyncToken) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+
+ const gpu::SyncToken sync_token1(gpu::CommandBufferNamespace::GPU_IO,
+ gpu::CommandBufferId::FromUnsafeValue(0x123),
+ 123);
+
+ const gpu::SyncToken sync_token2(gpu::CommandBufferNamespace::GPU_IO,
+ gpu::CommandBufferId::FromUnsafeValue(0x234),
+ 234);
+
+ {
+ scoped_refptr<media::VideoFrame> video_frame =
+ CreateTestRGBAHardwareVideoFrame();
+
+ VideoFrameExternalResources resources1 =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ ASSERT_EQ(resources1.release_callbacks.size(), 1u);
+ std::move(resources1.release_callbacks[0]).Run(sync_token1, false);
+
+ VideoFrameExternalResources resources2 =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ ASSERT_EQ(resources2.release_callbacks.size(), 1u);
+ std::move(resources2.release_callbacks[0]).Run(sync_token2, false);
+ }
+
+ EXPECT_TRUE(release_sync_token_.HasData());
+ EXPECT_NE(release_sync_token_, sync_token1);
+ EXPECT_NE(release_sync_token_, sync_token2);
+}
+
+// Pass mailbox sync token as is if no GL operations are performed before frame
+// resources are handed off to the compositor.
+TEST_F(VideoResourceUpdaterTest, PassMailboxSyncToken) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+
+ scoped_refptr<media::VideoFrame> video_frame =
+ CreateTestRGBAHardwareVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+
+ ASSERT_EQ(resources.resources.size(), 1u);
+ EXPECT_TRUE(resources.resources[0].mailbox_holder.sync_token.HasData());
+ EXPECT_EQ(resources.resources[0].mailbox_holder.sync_token,
+ kMailboxSyncToken);
+}
+
+// Generate new sync token for compositor when copying the texture.
+TEST_F(VideoResourceUpdaterTest, GenerateSyncTokenOnTextureCopy) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+
+ scoped_refptr<media::VideoFrame> video_frame =
+ CreateTestStreamTextureHardwareVideoFrame(true /* needs_copy */);
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+
+ ASSERT_EQ(resources.resources.size(), 1u);
+ EXPECT_TRUE(resources.resources[0].mailbox_holder.sync_token.HasData());
+ EXPECT_NE(resources.resources[0].mailbox_holder.sync_token,
+ kMailboxSyncToken);
+}
+
+// NV12 VideoFrames backed by a single native texture can be sampled out
+// by GL as RGB. To use them as HW overlays we need to know the format
+// of the underlying buffer, that is YUV_420_BIPLANAR.
+TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_SingleNV12) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ gl_->ResetTextureCreationCount();
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHardwareVideoFrame(
+ media::PIXEL_FORMAT_NV12, GL_TEXTURE_EXTERNAL_OES);
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGB, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ((GLenum)GL_TEXTURE_EXTERNAL_OES,
+ resources.resources[0].mailbox_holder.texture_target);
+ EXPECT_EQ(viz::YUV_420_BIPLANAR, resources.resources[0].format);
+
+ video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_NV12, 1,
+ GL_TEXTURE_RECTANGLE_ARB);
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGB, resources.type);
+ EXPECT_EQ(1u, resources.resources.size());
+ EXPECT_EQ((GLenum)GL_TEXTURE_RECTANGLE_ARB,
+ resources.resources[0].mailbox_holder.texture_target);
+ EXPECT_EQ(viz::YUV_420_BIPLANAR, resources.resources[0].format);
+
+ EXPECT_EQ(0, gl_->TextureCreationCount());
+}
+
+TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_DualNV12) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ gl_->ResetTextureCreationCount();
+ scoped_refptr<media::VideoFrame> video_frame =
+ CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_NV12, 2,
+ GL_TEXTURE_EXTERNAL_OES);
+
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+ EXPECT_EQ(2u, resources.resources.size());
+ EXPECT_EQ(2u, resources.release_callbacks.size());
+ EXPECT_EQ((GLenum)GL_TEXTURE_EXTERNAL_OES,
+ resources.resources[0].mailbox_holder.texture_target);
+ // |updater| doesn't set |buffer_format| in this case.
+ EXPECT_EQ(viz::RGBA_8888, resources.resources[0].format);
+
+ video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_NV12, 2,
+ GL_TEXTURE_RECTANGLE_ARB);
+ resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
+ EXPECT_EQ(2u, resources.resources.size());
+ EXPECT_EQ((GLenum)GL_TEXTURE_RECTANGLE_ARB,
+ resources.resources[0].mailbox_holder.texture_target);
+ EXPECT_EQ(viz::RGBA_8888, resources.resources[0].format);
+ EXPECT_EQ(0, gl_->TextureCreationCount());
+}
+
+} // namespace
+} // namespace media
diff --git a/chromium/media/test/BUILD.gn b/chromium/media/test/BUILD.gn
index 2e6bdabd49e..5b541e59f98 100644
--- a/chromium/media/test/BUILD.gn
+++ b/chromium/media/test/BUILD.gn
@@ -4,6 +4,7 @@
import("//media/media_options.gni")
import("//testing/libfuzzer/fuzzer_test.gni")
+import("//third_party/libaom/options.gni")
source_set("run_all_unittests") {
testonly = true
@@ -15,7 +16,7 @@ source_set("run_all_unittests") {
"//base",
"//base/test:test_support",
"//media:test_support",
- "//mojo/edk",
+ "//mojo/core/embedder",
]
if (is_android) {
@@ -167,6 +168,10 @@ pipeline_integration_fuzzer_variants = [
# See below for additional variants depending on build configuration.
]
+if (enable_av1_decoder) {
+ pipeline_integration_fuzzer_variants += [ "MP4_AV1" ]
+}
+
if (proprietary_codecs) {
pipeline_integration_fuzzer_variants += [
"ADTS",
@@ -210,6 +215,7 @@ foreach(variant, pipeline_integration_fuzzer_variants) {
# header for pipeline_integration_test_base.h. This should be
# moved into the .cc file to avoid the extra dependency here.
"//testing/gmock",
+ "//third_party/libaom:av1_buildflags",
"//ui/gfx:test_support",
]
diff --git a/chromium/media/video/fake_video_encode_accelerator.cc b/chromium/media/video/fake_video_encode_accelerator.cc
index dc51da6118e..1af316d869b 100644
--- a/chromium/media/video/fake_video_encode_accelerator.cc
+++ b/chromium/media/video/fake_video_encode_accelerator.cc
@@ -83,6 +83,12 @@ void FakeVideoEncodeAccelerator::RequestEncodingParametersChange(
stored_bitrates_.push_back(bitrate);
}
+void FakeVideoEncodeAccelerator::RequestEncodingParametersChange(
+ const VideoBitrateAllocation& bitrate,
+ uint32_t framerate) {
+ stored_bitrate_allocations_.push_back(bitrate);
+}
+
void FakeVideoEncodeAccelerator::Destroy() { delete this; }
void FakeVideoEncodeAccelerator::SendDummyFrameForTesting(bool key_frame) {
@@ -130,8 +136,10 @@ void FakeVideoEncodeAccelerator::DoBitstreamBufferReady(
int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) const {
- client_->BitstreamBufferReady(bitstream_buffer_id, payload_size, key_frame,
- base::Time::Now() - base::Time());
+ client_->BitstreamBufferReady(
+ bitstream_buffer_id,
+ BitstreamBufferMetadata(payload_size, key_frame,
+ base::Time::Now().since_origin()));
}
} // namespace media
diff --git a/chromium/media/video/fake_video_encode_accelerator.h b/chromium/media/video/fake_video_encode_accelerator.h
index ea6ae546db8..79dba744d46 100644
--- a/chromium/media/video/fake_video_encode_accelerator.h
+++ b/chromium/media/video/fake_video_encode_accelerator.h
@@ -44,11 +44,17 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
void UseOutputBitstreamBuffer(const BitstreamBuffer& buffer) override;
void RequestEncodingParametersChange(uint32_t bitrate,
uint32_t framerate) override;
+ void RequestEncodingParametersChange(const VideoBitrateAllocation& bitrate,
+ uint32_t framerate) override;
void Destroy() override;
const std::vector<uint32_t>& stored_bitrates() const {
return stored_bitrates_;
}
+ const std::vector<VideoBitrateAllocation>& stored_bitrate_allocations()
+ const {
+ return stored_bitrate_allocations_;
+ }
void SendDummyFrameForTesting(bool key_frame);
void SetWillInitializationSucceed(bool will_initialization_succeed);
@@ -66,6 +72,7 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
// Our original (constructor) calling message loop used for all tasks.
const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
std::vector<uint32_t> stored_bitrates_;
+ std::vector<VideoBitrateAllocation> stored_bitrate_allocations_;
bool will_initialization_succeed_;
VideoEncodeAccelerator::Client* client_;
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
index 2bcbac3231c..734606a1ed7 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -21,6 +21,7 @@
#include "base/containers/stack_container.h"
#include "base/location.h"
#include "base/macros.h"
+#include "base/metrics/histogram_macros.h"
#include "base/strings/stringprintf.h"
#include "base/time/default_tick_clock.h"
#include "base/trace_event/memory_dump_manager.h"
@@ -381,7 +382,7 @@ void CopyRowsToI420Buffer(int first_row,
} else {
const int scale = 0x10000 >> (bit_depth - 8);
libyuv::Convert16To8Plane(
- reinterpret_cast<const uint16*>(source + source_stride * first_row),
+ reinterpret_cast<const uint16_t*>(source + source_stride * first_row),
source_stride / 2, output + dest_stride * first_row, dest_stride, scale,
bytes_per_row, rows);
}
@@ -589,6 +590,11 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
case PIXEL_FORMAT_YUV444P12:
case PIXEL_FORMAT_Y16:
case PIXEL_FORMAT_UNKNOWN:
+ if (!video_frame->HasTextures()) {
+ UMA_HISTOGRAM_ENUMERATION(
+ "Media.GpuMemoryBufferVideoFramePool.UnsupportedFormat",
+ video_frame->format(), PIXEL_FORMAT_MAX + 1);
+ }
passthrough = true;
}
// TODO(dcastagna): Handle odd positioned video frame input, see
@@ -904,7 +910,8 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
break;
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
case GpuVideoAcceleratorFactories::OutputFormat::XB30:
- allow_overlay = true;
+ // TODO(mcasas): Enable this for ChromeOS https://crbug.com/776093.
+ allow_overlay = false;
// We've converted the YUV to RGB, fix the color space.
// TODO(hubbe): The libyuv YUV to RGB conversion may not have
// honored the color space conversion 100%. We should either fix
@@ -1007,7 +1014,6 @@ GpuMemoryBufferVideoFramePool::PoolImpl::GetOrCreateFrameResources(
gles2->TexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
gles2->TexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
gles2->TexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- gles2->GenMailboxCHROMIUM(plane_resource.mailbox.name);
gles2->ProduceTextureDirectCHROMIUM(plane_resource.texture_id,
plane_resource.mailbox.name);
}
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
index ca6af30c90e..7cefccc3897 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
@@ -41,7 +41,7 @@ class TestGLES2Interface : public gpu::gles2::GLES2InterfaceStub {
memcpy(sync_token, &sync_token_data, sizeof(sync_token_data));
}
- void GenMailboxCHROMIUM(GLbyte* mailbox) override {
+ void ProduceTextureDirectCHROMIUM(GLuint texture, GLbyte* mailbox) override {
*reinterpret_cast<unsigned*>(mailbox) = ++mailbox_;
}
diff --git a/chromium/media/video/video_decode_accelerator.h b/chromium/media/video/video_decode_accelerator.h
index b4768003fdc..c933f24211c 100644
--- a/chromium/media/video/video_decode_accelerator.h
+++ b/chromium/media/video/video_decode_accelerator.h
@@ -19,7 +19,6 @@
#include "media/base/decoder_buffer.h"
#include "media/base/encryption_scheme.h"
#include "media/base/overlay_info.h"
-#include "media/base/surface_manager.h"
#include "media/base/video_decoder_config.h"
#include "media/video/picture.h"
#include "ui/gfx/color_space.h"
diff --git a/chromium/media/video/video_encode_accelerator.cc b/chromium/media/video/video_encode_accelerator.cc
index 1f6c0dcb778..0c3d76df2d0 100644
--- a/chromium/media/video/video_encode_accelerator.cc
+++ b/chromium/media/video/video_encode_accelerator.cc
@@ -8,6 +8,24 @@
namespace media {
+Vp8Metadata::Vp8Metadata()
+ : non_reference(false), temporal_idx(0), layer_sync(false) {}
+Vp8Metadata::Vp8Metadata(const Vp8Metadata& other) = default;
+Vp8Metadata::Vp8Metadata(Vp8Metadata&& other) = default;
+Vp8Metadata::~Vp8Metadata() = default;
+
+BitstreamBufferMetadata::BitstreamBufferMetadata()
+ : payload_size_bytes(0), key_frame(false) {}
+BitstreamBufferMetadata::BitstreamBufferMetadata(
+ BitstreamBufferMetadata&& other) = default;
+BitstreamBufferMetadata::BitstreamBufferMetadata(size_t payload_size_bytes,
+ bool key_frame,
+ base::TimeDelta timestamp)
+ : payload_size_bytes(payload_size_bytes),
+ key_frame(key_frame),
+ timestamp(timestamp) {}
+BitstreamBufferMetadata::~BitstreamBufferMetadata() = default;
+
VideoEncodeAccelerator::~VideoEncodeAccelerator() = default;
VideoEncodeAccelerator::SupportedProfile::SupportedProfile()
@@ -25,9 +43,9 @@ void VideoEncodeAccelerator::Flush(FlushCallback flush_callback) {
}
void VideoEncodeAccelerator::RequestEncodingParametersChange(
- const VideoBitrateAllocation& bitrate,
+ const VideoBitrateAllocation& bitrate_allocation,
uint32_t framerate) {
- RequestEncodingParametersChange(bitrate.GetSumBps(), framerate);
+ RequestEncodingParametersChange(bitrate_allocation.GetSumBps(), framerate);
}
} // namespace media
diff --git a/chromium/media/video/video_encode_accelerator.h b/chromium/media/video/video_encode_accelerator.h
index ddc1d7736de..092b4ce8bd8 100644
--- a/chromium/media/video/video_encode_accelerator.h
+++ b/chromium/media/video/video_encode_accelerator.h
@@ -26,6 +26,43 @@ namespace media {
class BitstreamBuffer;
class VideoFrame;
+// Metadata for a VP8 bitstream buffer.
+// |non_reference| is true iff this frame does not update any reference buffer,
+// meaning dropping this frame still results in a decodable
+// stream.
+// |temporal_idx| indicates the temporal index for this frame.
+// |layer_sync| if true iff this frame has |temporal_idx| > 0 and does NOT
+// reference any reference buffer containing a frame with
+// temporal_idx > 0.
+struct MEDIA_EXPORT Vp8Metadata final {
+ Vp8Metadata();
+ Vp8Metadata(const Vp8Metadata& other);
+ Vp8Metadata(Vp8Metadata&& other);
+ ~Vp8Metadata();
+ bool non_reference;
+ uint8_t temporal_idx;
+ bool layer_sync;
+};
+
+// Metadata associated with a bitstream buffer.
+// |payload_size| is the byte size of the used portion of the buffer.
+// |key_frame| is true if this delivered frame is a keyframe.
+// |timestamp| is the same timestamp as in VideoFrame passed to Encode().
+// |vp8|, if set, contains metadata specific to VP8. See above.
+struct MEDIA_EXPORT BitstreamBufferMetadata final {
+ BitstreamBufferMetadata();
+ BitstreamBufferMetadata(BitstreamBufferMetadata&& other);
+ BitstreamBufferMetadata(size_t payload_size_bytes,
+ bool key_frame,
+ base::TimeDelta timestamp);
+ ~BitstreamBufferMetadata();
+
+ size_t payload_size_bytes;
+ bool key_frame;
+ base::TimeDelta timestamp;
+ base::Optional<Vp8Metadata> vp8;
+};
+
// Video encoder interface.
class MEDIA_EXPORT VideoEncodeAccelerator {
public:
@@ -82,13 +119,10 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
// is transferred back to the VEA::Client once this callback is made.
// Parameters:
// |bitstream_buffer_id| is the id of the buffer that is ready.
- // |payload_size| is the byte size of the used portion of the buffer.
- // |key_frame| is true if this delivered frame is a keyframe.
- // |timestamp| is the same timestamp as in VideoFrame passed to Encode().
- virtual void BitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame,
- base::TimeDelta timestamp) = 0;
+ // |metadata| contains data such as payload size and timestamp. See above.
+ virtual void BitstreamBufferReady(
+ int32_t bitstream_buffer_id,
+ const BitstreamBufferMetadata& metadata) = 0;
// Error notification callback. Note that errors in Initialize() will not be
// reported here, but will instead be indicated by a false return value