summaryrefslogtreecommitdiff
path: root/chromium/media
diff options
context:
space:
mode:
Diffstat (limited to 'chromium/media')
-rw-r--r--chromium/media/BUILD.gn39
-rw-r--r--chromium/media/DEPS6
-rw-r--r--chromium/media/README.md30
-rw-r--r--chromium/media/audio/BUILD.gn25
-rw-r--r--chromium/media/audio/alive_checker.cc4
-rw-r--r--chromium/media/audio/alive_checker.h1
-rw-r--r--chromium/media/audio/alive_checker_unittest.cc1
-rw-r--r--chromium/media/audio/alsa/alsa_input.cc36
-rw-r--r--chromium/media/audio/alsa/alsa_input.h1
-rw-r--r--chromium/media/audio/alsa/alsa_output.cc20
-rw-r--r--chromium/media/audio/alsa/alsa_output_unittest.cc24
-rw-r--r--chromium/media/audio/alsa/alsa_util.cc19
-rw-r--r--chromium/media/audio/alsa/alsa_util.h2
-rw-r--r--chromium/media/audio/alsa/audio_manager_alsa.cc10
-rw-r--r--chromium/media/audio/android/audio_android_unittest.cc38
-rw-r--r--chromium/media/audio/android/audio_manager_android.cc7
-rw-r--r--chromium/media/audio/android/audio_record_input.cc23
-rw-r--r--chromium/media/audio/android/audio_record_input.h1
-rw-r--r--chromium/media/audio/android/opensles_input.cc20
-rw-r--r--chromium/media/audio/android/opensles_input.h1
-rw-r--r--chromium/media/audio/android/opensles_output.cc45
-rw-r--r--chromium/media/audio/android/opensles_output.h5
-rw-r--r--chromium/media/audio/audio_debug_file_writer_unittest.cc1
-rw-r--r--chromium/media/audio/audio_debug_recording_helper_unittest.cc4
-rw-r--r--chromium/media/audio/audio_device_description.cc24
-rw-r--r--chromium/media/audio/audio_device_description.h8
-rw-r--r--chromium/media/audio/audio_device_name.cc4
-rw-r--r--chromium/media/audio/audio_device_name.h4
-rw-r--r--chromium/media/audio/audio_input_controller.cc22
-rw-r--r--chromium/media/audio/audio_input_controller.h21
-rw-r--r--chromium/media/audio/audio_input_controller_unittest.cc45
-rw-r--r--chromium/media/audio/audio_input_delegate.h6
-rw-r--r--chromium/media/audio/audio_input_device.cc210
-rw-r--r--chromium/media/audio/audio_input_device.h104
-rw-r--r--chromium/media/audio/audio_input_device_unittest.cc50
-rw-r--r--chromium/media/audio/audio_input_ipc.h15
-rw-r--r--chromium/media/audio/audio_input_stream_data_interceptor.cc6
-rw-r--r--chromium/media/audio/audio_input_stream_data_interceptor.h1
-rw-r--r--chromium/media/audio/audio_input_stream_data_interceptor_unittest.cc7
-rw-r--r--chromium/media/audio/audio_input_sync_writer.cc36
-rw-r--r--chromium/media/audio/audio_input_sync_writer.h16
-rw-r--r--chromium/media/audio/audio_input_sync_writer_unittest.cc13
-rw-r--r--chromium/media/audio/audio_io.h5
-rw-r--r--chromium/media/audio/audio_low_latency_input_output_unittest.cc24
-rw-r--r--chromium/media/audio/audio_manager.cc3
-rw-r--r--chromium/media/audio/audio_manager.h1
-rw-r--r--chromium/media/audio/audio_manager_base.cc81
-rw-r--r--chromium/media/audio/audio_manager_base.h10
-rw-r--r--chromium/media/audio/audio_manager_unittest.cc5
-rw-r--r--chromium/media/audio/audio_output_controller_unittest.cc5
-rw-r--r--chromium/media/audio/audio_output_delegate.h4
-rw-r--r--chromium/media/audio/audio_output_device.cc205
-rw-r--r--chromium/media/audio/audio_output_device.h52
-rw-r--r--chromium/media/audio/audio_output_device_unittest.cc459
-rw-r--r--chromium/media/audio/audio_output_ipc.h23
-rw-r--r--chromium/media/audio/audio_output_proxy.cc1
-rw-r--r--chromium/media/audio/audio_output_proxy_unittest.cc13
-rw-r--r--chromium/media/audio/audio_output_resampler.cc20
-rw-r--r--chromium/media/audio/audio_sync_reader.cc44
-rw-r--r--chromium/media/audio/audio_sync_reader.h22
-rw-r--r--chromium/media/audio/audio_sync_reader_unittest.cc15
-rw-r--r--chromium/media/audio/audio_system.cc29
-rw-r--r--chromium/media/audio/audio_system.h8
-rw-r--r--chromium/media/audio/audio_system_impl.cc12
-rw-r--r--chromium/media/audio/audio_system_impl.h4
-rw-r--r--chromium/media/audio/audio_system_test_util.h7
-rw-r--r--chromium/media/audio/audio_thread_impl.cc1
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.cc21
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.h2
-rw-r--r--chromium/media/audio/cras/cras_input.cc20
-rw-r--r--chromium/media/audio/cras/cras_input.h1
-rw-r--r--chromium/media/audio/cras/cras_input_unittest.cc19
-rw-r--r--chromium/media/audio/cras/cras_unified.cc14
-rw-r--r--chromium/media/audio/cras/cras_unified_unittest.cc17
-rw-r--r--chromium/media/audio/fake_audio_input_stream.cc5
-rw-r--r--chromium/media/audio/fake_audio_input_stream.h1
-rw-r--r--chromium/media/audio/fake_audio_manager.cc10
-rw-r--r--chromium/media/audio/fuchsia/audio_manager_fuchsia.cc2
-rw-r--r--chromium/media/audio/mac/audio_input_mac.cc25
-rw-r--r--chromium/media/audio/mac/audio_input_mac.h1
-rw-r--r--chromium/media/audio/mac/audio_low_latency_input_mac.cc171
-rw-r--r--chromium/media/audio/mac/audio_low_latency_input_mac.h24
-rw-r--r--chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc4
-rw-r--r--chromium/media/audio/mac/audio_manager_mac.cc51
-rw-r--r--chromium/media/audio/mac/audio_manager_mac.h6
-rw-r--r--chromium/media/audio/mac/core_audio_util_mac.cc5
-rw-r--r--chromium/media/audio/mac/coreaudio_dispatch_override.cc2
-rw-r--r--chromium/media/audio/mock_audio_debug_recording_manager.h1
-rw-r--r--chromium/media/audio/mock_audio_source_callback.h2
-rw-r--r--chromium/media/audio/null_audio_sink.cc1
-rw-r--r--chromium/media/audio/null_audio_sink.h4
-rw-r--r--chromium/media/audio/pulse/audio_manager_pulse.cc13
-rw-r--r--chromium/media/audio/pulse/pulse_input.cc18
-rw-r--r--chromium/media/audio/pulse/pulse_input.h1
-rw-r--r--chromium/media/audio/pulse/pulse_output.cc11
-rw-r--r--chromium/media/audio/pulse/pulse_output.h2
-rw-r--r--chromium/media/audio/pulse/pulse_util.cc32
-rw-r--r--chromium/media/audio/pulse/pulse_util.h5
-rw-r--r--chromium/media/audio/scoped_task_runner_observer.cc2
-rw-r--r--chromium/media/audio/scoped_task_runner_observer.h4
-rw-r--r--chromium/media/audio/simple_sources.cc17
-rw-r--r--chromium/media/audio/simple_sources_unittest.cc14
-rw-r--r--chromium/media/audio/sounds/audio_stream_handler.cc10
-rw-r--r--chromium/media/audio/virtual_audio_input_stream.cc5
-rw-r--r--chromium/media/audio/virtual_audio_input_stream.h1
-rw-r--r--chromium/media/audio/virtual_audio_input_stream_unittest.cc8
-rw-r--r--chromium/media/audio/virtual_audio_output_stream_unittest.cc8
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.cc761
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.h91
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win_unittest.cc90
-rw-r--r--chromium/media/audio/win/audio_low_latency_output_win.cc23
-rw-r--r--chromium/media/audio/win/audio_low_latency_output_win_unittest.cc19
-rw-r--r--chromium/media/audio/win/audio_manager_win.cc36
-rw-r--r--chromium/media/audio/win/audio_output_win_unittest.cc61
-rw-r--r--chromium/media/audio/win/core_audio_util_win.cc78
-rw-r--r--chromium/media/audio/win/core_audio_util_win.h17
-rw-r--r--chromium/media/audio/win/waveout_output_win.cc8
-rw-r--r--chromium/media/base/BUILD.gn19
-rw-r--r--chromium/media/base/android/BUILD.gn2
-rw-r--r--chromium/media/base/android/android_cdm_factory.cc91
-rw-r--r--chromium/media/base/android/android_cdm_factory.h22
-rw-r--r--chromium/media/base/android/media_codec_loop_unittest.cc2
-rw-r--r--chromium/media/base/android/media_drm_bridge.cc113
-rw-r--r--chromium/media/base/android/media_drm_bridge.h47
-rw-r--r--chromium/media/base/android/media_drm_bridge_factory.cc136
-rw-r--r--chromium/media/base/android/media_drm_bridge_factory.h82
-rw-r--r--chromium/media/base/android/mock_media_crypto_context.cc8
-rw-r--r--chromium/media/base/android/mock_media_crypto_context.h3
-rw-r--r--chromium/media/base/audio_buffer.cc1
-rw-r--r--chromium/media/base/audio_buffer_converter.cc1
-rw-r--r--chromium/media/base/audio_buffer_converter_unittest.cc4
-rw-r--r--chromium/media/base/audio_bus.cc1
-rw-r--r--chromium/media/base/audio_bus.h1
-rw-r--r--chromium/media/base/audio_bus_unittest.cc12
-rw-r--r--chromium/media/base/audio_capturer_source.h16
-rw-r--r--chromium/media/base/audio_codecs.cc4
-rw-r--r--chromium/media/base/audio_codecs.h3
-rw-r--r--chromium/media/base/audio_converter_perftest.cc13
-rw-r--r--chromium/media/base/audio_converter_unittest.cc13
-rw-r--r--chromium/media/base/audio_decoder_config.cc15
-rw-r--r--chromium/media/base/audio_decoder_config.h33
-rw-r--r--chromium/media/base/audio_parameters.cc26
-rw-r--r--chromium/media/base/audio_parameters.h23
-rw-r--r--chromium/media/base/audio_parameters_unittest.cc150
-rw-r--r--chromium/media/base/audio_renderer_mixer_input.cc24
-rw-r--r--chromium/media/base/audio_renderer_mixer_input.h4
-rw-r--r--chromium/media/base/audio_renderer_mixer_input_unittest.cc61
-rw-r--r--chromium/media/base/audio_renderer_mixer_pool.h8
-rw-r--r--chromium/media/base/audio_renderer_mixer_unittest.cc28
-rw-r--r--chromium/media/base/audio_renderer_sink.h2
-rw-r--r--chromium/media/base/bind_to_current_loop_unittest.cc9
-rw-r--r--chromium/media/base/bitstream_buffer.cc26
-rw-r--r--chromium/media/base/bitstream_buffer.h25
-rw-r--r--chromium/media/base/callback_registry.h105
-rw-r--r--chromium/media/base/callback_registry_unittest.cc175
-rw-r--r--chromium/media/base/cdm_context.cc11
-rw-r--r--chromium/media/base/cdm_context.h40
-rw-r--r--chromium/media/base/channel_layout.cc6
-rw-r--r--chromium/media/base/channel_layout.h7
-rw-r--r--chromium/media/base/channel_mixer.cc16
-rw-r--r--chromium/media/base/channel_mixer.h8
-rw-r--r--chromium/media/base/channel_mixer_unittest.cc11
-rw-r--r--chromium/media/base/channel_mixing_matrix_unittest.cc6
-rw-r--r--chromium/media/base/decode_capabilities.cc3
-rw-r--r--chromium/media/base/decoder_buffer.cc18
-rw-r--r--chromium/media/base/decoder_buffer.h27
-rw-r--r--chromium/media/base/decoder_buffer_unittest.cc59
-rw-r--r--chromium/media/base/decoder_factory.cc2
-rw-r--r--chromium/media/base/decoder_factory.h8
-rw-r--r--chromium/media/base/decrypt_config.cc80
-rw-r--r--chromium/media/base/decrypt_config.h49
-rw-r--r--chromium/media/base/decrypt_config_unittest.cc203
-rw-r--r--chromium/media/base/demuxer.h20
-rw-r--r--chromium/media/base/eme_constants.h101
-rw-r--r--chromium/media/base/encryption_pattern.cc23
-rw-r--r--chromium/media/base/encryption_pattern.h12
-rw-r--r--chromium/media/base/encryption_scheme.cc28
-rw-r--r--chromium/media/base/encryption_scheme.h7
-rw-r--r--chromium/media/base/fake_audio_renderer_sink.cc1
-rw-r--r--chromium/media/base/fake_audio_worker_unittest.cc1
-rw-r--r--chromium/media/base/fake_demuxer_stream.cc8
-rw-r--r--chromium/media/base/fake_demuxer_stream.h1
-rw-r--r--chromium/media/base/fake_text_track_stream.h5
-rw-r--r--chromium/media/base/fallback_video_decoder.cc102
-rw-r--r--chromium/media/base/fallback_video_decoder.h63
-rw-r--r--chromium/media/base/fallback_video_decoder_unittest.cc160
-rw-r--r--chromium/media/base/ipc/media_param_traits.cc6
-rw-r--r--chromium/media/base/ipc/media_param_traits_macros.h4
-rw-r--r--chromium/media/base/key_system_properties.h5
-rw-r--r--chromium/media/base/key_systems.cc130
-rw-r--r--chromium/media/base/key_systems.h6
-rw-r--r--chromium/media/base/key_systems_unittest.cc37
-rw-r--r--chromium/media/base/keyboard_event_counter.cc9
-rw-r--r--chromium/media/base/keyboard_event_counter.h6
-rw-r--r--chromium/media/base/keyboard_event_counter_unittest.cc1
-rw-r--r--chromium/media/base/media_log.cc6
-rw-r--r--chromium/media/base/media_log.h16
-rw-r--r--chromium/media/base/media_log_event.h8
-rw-r--r--chromium/media/base/media_permission.h10
-rw-r--r--chromium/media/base/media_resource.h12
-rw-r--r--chromium/media/base/media_switches.cc49
-rw-r--r--chromium/media/base/media_switches.h8
-rw-r--r--chromium/media/base/media_url_demuxer.cc24
-rw-r--r--chromium/media/base/media_url_demuxer.h10
-rw-r--r--chromium/media/base/mime_util_internal.cc19
-rw-r--r--chromium/media/base/mime_util_internal.h3
-rw-r--r--chromium/media/base/mime_util_unittest.cc8
-rw-r--r--chromium/media/base/mock_audio_renderer_sink.cc2
-rw-r--r--chromium/media/base/mock_audio_renderer_sink.h5
-rw-r--r--chromium/media/base/mock_demuxer_host.h2
-rw-r--r--chromium/media/base/mock_filters.cc39
-rw-r--r--chromium/media/base/mock_filters.h121
-rw-r--r--chromium/media/base/mock_media_log.h11
-rw-r--r--chromium/media/base/pipeline.h33
-rw-r--r--chromium/media/base/pipeline_impl.cc118
-rw-r--r--chromium/media/base/pipeline_impl.h16
-rw-r--r--chromium/media/base/pipeline_impl_unittest.cc2
-rw-r--r--chromium/media/base/renderer.cc14
-rw-r--r--chromium/media/base/renderer.h13
-rw-r--r--chromium/media/base/renderer_factory_selector.cc9
-rw-r--r--chromium/media/base/renderer_factory_selector.h10
-rw-r--r--chromium/media/base/sample_format.cc10
-rw-r--r--chromium/media/base/sample_format.h37
-rw-r--r--chromium/media/base/silent_sink_suspender_unittest.cc1
-rw-r--r--chromium/media/base/subsample_entry.cc29
-rw-r--r--chromium/media/base/subsample_entry.h11
-rw-r--r--chromium/media/base/subsample_entry_unittest.cc38
-rw-r--r--chromium/media/base/test_data_util.cc2
-rw-r--r--chromium/media/base/test_helpers.cc31
-rw-r--r--chromium/media/base/test_helpers.h9
-rw-r--r--chromium/media/base/text_renderer_unittest.cc71
-rw-r--r--chromium/media/base/text_track.h4
-rw-r--r--chromium/media/base/unaligned_shared_memory.cc60
-rw-r--r--chromium/media/base/unaligned_shared_memory.h37
-rw-r--r--chromium/media/base/unaligned_shared_memory_unittest.cc86
-rw-r--r--chromium/media/base/user_input_monitor.cc83
-rw-r--r--chromium/media/base/user_input_monitor.h57
-rw-r--r--chromium/media/base/user_input_monitor_linux.cc56
-rw-r--r--chromium/media/base/user_input_monitor_mac.cc47
-rw-r--r--chromium/media/base/user_input_monitor_unittest.cc52
-rw-r--r--chromium/media/base/user_input_monitor_win.cc59
-rw-r--r--chromium/media/base/video_bitrate_allocation.cc54
-rw-r--r--chromium/media/base/video_bitrate_allocation.h43
-rw-r--r--chromium/media/base/video_bitrate_allocation_unittest.cc70
-rw-r--r--chromium/media/base/video_decoder_config.cc16
-rw-r--r--chromium/media/base/video_decoder_config.h5
-rw-r--r--chromium/media/base/video_facing.h2
-rw-r--r--chromium/media/base/video_frame.cc2
-rw-r--r--chromium/media/base/video_frame_metadata.h15
-rw-r--r--chromium/media/base/video_util.cc43
-rw-r--r--chromium/media/base/video_util.h29
-rw-r--r--chromium/media/base/video_util_unittest.cc62
-rw-r--r--chromium/media/base/watch_time_keys.cc85
-rw-r--r--chromium/media/base/watch_time_keys.h17
-rw-r--r--chromium/media/blink/BUILD.gn7
-rw-r--r--chromium/media/blink/DEPS2
-rw-r--r--chromium/media/blink/key_system_config_selector.cc23
-rw-r--r--chromium/media/blink/key_system_config_selector.h2
-rw-r--r--chromium/media/blink/key_system_config_selector_unittest.cc16
-rw-r--r--chromium/media/blink/mock_webassociatedurlloader.h2
-rw-r--r--chromium/media/blink/multibuffer_data_source.cc13
-rw-r--r--chromium/media/blink/multibuffer_data_source_unittest.cc53
-rw-r--r--chromium/media/blink/multibuffer_unittest.cc4
-rw-r--r--chromium/media/blink/remote_playback_client_wrapper_impl.cc30
-rw-r--r--chromium/media/blink/remote_playback_client_wrapper_impl.h36
-rw-r--r--chromium/media/blink/resource_multibuffer_data_provider.cc2
-rw-r--r--chromium/media/blink/resource_multibuffer_data_provider.h2
-rw-r--r--chromium/media/blink/resource_multibuffer_data_provider_unittest.cc2
-rw-r--r--chromium/media/blink/run_all_unittests.cc85
-rw-r--r--chromium/media/blink/texttrack_impl.cc23
-rw-r--r--chromium/media/blink/texttrack_impl.h8
-rw-r--r--chromium/media/blink/url_index_unittest.cc3
-rw-r--r--chromium/media/blink/video_decode_stats_reporter.h1
-rw-r--r--chromium/media/blink/video_decode_stats_reporter_unittest.cc26
-rw-r--r--chromium/media/blink/video_frame_compositor.cc9
-rw-r--r--chromium/media/blink/video_frame_compositor.h6
-rw-r--r--chromium/media/blink/video_frame_compositor_unittest.cc16
-rw-r--r--chromium/media/blink/watch_time_reporter.cc151
-rw-r--r--chromium/media/blink/watch_time_reporter.h17
-rw-r--r--chromium/media/blink/watch_time_reporter_unittest.cc491
-rw-r--r--chromium/media/blink/webaudiosourceprovider_impl.cc70
-rw-r--r--chromium/media/blink/webaudiosourceprovider_impl.h10
-rw-r--r--chromium/media/blink/webaudiosourceprovider_impl_unittest.cc3
-rw-r--r--chromium/media/blink/webcontentdecryptionmodulesession_impl.cc11
-rw-r--r--chromium/media/blink/webencryptedmediaclient_impl.cc1
-rw-r--r--chromium/media/blink/webmediaplayer_delegate.h41
-rw-r--r--chromium/media/blink/webmediaplayer_impl.cc230
-rw-r--r--chromium/media/blink/webmediaplayer_impl.h42
-rw-r--r--chromium/media/blink/webmediaplayer_impl_unittest.cc269
-rw-r--r--chromium/media/blink/webmediaplayer_params.cc8
-rw-r--r--chromium/media/blink/webmediaplayer_params.h25
-rw-r--r--chromium/media/capture/BUILD.gn20
-rw-r--r--chromium/media/capture/content/OWNERS2
-rw-r--r--chromium/media/capture/content/screen_capture_device_core.cc3
-rw-r--r--chromium/media/capture/content/thread_safe_capture_oracle.cc11
-rw-r--r--chromium/media/capture/ipc/capture_param_traits.cc4
-rw-r--r--chromium/media/capture/ipc/capture_param_traits_macros.h3
-rw-r--r--chromium/media/capture/mojom/BUILD.gn2
-rw-r--r--chromium/media/capture/mojom/video_capture.mojom18
-rw-r--r--chromium/media/capture/mojom/video_capture_types.mojom53
-rw-r--r--chromium/media/capture/mojom/video_capture_types.typemap2
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.cc170
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.h23
-rw-r--r--chromium/media/capture/video/android/BUILD.gn1
-rw-r--r--chromium/media/capture/video/android/video_capture_device_android.cc10
-rw-r--r--chromium/media/capture/video/android/video_capture_device_factory_android.cc19
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.cc327
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.h83
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller_unittest.cc502
-rw-r--r--chromium/media/capture/video/chromeos/camera_buffer_factory.cc9
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_context.cc13
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_context.h10
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc249
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.h30
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc102
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc55
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.h13
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc7
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc38
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h4
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc1
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.cc58
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.h15
-rw-r--r--chromium/media/capture/video/chromeos/display_rotation_observer.cc11
-rw-r--r--chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc5
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.cc8
-rw-r--r--chromium/media/capture/video/chromeos/mock_gpu_memory_buffer_manager.cc53
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.cc16
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.h11
-rw-r--r--chromium/media/capture/video/chromeos/pixel_format_utils.cc5
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.cc671
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.h207
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager_unittest.cc290
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc4
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h3
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h1
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.cc4
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.h3
-rw-r--r--chromium/media/capture/video/fake_video_capture_device_unittest.cc11
-rw-r--r--chromium/media/capture/video/file_video_capture_device_unittest.cc21
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc17
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc128
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.h (renamed from chromium/media/capture/video/chromeos/mock_gpu_memory_buffer_manager.h)38
-rw-r--r--chromium/media/capture/video/mock_video_frame_receiver.h6
-rw-r--r--chromium/media/capture/video/shared_memory_buffer_tracker.cc7
-rw-r--r--chromium/media/capture/video/shared_memory_buffer_tracker.h4
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool.h4
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.cc21
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.h5
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker.h15
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker_factory.h3
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc3
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h3
-rw-r--r--chromium/media/capture/video/video_capture_device.h18
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc170
-rw-r--r--chromium/media/capture/video/video_capture_device_client.h10
-rw-r--r--chromium/media/capture/video/video_capture_device_client_unittest.cc97
-rw-r--r--chromium/media/capture/video/video_capture_device_descriptor.cc6
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc40
-rw-r--r--chromium/media/capture/video/video_capture_system_impl.cc13
-rw-r--r--chromium/media/capture/video/video_frame_receiver.h11
-rw-r--r--chromium/media/capture/video/video_frame_receiver_on_task_runner.cc9
-rw-r--r--chromium/media/capture/video/video_frame_receiver_on_task_runner.h6
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc338
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.h28
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc46
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc19
-rw-r--r--chromium/media/capture/video_capture_types.cc34
-rw-r--r--chromium/media/capture/video_capture_types.h13
-rw-r--r--chromium/media/cast/OWNERS2
-rw-r--r--chromium/media/cast/cast_sender_impl.cc1
-rw-r--r--chromium/media/cast/net/cast_transport_config.h5
-rw-r--r--chromium/media/cast/net/pacing/mock_paced_packet_sender.h2
-rw-r--r--chromium/media/cast/net/pacing/paced_sender.cc1
-rw-r--r--chromium/media/cast/net/rtp/mock_rtp_payload_feedback.h2
-rw-r--r--chromium/media/cast/net/udp_transport_impl.cc1
-rw-r--r--chromium/media/cast/receiver/audio_decoder_unittest.cc4
-rw-r--r--chromium/media/cast/receiver/cast_receiver_impl.cc1
-rw-r--r--chromium/media/cast/receiver/frame_receiver.cc1
-rw-r--r--chromium/media/cast/sender/audio_encoder.cc3
-rw-r--r--chromium/media/cast/sender/audio_sender.cc1
-rw-r--r--chromium/media/cast/sender/external_video_encoder.cc1
-rw-r--r--chromium/media/cast/sender/h264_vt_encoder_unittest.cc3
-rw-r--r--chromium/media/cast/sender/video_encoder_impl.cc1
-rw-r--r--chromium/media/cdm/BUILD.gn15
-rw-r--r--chromium/media/cdm/DEPS1
-rw-r--r--chromium/media/cdm/aes_cbc_crypto.cc99
-rw-r--r--chromium/media/cdm/aes_cbc_crypto.h53
-rw-r--r--chromium/media/cdm/aes_cbc_crypto_unittest.cc208
-rw-r--r--chromium/media/cdm/aes_decryptor.cc176
-rw-r--r--chromium/media/cdm/aes_decryptor.h4
-rw-r--r--chromium/media/cdm/aes_decryptor_unittest.cc94
-rw-r--r--chromium/media/cdm/api/README3
-rw-r--r--chromium/media/cdm/api/README.md24
-rw-r--r--chromium/media/cdm/api/content_decryption_module.h417
-rw-r--r--chromium/media/cdm/cbcs_decryptor.cc184
-rw-r--r--chromium/media/cdm/cbcs_decryptor.h55
-rw-r--r--chromium/media/cdm/cbcs_decryptor_fuzzer.cc75
-rw-r--r--chromium/media/cdm/cbcs_decryptor_unittest.cc408
-rw-r--r--chromium/media/cdm/cdm_adapter.cc205
-rw-r--r--chromium/media/cdm/cdm_adapter.h49
-rw-r--r--chromium/media/cdm/cdm_adapter_factory.cc14
-rw-r--r--chromium/media/cdm/cdm_adapter_unittest.cc378
-rw-r--r--chromium/media/cdm/cdm_wrapper.h188
-rw-r--r--chromium/media/cdm/cenc_decryptor.cc152
-rw-r--r--chromium/media/cdm/cenc_decryptor.h46
-rw-r--r--chromium/media/cdm/cenc_decryptor_fuzzer.cc66
-rw-r--r--chromium/media/cdm/cenc_decryptor_unittest.cc269
-rw-r--r--chromium/media/cdm/external_clear_key_test_helper.cc2
-rw-r--r--chromium/media/cdm/library_cdm/BUILD.gn37
-rw-r--r--chromium/media/cdm/library_cdm/cdm_host_proxy.h (renamed from chromium/media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h)6
-rw-r--r--chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h (renamed from chromium/media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy_impl.h)19
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn12
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.cc2
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.cc2
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc113
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h6
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc24
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h13
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.h2
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc2
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_video_decoder.cc2
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.cc13
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.h2
-rw-r--r--chromium/media/cdm/library_cdm/mock_library_cdm.cc100
-rw-r--r--chromium/media/cdm/library_cdm/mock_library_cdm.h148
-rw-r--r--chromium/media/cdm/mock_helpers.cc6
-rw-r--r--chromium/media/cdm/mock_helpers.h2
-rw-r--r--chromium/media/cdm/simple_cdm_allocator_unittest.cc10
-rw-r--r--chromium/media/cdm/supported_cdm_versions.cc68
-rw-r--r--chromium/media/cdm/supported_cdm_versions.h146
-rw-r--r--chromium/media/device_monitors/device_monitor_mac.h8
-rw-r--r--chromium/media/device_monitors/device_monitor_mac.mm10
-rw-r--r--chromium/media/device_monitors/device_monitor_udev.cc2
-rw-r--r--chromium/media/device_monitors/device_monitor_udev.h4
-rw-r--r--chromium/media/ffmpeg/ffmpeg_common.h5
-rw-r--r--chromium/media/filters/BUILD.gn14
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.cc16
-rw-r--r--chromium/media/filters/aom_video_decoder.cc13
-rw-r--r--chromium/media/filters/aom_video_decoder_unittest.cc3
-rw-r--r--chromium/media/filters/audio_decoder_selector_unittest.cc5
-rw-r--r--chromium/media/filters/audio_decoder_unittest.cc3
-rw-r--r--chromium/media/filters/audio_file_reader_unittest.cc2
-rw-r--r--chromium/media/filters/audio_renderer_algorithm_unittest.cc5
-rw-r--r--chromium/media/filters/audio_timestamp_validator.cc10
-rw-r--r--chromium/media/filters/audio_timestamp_validator.h3
-rw-r--r--chromium/media/filters/audio_timestamp_validator_unittest.cc10
-rw-r--r--chromium/media/filters/audio_video_metadata_extractor_unittest.cc2
-rw-r--r--chromium/media/filters/blocking_url_protocol_unittest.cc4
-rw-r--r--chromium/media/filters/chunk_demuxer.cc90
-rw-r--r--chromium/media/filters/chunk_demuxer.h21
-rw-r--r--chromium/media/filters/chunk_demuxer_unittest.cc63
-rw-r--r--chromium/media/filters/decoder_selector.cc8
-rw-r--r--chromium/media/filters/decoder_stream.cc31
-rw-r--r--chromium/media/filters/decoder_stream.h5
-rw-r--r--chromium/media/filters/decoder_stream_traits.cc29
-rw-r--r--chromium/media/filters/decoder_stream_traits.h11
-rw-r--r--chromium/media/filters/decrypting_audio_decoder_unittest.cc8
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.cc18
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream_unittest.cc22
-rw-r--r--chromium/media/filters/decrypting_video_decoder_unittest.cc8
-rw-r--r--chromium/media/filters/fake_video_decoder_unittest.cc5
-rw-r--r--chromium/media/filters/ffmpeg_audio_decoder.cc16
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.cc169
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.h21
-rw-r--r--chromium/media/filters/ffmpeg_demuxer_unittest.cc157
-rw-r--r--chromium/media/filters/ffmpeg_glue_unittest.cc10
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder.cc3
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder_unittest.cc7
-rw-r--r--chromium/media/filters/file_data_source_unittest.cc2
-rw-r--r--chromium/media/filters/gpu_video_decoder.cc76
-rw-r--r--chromium/media/filters/h264_bitstream_buffer.cc4
-rw-r--r--chromium/media/filters/h264_bitstream_buffer.h17
-rw-r--r--chromium/media/filters/h264_bitstream_buffer_unittest.cc10
-rw-r--r--chromium/media/filters/jpeg_parser_unittest.cc4
-rw-r--r--chromium/media/filters/media_file_checker_unittest.cc2
-rw-r--r--chromium/media/filters/offloading_video_decoder_unittest.cc13
-rw-r--r--chromium/media/filters/pipeline_controller.cc64
-rw-r--r--chromium/media/filters/pipeline_controller.h25
-rw-r--r--chromium/media/filters/pipeline_controller_unittest.cc73
-rw-r--r--chromium/media/filters/stream_parser_factory.cc26
-rw-r--r--chromium/media/filters/video_decoder_selector_unittest.cc4
-rw-r--r--chromium/media/filters/video_frame_stream_unittest.cc1
-rw-r--r--chromium/media/filters/vp8_parser.cc2
-rw-r--r--chromium/media/filters/vp8_parser.h13
-rw-r--r--chromium/media/filters/vpx_video_decoder.cc1
-rw-r--r--chromium/media/filters/vpx_video_decoder_fuzzertest.cc4
-rw-r--r--chromium/media/filters/vpx_video_decoder_unittest.cc3
-rw-r--r--chromium/media/formats/BUILD.gn22
-rw-r--r--chromium/media/formats/ac3/ac3_util.cc10
-rw-r--r--chromium/media/formats/mp2t/es_parser_adts.cc10
-rw-r--r--chromium/media/formats/mp2t/es_parser_h264.cc28
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser.cc36
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser.h7
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc3
-rw-r--r--chromium/media/formats/mp2t/ts_section_cets_ecm.cc9
-rw-r--r--chromium/media/formats/mp2t/ts_section_cets_ecm.h20
-rw-r--r--chromium/media/formats/mp4/box_definitions.cc25
-rw-r--r--chromium/media/formats/mp4/box_definitions.h8
-rw-r--r--chromium/media/formats/mp4/box_reader.cc9
-rw-r--r--chromium/media/formats/mp4/box_reader.h12
-rw-r--r--chromium/media/formats/mp4/fourccs.h5
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser.cc103
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser.h7
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser_unittest.cc60
-rw-r--r--chromium/media/formats/mp4/track_run_iterator.cc87
-rw-r--r--chromium/media/formats/mp4/track_run_iterator.h1
-rw-r--r--chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc1
-rw-r--r--chromium/media/formats/webm/webm_crypto_helpers.cc15
-rw-r--r--chromium/media/formats/webm/webm_crypto_helpers.h10
-rw-r--r--chromium/media/formats/webm/webm_crypto_helpers_unittest.cc10
-rw-r--r--chromium/media/formats/webm/webm_parser_unittest.cc2
-rw-r--r--chromium/media/formats/webm/webm_stream_parser_unittest.cc32
-rw-r--r--chromium/media/formats/webm/webm_video_client.cc2
-rw-r--r--chromium/media/gpu/BUILD.gn73
-rw-r--r--chromium/media/gpu/DEPS3
-rw-r--r--chromium/media/gpu/accelerated_video_decoder.h12
-rw-r--r--chromium/media/gpu/android/android_image_reader_abi.h95
-rw-r--r--chromium/media/gpu/android/android_image_reader_compat.cc138
-rw-r--r--chromium/media/gpu/android/android_image_reader_compat.h75
-rw-r--r--chromium/media/gpu/android/android_image_reader_compat_unittest.cc47
-rw-r--r--chromium/media/gpu/android/android_video_decode_accelerator.cc215
-rw-r--r--chromium/media/gpu/android/android_video_decode_accelerator.h8
-rw-r--r--chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc120
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser.h13
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl.cc63
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl.h12
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc133
-rw-r--r--chromium/media/gpu/android/avda_codec_allocator.h1
-rw-r--r--chromium/media/gpu/android/avda_codec_image.cc41
-rw-r--r--chromium/media/gpu/android/avda_codec_image.h25
-rw-r--r--chromium/media/gpu/android/avda_picture_buffer_manager.cc40
-rw-r--r--chromium/media/gpu/android/avda_picture_buffer_manager.h8
-rw-r--r--chromium/media/gpu/android/avda_shared_state.cc21
-rw-r--r--chromium/media/gpu/android/avda_shared_state.h38
-rw-r--r--chromium/media/gpu/android/avda_surface_bundle.cc23
-rw-r--r--chromium/media/gpu/android/avda_surface_bundle.h13
-rw-r--r--chromium/media/gpu/android/codec_image.cc71
-rw-r--r--chromium/media/gpu/android/codec_image.h31
-rw-r--r--chromium/media/gpu/android/codec_image_group_unittest.cc1
-rw-r--r--chromium/media/gpu/android/codec_image_unittest.cc90
-rw-r--r--chromium/media/gpu/android/codec_wrapper.cc5
-rw-r--r--chromium/media/gpu/android/codec_wrapper_unittest.cc1
-rw-r--r--chromium/media/gpu/android/command_buffer_stub_wrapper.h39
-rw-r--r--chromium/media/gpu/android/command_buffer_stub_wrapper_impl.cc30
-rw-r--r--chromium/media/gpu/android/command_buffer_stub_wrapper_impl.h31
-rw-r--r--chromium/media/gpu/android/fake_codec_allocator.cc8
-rw-r--r--chromium/media/gpu/android/fake_codec_allocator.h15
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.cc118
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.h10
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder_unittest.cc376
-rw-r--r--chromium/media/gpu/android/mock_android_video_surface_chooser.cc8
-rw-r--r--chromium/media/gpu/android/mock_android_video_surface_chooser.h8
-rw-r--r--chromium/media/gpu/android/mock_command_buffer_stub_wrapper.cc30
-rw-r--r--chromium/media/gpu/android/mock_command_buffer_stub_wrapper.h37
-rw-r--r--chromium/media/gpu/android/mock_texture_owner.cc (renamed from chromium/media/gpu/android/mock_surface_texture_gl_owner.cc)23
-rw-r--r--chromium/media/gpu/android/mock_texture_owner.h (renamed from chromium/media/gpu/android/mock_surface_texture_gl_owner.h)18
-rw-r--r--chromium/media/gpu/android/surface_chooser_helper.cc8
-rw-r--r--chromium/media/gpu/android/surface_chooser_helper.h12
-rw-r--r--chromium/media/gpu/android/surface_chooser_helper_unittest.cc13
-rw-r--r--chromium/media/gpu/android/surface_texture_gl_owner.cc41
-rw-r--r--chromium/media/gpu/android/surface_texture_gl_owner.h84
-rw-r--r--chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc4
-rw-r--r--chromium/media/gpu/android/texture_owner.cc18
-rw-r--r--chromium/media/gpu/android/texture_owner.h80
-rw-r--r--chromium/media/gpu/android/texture_pool.cc87
-rw-r--r--chromium/media/gpu/android/texture_pool.h30
-rw-r--r--chromium/media/gpu/android/texture_pool_unittest.cc110
-rw-r--r--chromium/media/gpu/android/video_frame_factory.h10
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.cc56
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.h17
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl_unittest.cc30
-rw-r--r--chromium/media/gpu/codec_picture.cc12
-rw-r--r--chromium/media/gpu/codec_picture.h14
-rw-r--r--chromium/media/gpu/command_buffer_helper.cc241
-rw-r--r--chromium/media/gpu/command_buffer_helper.h135
-rw-r--r--chromium/media/gpu/fake_command_buffer_helper.cc141
-rw-r--r--chromium/media/gpu/fake_command_buffer_helper.h80
-rw-r--r--chromium/media/gpu/gles2_decoder_helper.cc113
-rw-r--r--chromium/media/gpu/gles2_decoder_helper.h18
-rw-r--r--chromium/media/gpu/gpu_jpeg_decode_accelerator_factory.cc1
-rw-r--r--chromium/media/gpu/gpu_jpeg_encode_accelerator_factory.cc1
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_factory.cc30
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_factory.h26
-rw-r--r--chromium/media/gpu/gpu_video_encode_accelerator_factory.cc6
-rw-r--r--chromium/media/gpu/h264_decoder.cc18
-rw-r--r--chromium/media/gpu/h264_decoder.h5
-rw-r--r--chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc3
-rw-r--r--chromium/media/gpu/ipc/service/BUILD.gn21
-rw-r--r--chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc6
-rw-r--r--chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.h2
-rw-r--r--chromium/media/gpu/ipc/service/picture_buffer_manager.cc329
-rw-r--r--chromium/media/gpu/ipc/service/picture_buffer_manager.h120
-rw-r--r--chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc201
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.cc758
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.h228
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc405
-rw-r--r--chromium/media/gpu/jpeg_decode_accelerator_unittest.cc2
-rw-r--r--chromium/media/gpu/jpeg_encode_accelerator_unittest.cc2
-rw-r--r--chromium/media/gpu/rendering_helper.cc559
-rw-r--r--chromium/media/gpu/rendering_helper.h186
-rw-r--r--chromium/media/gpu/shared_memory_region.cc17
-rw-r--r--chromium/media/gpu/shared_memory_region.h10
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc58
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h6
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc68
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc6
-rw-r--r--chromium/media/gpu/vaapi/BUILD.gn6
-rw-r--r--chromium/media/gpu/vaapi/accelerated_video_encoder.cc50
-rw-r--r--chromium/media/gpu/vaapi/accelerated_video_encoder.h148
-rw-r--r--chromium/media/gpu/vaapi/h264_encoder.cc482
-rw-r--r--chromium/media/gpu/vaapi/h264_encoder.h163
-rw-r--r--chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc6
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc4
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc20
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc32
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h4
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc7
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc261
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc1457
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h199
-rw-r--r--chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc13
-rw-r--r--chromium/media/gpu/vaapi/vaapi_vp8_accelerator.h7
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.cc14
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.h2
-rw-r--r--chromium/media/gpu/vaapi/vp8_encoder.cc181
-rw-r--r--chromium/media/gpu/vaapi/vp8_encoder.h109
-rw-r--r--chromium/media/gpu/video_accelerator_unittest_helpers.h62
-rw-r--r--chromium/media/gpu/video_decode_accelerator_unittest.cc901
-rw-r--r--chromium/media/gpu/video_encode_accelerator_unittest.cc109
-rw-r--r--chromium/media/gpu/vp8_decoder.cc103
-rw-r--r--chromium/media/gpu/vp8_decoder.h29
-rw-r--r--chromium/media/gpu/vp8_picture.cc2
-rw-r--r--chromium/media/gpu/vp8_reference_frame_vector.cc102
-rw-r--r--chromium/media/gpu/vp8_reference_frame_vector.h38
-rw-r--r--chromium/media/gpu/vp9_decoder.cc11
-rw-r--r--chromium/media/gpu/vp9_decoder.h5
-rw-r--r--chromium/media/gpu/vt_video_decode_accelerator_mac.cc269
-rw-r--r--chromium/media/gpu/vt_video_decode_accelerator_mac.h24
-rw-r--r--chromium/media/gpu/vt_video_encode_accelerator_mac.cc3
-rw-r--r--chromium/media/gpu/windows/d3d11_cdm_proxy.cc18
-rw-r--r--chromium/media/gpu/windows/d3d11_decryptor.cc71
-rw-r--r--chromium/media/gpu/windows/d3d11_decryptor.h50
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.cc76
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.h29
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_impl.cc83
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_impl.h22
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc147
-rw-r--r--chromium/media/gpu/windows/dxva_picture_buffer_win.cc3
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc64
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h13
-rw-r--r--chromium/media/media_options.gni12
-rw-r--r--chromium/media/midi/midi_manager_alsa.cc1
-rw-r--r--chromium/media/midi/midi_manager_mac.cc1
-rw-r--r--chromium/media/midi/midi_manager_unittest.cc8
-rw-r--r--chromium/media/midi/midi_manager_usb.cc1
-rw-r--r--chromium/media/midi/usb_midi_device_factory_android.cc1
-rw-r--r--chromium/media/mojo/DEPS2
-rw-r--r--chromium/media/mojo/clients/BUILD.gn2
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc5
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_unittest.cc61
-rw-r--r--chromium/media/mojo/clients/mojo_decoder_factory.cc4
-rw-r--r--chromium/media/mojo/clients/mojo_decoder_factory.h2
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor_unittest.cc13
-rw-r--r--chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc1
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.cc10
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.h10
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_unittest.cc6
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.cc96
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.h5
-rw-r--r--chromium/media/mojo/common/BUILD.gn1
-rw-r--r--chromium/media/mojo/common/media_type_converters.cc53
-rw-r--r--chromium/media/mojo/common/media_type_converters.h14
-rw-r--r--chromium/media/mojo/common/media_type_converters_unittest.cc53
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc16
-rw-r--r--chromium/media/mojo/interfaces/BUILD.gn1
-rw-r--r--chromium/media/mojo/interfaces/audio_input_stream.mojom15
-rw-r--r--chromium/media/mojo/interfaces/audio_output_stream.mojom47
-rw-r--r--chromium/media/mojo/interfaces/interface_factory.mojom15
-rw-r--r--chromium/media/mojo/interfaces/jpeg_decode_accelerator_typemap_traits.cc5
-rw-r--r--chromium/media/mojo/interfaces/key_system_support.mojom10
-rw-r--r--chromium/media/mojo/interfaces/media_types.mojom6
-rw-r--r--chromium/media/mojo/interfaces/media_types.typemap4
-rw-r--r--chromium/media/mojo/interfaces/video_decoder.mojom4
-rw-r--r--chromium/media/mojo/interfaces/watch_time_recorder.mojom1
-rw-r--r--chromium/media/mojo/services/BUILD.gn11
-rw-r--r--chromium/media/mojo/services/OWNERS3
-rw-r--r--chromium/media/mojo/services/cdm_manifest.json2
-rw-r--r--chromium/media/mojo/services/cdm_service.cc43
-rw-r--r--chromium/media/mojo/services/cdm_service.h8
-rw-r--r--chromium/media/mojo/services/cdm_service_unittest.cc91
-rw-r--r--chromium/media/mojo/services/cdm_service_unittest_manifest.json2
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.cc26
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.h6
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.cc128
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.h21
-rw-r--r--chromium/media/mojo/services/media_manifest.json2
-rw-r--r--chromium/media/mojo/services/media_resource_shim.cc4
-rw-r--r--chromium/media/mojo/services/media_resource_shim.h1
-rw-r--r--chromium/media/mojo/services/media_service.h5
-rw-r--r--chromium/media/mojo/services/media_service_factory.cc5
-rw-r--r--chromium/media/mojo/services/media_service_factory.h2
-rw-r--r--chromium/media/mojo/services/media_service_unittest.cc319
-rw-r--r--chromium/media/mojo/services/mojo_audio_decoder_service.cc2
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream.cc20
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream.h4
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream_observer.cc31
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream_observer.h35
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream_observer_unittest.cc64
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc44
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream.cc45
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream.h23
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider.cc38
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider.h12
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc38
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc98
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service.cc15
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service_context.cc11
-rw-r--r--chromium/media/mojo/services/mojo_decryptor_service.cc35
-rw-r--r--chromium/media/mojo/services/mojo_decryptor_service.h27
-rw-r--r--chromium/media/mojo/services/mojo_jpeg_decode_accelerator_service_unittest.cc2
-rw-r--r--chromium/media/mojo/services/mojo_jpeg_encode_accelerator_service.cc9
-rw-r--r--chromium/media/mojo/services/mojo_media_client.cc21
-rw-r--r--chromium/media/mojo/services/mojo_media_client.h32
-rw-r--r--chromium/media/mojo/services/mojo_media_log.cc5
-rw-r--r--chromium/media/mojo/services/mojo_media_log.h6
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.cc29
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.h27
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.cc67
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.h6
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.cc60
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.h25
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.cc90
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.h3
-rw-r--r--chromium/media/mojo/services/watch_time_recorder_unittest.cc67
-rw-r--r--chromium/media/muxers/webm_muxer.cc6
-rw-r--r--chromium/media/muxers/webm_muxer_fuzzertest.cc2
-rw-r--r--chromium/media/muxers/webm_muxer_unittest.cc8
-rw-r--r--chromium/media/remoting/OWNERS1
-rw-r--r--chromium/media/remoting/courier_renderer.cc1
-rw-r--r--chromium/media/remoting/courier_renderer.h1
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter_unittest.cc4
-rw-r--r--chromium/media/remoting/end2end_test_renderer.cc14
-rw-r--r--chromium/media/remoting/end2end_test_renderer.h8
-rw-r--r--chromium/media/remoting/fake_media_resource.cc5
-rw-r--r--chromium/media/remoting/fake_media_resource.h1
-rw-r--r--chromium/media/remoting/metrics.cc6
-rw-r--r--chromium/media/remoting/proto_enum_utils.cc28
-rw-r--r--chromium/media/remoting/proto_enum_utils.h4
-rw-r--r--chromium/media/remoting/proto_utils.cc36
-rw-r--r--chromium/media/remoting/rpc.proto14
-rw-r--r--chromium/media/remoting/stream_provider.h1
-rw-r--r--chromium/media/renderers/BUILD.gn7
-rw-r--r--chromium/media/renderers/audio_renderer_impl.cc15
-rw-r--r--chromium/media/renderers/audio_renderer_impl_unittest.cc17
-rw-r--r--chromium/media/renderers/default_decoder_factory.cc131
-rw-r--r--chromium/media/renderers/default_decoder_factory.h44
-rw-r--r--chromium/media/renderers/default_renderer_factory.cc100
-rw-r--r--chromium/media/renderers/flinging_renderer_client_factory.cc41
-rw-r--r--chromium/media/renderers/flinging_renderer_client_factory.h55
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.cc24
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.h7
-rw-r--r--chromium/media/renderers/remote_playback_client_wrapper.h22
-rw-r--r--chromium/media/renderers/renderer_impl.cc448
-rw-r--r--chromium/media/renderers/renderer_impl.h54
-rw-r--r--chromium/media/renderers/renderer_impl_unittest.cc484
-rw-r--r--chromium/media/renderers/video_renderer_impl.cc1
-rw-r--r--chromium/media/renderers/video_renderer_impl_unittest.cc11
-rw-r--r--chromium/media/test/BUILD.gn10
-rw-r--r--chromium/media/video/BUILD.gn1
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool.cc220
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc65
-rw-r--r--chromium/media/video/gpu_video_accelerator_factories.h13
-rw-r--r--chromium/media/video/h264_parser.cc53
-rw-r--r--chromium/media/video/h264_parser.h10
-rw-r--r--chromium/media/video/h264_parser_unittest.cc107
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.cc9
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.h14
-rw-r--r--chromium/media/video/mock_video_decode_accelerator.h4
-rw-r--r--chromium/media/video/mock_video_encode_accelerator.h2
-rw-r--r--chromium/media/video/picture.cc2
-rw-r--r--chromium/media/video/picture.h8
-rw-r--r--chromium/media/video/video_decode_accelerator.cc5
-rw-r--r--chromium/media/video/video_decode_accelerator.h13
-rw-r--r--chromium/media/video/video_encode_accelerator.cc6
-rw-r--r--chromium/media/video/video_encode_accelerator.h13
788 files changed, 24704 insertions, 11430 deletions
diff --git a/chromium/media/BUILD.gn b/chromium/media/BUILD.gn
index a5d98ccdd68..e4debbaa0ed 100644
--- a/chromium/media/BUILD.gn
+++ b/chromium/media/BUILD.gn
@@ -29,11 +29,11 @@ buildflag_header("media_buildflags") {
"ENABLE_LIBRARY_CDMS=$enable_library_cdms",
"ENABLE_LIBVPX=$media_use_libvpx",
"ENABLE_MSE_MPEG2TS_STREAM_PARSER=$enable_mse_mpeg2ts_stream_parser",
+ "ENABLE_MPEG_H_AUDIO_DEMUXING=$enable_mpeg_h_audio_demuxing",
"ENABLE_RUNTIME_MEDIA_RENDERER_SELECTION=$enable_runtime_media_renderer_selection",
"ENABLE_CDM_STORAGE_ID=$enable_cdm_storage_id",
"ENABLE_MEDIA_REMOTING=$enable_media_remoting",
"ENABLE_MEDIA_REMOTING_RPC=$enable_media_remoting_rpc",
- "ENABLE_WEBRTC=$enable_webrtc",
"USE_PROPRIETARY_CODECS=$proprietary_codecs",
]
}
@@ -124,6 +124,7 @@ source_set("test_support") {
"//media/base/android:test_support",
"//media/filters:test_support",
"//media/formats:test_support",
+ "//media/gpu:test_support",
"//media/video:test_support",
]
}
@@ -173,9 +174,8 @@ test("media_unittests") {
deps += [ "//media/remoting:media_remoting_tests" ]
}
- if (proprietary_codecs) {
- configs += [ "//third_party/opus:opus_config" ]
- }
+ # The test needs OPUS_FIXED_POINT conditional define.
+ configs += [ "//third_party/opus:opus_config" ]
}
test("media_perftests") {
@@ -202,12 +202,11 @@ test("media_perftests") {
data = [
"test/data/",
+ ]
+ data_deps = [
# Needed for isolate script to execute.
- "//testing/scripts/common.py",
- "//testing/xvfb.py",
- "//testing/scripts/run_gtest_perf_test.py",
- "//tools/perf/generate_legacy_perf_dashboard_json.py",
+ "//testing:run_perf_test",
]
}
@@ -248,6 +247,8 @@ component("shared_memory_support") {
"base/channel_layout.h",
"base/limits.h",
"base/media_shmem_export.h",
+ "base/sample_format.cc",
+ "base/sample_format.h",
"base/vector_math.cc",
"base/vector_math.h",
"base/vector_math_testing.h",
@@ -352,6 +353,28 @@ fuzzer_test("media_webm_muxer_fuzzer") {
]
}
+fuzzer_test("cbcs_decryptor_fuzzer") {
+ sources = [
+ "cdm/cbcs_decryptor_fuzzer.cc",
+ ]
+ deps = [
+ ":media",
+ "//base",
+ "//crypto",
+ ]
+}
+
+fuzzer_test("cenc_decryptor_fuzzer") {
+ sources = [
+ "cdm/cenc_decryptor_fuzzer.cc",
+ ]
+ deps = [
+ ":media",
+ "//base",
+ "//crypto",
+ ]
+}
+
if (proprietary_codecs) {
fuzzer_test("media_mp4_avcc_parser_fuzzer") {
sources = [
diff --git a/chromium/media/DEPS b/chromium/media/DEPS
index bf3b007c358..cae84d3e449 100644
--- a/chromium/media/DEPS
+++ b/chromium/media/DEPS
@@ -8,6 +8,7 @@ include_rules = [
"+gpu",
"+jni",
"+mojo/public/cpp/bindings/callback_helpers.h",
+ "+services/ui/public/cpp/gpu/context_provider_command_buffer.h",
"+skia/ext",
"+third_party/ffmpeg",
"+third_party/libaom",
@@ -28,5 +29,10 @@ include_rules = [
specific_include_rules = {
"audio_manager_unittest.cc": [
"+chromeos/dbus"
+ ],
+ # TODO(https://crbug.com/844508): Remove this dependency once the
+ # AudioOutputDevice shared memory refactor is done.
+ "audio_output_device_unittest.cc": [
+ "+mojo/public/cpp/system/platform_handle.h"
]
}
diff --git a/chromium/media/README.md b/chromium/media/README.md
index 33d71df3799..71ad42ffb92 100644
--- a/chromium/media/README.md
+++ b/chromium/media/README.md
@@ -151,3 +151,33 @@ issues, it's helpful to review the internal logs at chrome://media-internals.
The internals page contains information about active
`media::WebMediaPlayerImpl`, `media::AudioInputController`,
`media::AudioOutputController`, and `media::AudioOutputStream` instances.
+
+
+
+# Logging
+
+Media playback typically involves multiple threads, in many cases even multiple
+processes. Media operations are often asynchronous running in a sandbox. These
+make attaching a debugger (e.g. GDB) sometimes less efficient than other
+mechanisms like logging.
+
+## DVLOG
+
+In media we use DVLOG() a lot. It makes filename-based filtering super easy.
+Within one file, not all logs are created equal. To make log filtering
+more convenient, use appropriate log levels. Here are some general
+recommendations:
+
+* DVLOG(1): Once per playback events or other important events, e.g.
+ construction/destruction, initialization, playback start/end, suspend/resume,
+ any error conditions.
+* DVLOG(2): Recurring events per playback, e.g. seek/reset/flush, config change.
+* DVLOG(3): Frequent events, e.g. demuxer read, audio/video buffer decrypt or
+ decode, audio/video frame rendering.
+
+## MediaLog
+
+MediaLog will send logs to `about://media-internals`, which is easily accessible
+by developers (including web developes), testers and even users to get detailed
+information about a playback instance. For guidance on how to use MediaLog, see
+`media/base/media_log.h`. \ No newline at end of file
diff --git a/chromium/media/audio/BUILD.gn b/chromium/media/audio/BUILD.gn
index 35e67c9c00d..8754161b0c9 100644
--- a/chromium/media/audio/BUILD.gn
+++ b/chromium/media/audio/BUILD.gn
@@ -92,6 +92,8 @@ source_set("audio") {
"audio_input_device.h",
"audio_input_ipc.cc",
"audio_input_ipc.h",
+ "audio_input_stream_data_interceptor.cc",
+ "audio_input_stream_data_interceptor.h",
"audio_input_sync_writer.cc",
"audio_input_sync_writer.h",
"audio_io.h",
@@ -122,6 +124,7 @@ source_set("audio") {
"audio_source_diverter.h",
"audio_sync_reader.cc",
"audio_sync_reader.h",
+ "audio_system.cc",
"audio_system.h",
"audio_system_helper.cc",
"audio_system_helper.h",
@@ -220,9 +223,13 @@ source_set("audio") {
]
libs += [
+ "dmoguids.lib",
"dxguid.lib",
+ "msdmo.lib",
"setupapi.lib",
+ "strmiids.lib",
"winmm.lib",
+ "wmcodecdspuuid.lib",
]
}
@@ -311,13 +318,6 @@ source_set("audio") {
libs += [ "media_client" ]
}
- if (enable_webrtc) {
- sources += [
- "audio_input_stream_data_interceptor.cc",
- "audio_input_stream_data_interceptor.h",
- ]
- }
-
configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
}
@@ -385,6 +385,7 @@ source_set("unit_tests") {
"audio_debug_recording_session_impl_unittest.cc",
"audio_input_controller_unittest.cc",
"audio_input_device_unittest.cc",
+ "audio_input_stream_data_interceptor_unittest.cc",
"audio_input_sync_writer_unittest.cc",
"audio_input_unittest.cc",
"audio_manager_unittest.cc",
@@ -405,6 +406,12 @@ source_set("unit_tests") {
"//base",
"//base/test:test_support",
"//media:test_support",
+
+ # TODO(https://crbug.com/844508): Mojo is used in the
+ # audio_output_device_unittest.cc for conversion between shared memory
+ # types. Remove this dependency once the AudioOutputDevice shared memory
+ # refactor is done.
+ "//mojo/public/cpp/system:system",
"//testing/gmock",
"//testing/gtest",
"//url",
@@ -416,10 +423,6 @@ source_set("unit_tests") {
"//media:media_config",
]
- if (enable_webrtc) {
- sources += [ "audio_input_stream_data_interceptor_unittest.cc" ]
- }
-
if (is_android) {
sources += [ "android/audio_android_unittest.cc" ]
deps += [ "//ui/gl" ]
diff --git a/chromium/media/audio/alive_checker.cc b/chromium/media/audio/alive_checker.cc
index dd4a53f7c05..7a65c870ac9 100644
--- a/chromium/media/audio/alive_checker.cc
+++ b/chromium/media/audio/alive_checker.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_loop_current.h"
namespace media {
@@ -46,7 +46,7 @@ AliveChecker::AliveChecker(
PowerObserverHelperFactoryCallback power_observer_helper_factory_callback)
: check_interval_(check_interval),
timeout_(timeout),
- task_runner_(base::MessageLoop::current()->task_runner()),
+ task_runner_(base::MessageLoopCurrent::Get()->task_runner()),
dead_callback_(std::move(dead_callback)),
stop_at_first_alive_notification_(stop_at_first_alive_notification),
weak_factory_(this) {
diff --git a/chromium/media/audio/alive_checker.h b/chromium/media/audio/alive_checker.h
index 83e1168bb4f..7a55f4aead5 100644
--- a/chromium/media/audio/alive_checker.h
+++ b/chromium/media/audio/alive_checker.h
@@ -11,6 +11,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
+#include "base/sequenced_task_runner.h"
#include "base/single_thread_task_runner.h"
#include "base/timer/timer.h"
#include "media/audio/power_observer_helper.h"
diff --git a/chromium/media/audio/alive_checker_unittest.cc b/chromium/media/audio/alive_checker_unittest.cc
index 426e7346e41..ac2bf7342bf 100644
--- a/chromium/media/audio/alive_checker_unittest.cc
+++ b/chromium/media/audio/alive_checker_unittest.cc
@@ -6,6 +6,7 @@
#include <utility>
#include "base/bind.h"
+#include "base/sequenced_task_runner.h"
#include "base/synchronization/waitable_event.h"
#include "base/test/scoped_task_environment.h"
#include "base/threading/thread.h"
diff --git a/chromium/media/audio/alsa/alsa_input.cc b/chromium/media/audio/alsa/alsa_input.cc
index 6b11b6889c4..13fb7c6070c 100644
--- a/chromium/media/audio/alsa/alsa_input.cc
+++ b/chromium/media/audio/alsa/alsa_input.cc
@@ -19,6 +19,9 @@
namespace media {
+static const SampleFormat kSampleFormat = kSampleFormatS16;
+static const snd_pcm_format_t kAlsaSampleFormat = SND_PCM_FORMAT_S16;
+
static const int kNumPacketsInRingBuffer = 3;
static const char kDefaultDevice1[] = "default";
@@ -33,8 +36,7 @@ AlsaPcmInputStream::AlsaPcmInputStream(AudioManagerBase* audio_manager,
: audio_manager_(audio_manager),
device_name_(device_name),
params_(params),
- bytes_per_buffer_(params.frames_per_buffer() *
- (params.channels() * params.bits_per_sample()) / 8),
+ bytes_per_buffer_(params.GetBytesPerBuffer(kSampleFormat)),
wrapper_(wrapper),
buffer_duration_(base::TimeDelta::FromMicroseconds(
params.frames_per_buffer() * base::Time::kMicrosecondsPerSecond /
@@ -54,14 +56,6 @@ bool AlsaPcmInputStream::Open() {
if (device_handle_)
return false; // Already open.
- snd_pcm_format_t pcm_format = alsa_util::BitsToFormat(
- params_.bits_per_sample());
- if (pcm_format == SND_PCM_FORMAT_UNKNOWN) {
- LOG(WARNING) << "Unsupported bits per sample: "
- << params_.bits_per_sample();
- return false;
- }
-
uint32_t latency_us =
buffer_duration_.InMicroseconds() * kNumPacketsInRingBuffer;
@@ -72,8 +66,8 @@ bool AlsaPcmInputStream::Open() {
const char* device_names[] = { kDefaultDevice1, kDefaultDevice2 };
for (size_t i = 0; i < arraysize(device_names); ++i) {
device_handle_ = alsa_util::OpenCaptureDevice(
- wrapper_, device_names[i], params_.channels(),
- params_.sample_rate(), pcm_format, latency_us);
+ wrapper_, device_names[i], params_.channels(), params_.sample_rate(),
+ kAlsaSampleFormat, latency_us);
if (device_handle_) {
device_name_ = device_names[i];
@@ -81,11 +75,9 @@ bool AlsaPcmInputStream::Open() {
}
}
} else {
- device_handle_ = alsa_util::OpenCaptureDevice(wrapper_,
- device_name_.c_str(),
- params_.channels(),
- params_.sample_rate(),
- pcm_format, latency_us);
+ device_handle_ = alsa_util::OpenCaptureDevice(
+ wrapper_, device_name_.c_str(), params_.channels(),
+ params_.sample_rate(), kAlsaSampleFormat, latency_us);
}
if (device_handle_) {
@@ -214,8 +206,9 @@ void AlsaPcmInputStream::ReadAudio() {
int frames_read = wrapper_->PcmReadi(device_handle_, audio_buffer_.get(),
params_.frames_per_buffer());
if (frames_read == params_.frames_per_buffer()) {
- audio_bus_->FromInterleaved(audio_buffer_.get(), audio_bus_->frames(),
- params_.bits_per_sample() / 8);
+ audio_bus_->FromInterleaved<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<int16_t*>(audio_buffer_.get()),
+ audio_bus_->frames());
// TODO(dalecurtis): This should probably use snd_pcm_htimestamp() so that
// we can have |capture_time| directly instead of computing it as
@@ -364,6 +357,11 @@ bool AlsaPcmInputStream::IsMuted() {
return false;
}
+void AlsaPcmInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ // Not supported. Do nothing.
+}
+
void AlsaPcmInputStream::HandleError(const char* method, int error) {
LOG(WARNING) << method << ": " << wrapper_->StrError(error);
if (callback_)
diff --git a/chromium/media/audio/alsa/alsa_input.h b/chromium/media/audio/alsa/alsa_input.h
index c30944a3876..09e525f826f 100644
--- a/chromium/media/audio/alsa/alsa_input.h
+++ b/chromium/media/audio/alsa/alsa_input.h
@@ -54,6 +54,7 @@ class MEDIA_EXPORT AlsaPcmInputStream
void SetVolume(double volume) override;
double GetVolume() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
private:
// Logs the error and invokes any registered callbacks.
diff --git a/chromium/media/audio/alsa/alsa_output.cc b/chromium/media/audio/alsa/alsa_output.cc
index 02ac80e2b98..a957846a069 100644
--- a/chromium/media/audio/alsa/alsa_output.cc
+++ b/chromium/media/audio/alsa/alsa_output.cc
@@ -132,6 +132,9 @@ std::ostream& operator<<(std::ostream& os,
return os;
}
+static const SampleFormat kSampleFormat = kSampleFormatS16;
+static const snd_pcm_format_t kAlsaSampleFormat = SND_PCM_FORMAT_S16;
+
const char AlsaPcmOutputStream::kDefaultDevice[] = "default";
const char AlsaPcmOutputStream::kAutoSelectDevice[] = "";
const char AlsaPcmOutputStream::kPlugPrefix[] = "plug:";
@@ -145,13 +148,13 @@ AlsaPcmOutputStream::AlsaPcmOutputStream(const std::string& device_name,
AlsaWrapper* wrapper,
AudioManagerBase* manager)
: requested_device_name_(device_name),
- pcm_format_(alsa_util::BitsToFormat(params.bits_per_sample())),
+ pcm_format_(kAlsaSampleFormat),
channels_(params.channels()),
channel_layout_(params.channel_layout()),
sample_rate_(params.sample_rate()),
- bytes_per_sample_(params.bits_per_sample() / 8),
- bytes_per_frame_(params.GetBytesPerFrame()),
- packet_size_(params.GetBytesPerBuffer()),
+ bytes_per_sample_(SampleFormatToBytesPerChannel(kSampleFormat)),
+ bytes_per_frame_(params.GetBytesPerFrame(kSampleFormat)),
+ packet_size_(params.GetBytesPerBuffer(kSampleFormat)),
latency_(std::max(
base::TimeDelta::FromMicroseconds(kMinLatencyMicros),
AudioTimestampHelper::FramesToTime(params.frames_per_buffer() * 2,
@@ -178,11 +181,6 @@ AlsaPcmOutputStream::AlsaPcmOutputStream(const std::string& device_name,
LOG(WARNING) << "Unsupported audio parameters.";
TransitionTo(kInError);
}
-
- if (pcm_format_ == SND_PCM_FORMAT_UNKNOWN) {
- LOG(WARNING) << "Unsupported bits per sample: " << params.bits_per_sample();
- TransitionTo(kInError);
- }
}
AlsaPcmOutputStream::~AlsaPcmOutputStream() {
@@ -418,8 +416,8 @@ void AlsaPcmOutputStream::BufferPacket(bool* source_exhausted) {
// Note: If this ever changes to output raw float the data must be clipped
// and sanitized since it may come from an untrusted source such as NaCl.
output_bus->Scale(volume_);
- output_bus->ToInterleaved(
- frames_filled, bytes_per_sample_, packet->writable_data());
+ output_bus->ToInterleaved<SignedInt16SampleTypeTraits>(
+ frames_filled, reinterpret_cast<int16_t*>(packet->writable_data()));
if (packet_size > 0) {
packet->set_data_size(packet_size);
diff --git a/chromium/media/audio/alsa/alsa_output_unittest.cc b/chromium/media/audio/alsa/alsa_output_unittest.cc
index a5b316b54d1..8150c329533 100644
--- a/chromium/media/audio/alsa/alsa_output_unittest.cc
+++ b/chromium/media/audio/alsa/alsa_output_unittest.cc
@@ -117,7 +117,7 @@ class AlsaPcmOutputStreamTest : public testing::Test {
mock_manager_.reset(new StrictMock<MockAudioManagerAlsa>());
}
- virtual ~AlsaPcmOutputStreamTest() { mock_manager_->Shutdown(); }
+ ~AlsaPcmOutputStreamTest() override { mock_manager_->Shutdown(); }
AlsaPcmOutputStream* CreateStream(ChannelLayout layout) {
return CreateStream(layout, kTestFramesPerPacket);
@@ -126,7 +126,7 @@ class AlsaPcmOutputStreamTest : public testing::Test {
AlsaPcmOutputStream* CreateStream(ChannelLayout layout,
int32_t samples_per_packet) {
AudioParameters params(kTestFormat, layout, kTestSampleRate,
- kTestBitsPerSample, samples_per_packet);
+ samples_per_packet);
return new AlsaPcmOutputStream(kTestDeviceName,
params,
&mock_alsa_wrapper_,
@@ -188,7 +188,7 @@ const ChannelLayout AlsaPcmOutputStreamTest::kTestChannelLayout =
CHANNEL_LAYOUT_STEREO;
const int AlsaPcmOutputStreamTest::kTestSampleRate =
AudioParameters::kAudioCDSampleRate;
-const int AlsaPcmOutputStreamTest::kTestBitsPerSample = 8;
+const int AlsaPcmOutputStreamTest::kTestBitsPerSample = 16;
const int AlsaPcmOutputStreamTest::kTestBytesPerFrame =
AlsaPcmOutputStreamTest::kTestBitsPerSample / 8 *
ChannelLayoutToChannelCount(AlsaPcmOutputStreamTest::kTestChannelLayout);
@@ -234,17 +234,6 @@ TEST_F(AlsaPcmOutputStreamTest, ConstructedState) {
test_stream = CreateStream(CHANNEL_LAYOUT_SURROUND);
EXPECT_EQ(AlsaPcmOutputStream::kCreated, test_stream->state());
test_stream->Close();
-
- // Bad bits per sample.
- AudioParameters bad_bps_params(kTestFormat, kTestChannelLayout,
- kTestSampleRate, kTestBitsPerSample - 1,
- kTestFramesPerPacket);
- test_stream = new AlsaPcmOutputStream(kTestDeviceName,
- bad_bps_params,
- &mock_alsa_wrapper_,
- mock_manager_.get());
- EXPECT_EQ(AlsaPcmOutputStream::kInError, test_stream->state());
- test_stream->Close();
}
TEST_F(AlsaPcmOutputStreamTest, LatencyFloor) {
@@ -322,13 +311,10 @@ TEST_F(AlsaPcmOutputStreamTest, OpenClose) {
SND_PCM_NONBLOCK))
.WillOnce(DoAll(SetArgPointee<0>(kFakeHandle), Return(0)));
EXPECT_CALL(mock_alsa_wrapper_,
- PcmSetParams(kFakeHandle,
- SND_PCM_FORMAT_U8,
+ PcmSetParams(kFakeHandle, SND_PCM_FORMAT_S16_LE,
SND_PCM_ACCESS_RW_INTERLEAVED,
ChannelLayoutToChannelCount(kTestChannelLayout),
- kTestSampleRate,
- 1,
- expected_micros))
+ kTestSampleRate, 1, expected_micros))
.WillOnce(Return(0));
EXPECT_CALL(mock_alsa_wrapper_, PcmGetParams(kFakeHandle, _, _))
.WillOnce(DoAll(SetArgPointee<1>(kTestFramesPerPacket),
diff --git a/chromium/media/audio/alsa/alsa_util.cc b/chromium/media/audio/alsa/alsa_util.cc
index b94cd17bf05..127e1719e8b 100644
--- a/chromium/media/audio/alsa/alsa_util.cc
+++ b/chromium/media/audio/alsa/alsa_util.cc
@@ -62,25 +62,6 @@ static std::string DeviceNameToControlName(const std::string& device_name) {
return control_name;
}
-snd_pcm_format_t BitsToFormat(int bits_per_sample) {
- switch (bits_per_sample) {
- case 8:
- return SND_PCM_FORMAT_U8;
-
- case 16:
- return SND_PCM_FORMAT_S16;
-
- case 24:
- return SND_PCM_FORMAT_S24;
-
- case 32:
- return SND_PCM_FORMAT_S32;
-
- default:
- return SND_PCM_FORMAT_UNKNOWN;
- }
-}
-
int CloseDevice(media::AlsaWrapper* wrapper, snd_pcm_t* handle) {
std::string device_name = wrapper->PcmName(handle);
int error = wrapper->PcmClose(handle);
diff --git a/chromium/media/audio/alsa/alsa_util.h b/chromium/media/audio/alsa/alsa_util.h
index a23ab317dd7..d24584a523d 100644
--- a/chromium/media/audio/alsa/alsa_util.h
+++ b/chromium/media/audio/alsa/alsa_util.h
@@ -14,8 +14,6 @@ class AlsaWrapper;
namespace alsa_util {
-snd_pcm_format_t BitsToFormat(int bits_per_sample);
-
snd_pcm_t* OpenCaptureDevice(media::AlsaWrapper* wrapper,
const char* device_name,
int channels,
diff --git a/chromium/media/audio/alsa/audio_manager_alsa.cc b/chromium/media/audio/alsa/audio_manager_alsa.cc
index 5f5bc6d7e64..91108f0f920 100644
--- a/chromium/media/audio/alsa/audio_manager_alsa.cc
+++ b/chromium/media/audio/alsa/audio_manager_alsa.cc
@@ -77,9 +77,9 @@ AudioParameters AudioManagerAlsa::GetInputStreamParameters(
const std::string& device_id) {
static const int kDefaultInputBufferSize = 1024;
- return AudioParameters(
- AudioParameters::AUDIO_PCM_LOW_LATENCY, CHANNEL_LAYOUT_STEREO,
- kDefaultSampleRate, 16, kDefaultInputBufferSize);
+ return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ CHANNEL_LAYOUT_STEREO, kDefaultSampleRate,
+ kDefaultInputBufferSize);
}
const char* AudioManagerAlsa::GetName() {
@@ -280,7 +280,6 @@ AudioParameters AudioManagerAlsa::GetPreferredOutputStreamParameters(
ChannelLayout channel_layout = CHANNEL_LAYOUT_STEREO;
int sample_rate = kDefaultSampleRate;
int buffer_size = kDefaultOutputBufferSize;
- int bits_per_sample = 16;
if (input_params.IsValid()) {
// Some clients, such as WebRTC, have a more limited use case and work
// acceptably with a smaller buffer size. The check below allows clients
@@ -288,7 +287,6 @@ AudioParameters AudioManagerAlsa::GetPreferredOutputStreamParameters(
// TODO(dalecurtis): This should include bits per channel and channel layout
// eventually.
sample_rate = input_params.sample_rate();
- bits_per_sample = input_params.bits_per_sample();
channel_layout = input_params.channel_layout();
buffer_size = std::min(input_params.frames_per_buffer(), buffer_size);
}
@@ -298,7 +296,7 @@ AudioParameters AudioManagerAlsa::GetPreferredOutputStreamParameters(
buffer_size = user_buffer_size;
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- sample_rate, bits_per_sample, buffer_size);
+ sample_rate, buffer_size);
}
AudioOutputStream* AudioManagerAlsa::MakeOutputStream(
diff --git a/chromium/media/audio/android/audio_android_unittest.cc b/chromium/media/audio/android/audio_android_unittest.cc
index fff57476a78..99a0852087d 100644
--- a/chromium/media/audio/android/audio_android_unittest.cc
+++ b/chromium/media/audio/android/audio_android_unittest.cc
@@ -54,8 +54,8 @@ const char kSpeechFile_16b_s_44k[] = "speech_16b_stereo_44kHz.raw";
const char kSpeechFile_16b_m_44k[] = "speech_16b_mono_44kHz.raw";
const float kCallbackTestTimeMs = 2000.0;
-const int kBitsPerSample = 16;
-const int kBytesPerSample = kBitsPerSample / 8;
+const int kBytesPerSample = 2;
+const SampleFormat kSampleFormat = kSampleFormatS16;
// Converts AudioParameters::Format enumerator to readable string.
std::string FormatToString(AudioParameters::Format format) {
@@ -109,7 +109,6 @@ void CheckDeviceDescriptions(
AudioDeviceDescriptions::const_iterator it = device_descriptions.begin();
// The first device in the list should always be the default device.
- EXPECT_EQ(AudioDeviceDescription::GetDefaultDeviceName(), it->device_name);
EXPECT_EQ(std::string(AudioDeviceDescription::kDefaultDeviceId),
it->unique_id);
++it;
@@ -142,15 +141,16 @@ int RealOnMoreData(base::TimeDelta /* delay */,
std::ostream& operator<<(std::ostream& os, const AudioParameters& params) {
using std::endl;
- os << endl << "format: " << FormatToString(params.format()) << endl
+ os << endl
+ << "format: " << FormatToString(params.format()) << endl
<< "channel layout: " << LayoutToString(params.channel_layout()) << endl
<< "sample rate: " << params.sample_rate() << endl
- << "bits per sample: " << params.bits_per_sample() << endl
<< "frames per buffer: " << params.frames_per_buffer() << endl
<< "channels: " << params.channels() << endl
- << "bytes per buffer: " << params.GetBytesPerBuffer() << endl
- << "bytes per second: " << params.GetBytesPerSecond() << endl
- << "bytes per frame: " << params.GetBytesPerFrame() << endl
+ << "bytes per buffer: " << params.GetBytesPerBuffer(kSampleFormat) << endl
+ << "bytes per second: "
+ << params.sample_rate() * params.GetBytesPerFrame(kSampleFormat) << endl
+ << "bytes per frame: " << params.GetBytesPerFrame(kSampleFormat) << endl
<< "chunk size in ms: " << ExpectedTimeBetweenCallbacks(params) << endl
<< "echo_canceller: "
<< (params.effects() & AudioParameters::ECHO_CANCELLER);
@@ -242,12 +242,13 @@ class FileAudioSink : public AudioInputStream::AudioInputCallback {
const std::string& file_name)
: event_(event), params_(params) {
// Allocate space for ~10 seconds of data.
- const int kMaxBufferSize = 10 * params.GetBytesPerSecond();
+ const int kMaxBufferSize =
+ 10 * params.sample_rate() * params.GetBytesPerFrame(kSampleFormat);
buffer_.reset(new media::SeekableBuffer(0, kMaxBufferSize));
// Open up the binary file which will be written to in the destructor.
base::FilePath file_path;
- EXPECT_TRUE(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));
+ EXPECT_TRUE(base::PathService::Get(base::DIR_SOURCE_ROOT, &file_path));
file_path = file_path.AppendASCII(file_name.c_str());
binary_file_ = base::OpenFile(file_path, "wb");
DLOG_IF(ERROR, !binary_file_) << "Failed to open binary PCM data file.";
@@ -314,8 +315,9 @@ class FullDuplexAudioSinkSource
started_(false) {
// Start with a reasonably small FIFO size. It will be increased
// dynamically during the test if required.
- fifo_.reset(new media::SeekableBuffer(0, 2 * params.GetBytesPerBuffer()));
- buffer_.reset(new uint8_t[params_.GetBytesPerBuffer()]);
+ size_t buffer_size = params.GetBytesPerBuffer(kSampleFormat);
+ fifo_.reset(new media::SeekableBuffer(0, 2 * buffer_size));
+ buffer_.reset(new uint8_t[buffer_size]);
}
~FullDuplexAudioSinkSource() override {}
@@ -327,7 +329,6 @@ class FullDuplexAudioSinkSource
const base::TimeTicks now_time = base::TimeTicks::Now();
const int diff = (now_time - previous_time_).InMilliseconds();
- EXPECT_EQ(params_.bits_per_sample(), 16);
const int num_samples = src->frames() * src->channels();
std::unique_ptr<int16_t> interleaved(new int16_t[num_samples]);
const int bytes_per_sample = sizeof(*interleaved);
@@ -368,8 +369,8 @@ class FullDuplexAudioSinkSource
int /* prior_frames_skipped */,
AudioBus* dest) override {
const int size_in_bytes =
- (params_.bits_per_sample() / 8) * dest->frames() * dest->channels();
- EXPECT_EQ(size_in_bytes, params_.GetBytesPerBuffer());
+ kBytesPerSample * dest->frames() * dest->channels();
+ EXPECT_EQ(size_in_bytes, params_.GetBytesPerBuffer(kSampleFormat));
base::AutoLock lock(lock_);
@@ -387,8 +388,8 @@ class FullDuplexAudioSinkSource
dest->Zero();
} else {
fifo_->Read(buffer_.get(), size_in_bytes);
- dest->FromInterleaved(
- buffer_.get(), dest->frames(), params_.bits_per_sample() / 8);
+ dest->FromInterleaved<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<int16_t*>(buffer_.get()), dest->frames());
}
return dest->frames();
@@ -398,7 +399,7 @@ class FullDuplexAudioSinkSource
// Converts from bytes to milliseconds given number of bytes and existing
// audio parameters.
double BytesToMilliseconds(int bytes) const {
- const int frames = bytes / params_.GetBytesPerFrame();
+ const int frames = bytes / params_.GetBytesPerFrame(kSampleFormat);
return (base::TimeDelta::FromMicroseconds(
frames * base::Time::kMicrosecondsPerSecond /
static_cast<double>(params_.sample_rate()))).InMillisecondsF();
@@ -835,7 +836,6 @@ TEST_F(AudioAndroidOutputTest, StartOutputStreamCallbacksNonDefaultParameters) {
AudioParameters params(audio_output_parameters().format(),
CHANNEL_LAYOUT_MONO,
audio_output_parameters().sample_rate(),
- audio_output_parameters().bits_per_sample(),
audio_output_parameters().sample_rate() / 100);
StartOutputStreamCallbacks(params);
}
diff --git a/chromium/media/audio/android/audio_manager_android.cc b/chromium/media/audio/android/audio_manager_android.cc
index 6d780fdb91f..45287af3971 100644
--- a/chromium/media/audio/android/audio_manager_android.cc
+++ b/chromium/media/audio/android/audio_manager_android.cc
@@ -12,7 +12,6 @@
#include "base/android/scoped_java_ref.h"
#include "base/bind.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/strings/string_number_conversions.h"
#include "jni/AudioManagerAndroid_jni.h"
#include "media/audio/android/audio_record_input.h"
@@ -157,7 +156,7 @@ AudioParameters AudioManagerAndroid::GetInputStreamParameters(
buffer_size = user_buffer_size;
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- GetNativeOutputSampleRate(), 16, buffer_size);
+ GetNativeOutputSampleRate(), buffer_size);
params.set_effects(effects);
return params;
}
@@ -327,11 +326,9 @@ AudioParameters AudioManagerAndroid::GetPreferredOutputStreamParameters(
ChannelLayout channel_layout = CHANNEL_LAYOUT_STEREO;
int sample_rate = GetNativeOutputSampleRate();
int buffer_size = GetOptimalOutputFrameSize(sample_rate, 2);
- int bits_per_sample = 16;
if (input_params.IsValid()) {
// Use the client's input parameters if they are valid.
sample_rate = input_params.sample_rate();
- bits_per_sample = input_params.bits_per_sample();
// Pre-Lollipop devices don't support > stereo OpenSLES output and the
// AudioManager APIs for GetOptimalOutputFrameSize() don't support channel
@@ -360,7 +357,7 @@ AudioParameters AudioManagerAndroid::GetPreferredOutputStreamParameters(
buffer_size = user_buffer_size;
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- sample_rate, bits_per_sample, buffer_size);
+ sample_rate, buffer_size);
}
bool AudioManagerAndroid::HasNoAudioInputStreams() {
diff --git a/chromium/media/audio/android/audio_record_input.cc b/chromium/media/audio/android/audio_record_input.cc
index 657a19825cb..791768040fe 100644
--- a/chromium/media/audio/android/audio_record_input.cc
+++ b/chromium/media/audio/android/audio_record_input.cc
@@ -13,6 +13,8 @@ using base::android::JavaParamRef;
namespace media {
+constexpr SampleFormat kSampleFormat = kSampleFormatS16;
+
AudioRecordInputStream::AudioRecordInputStream(
AudioManagerAndroid* audio_manager,
const AudioParameters& params)
@@ -20,18 +22,14 @@ AudioRecordInputStream::AudioRecordInputStream(
callback_(NULL),
direct_buffer_address_(NULL),
audio_bus_(media::AudioBus::Create(params)),
- bytes_per_sample_(params.bits_per_sample() / 8) {
+ bytes_per_sample_(SampleFormatToBytesPerChannel(kSampleFormat)) {
DVLOG(2) << __PRETTY_FUNCTION__;
DCHECK(params.IsValid());
- j_audio_record_.Reset(
- Java_AudioRecordInput_createAudioRecordInput(
- base::android::AttachCurrentThread(),
- reinterpret_cast<intptr_t>(this),
- params.sample_rate(),
- params.channels(),
- params.bits_per_sample(),
- params.GetBytesPerBuffer(),
- params.effects() & AudioParameters::ECHO_CANCELLER));
+ j_audio_record_.Reset(Java_AudioRecordInput_createAudioRecordInput(
+ base::android::AttachCurrentThread(), reinterpret_cast<intptr_t>(this),
+ params.sample_rate(), params.channels(), bytes_per_sample_ * 8,
+ params.GetBytesPerBuffer(kSampleFormat),
+ params.effects() & AudioParameters::ECHO_CANCELLER));
}
AudioRecordInputStream::~AudioRecordInputStream() {
@@ -144,4 +142,9 @@ bool AudioRecordInputStream::IsMuted() {
return false;
}
+void AudioRecordInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ // Do nothing. This is handled at a different layer on Android.
+}
+
} // namespace media
diff --git a/chromium/media/audio/android/audio_record_input.h b/chromium/media/audio/android/audio_record_input.h
index f1cf02ee12a..f5d544bb32a 100644
--- a/chromium/media/audio/android/audio_record_input.h
+++ b/chromium/media/audio/android/audio_record_input.h
@@ -43,6 +43,7 @@ class MEDIA_EXPORT AudioRecordInputStream : public AudioInputStream {
bool SetAutomaticGainControl(bool enabled) override;
bool GetAutomaticGainControl() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
// Called from Java when data is available.
void OnData(JNIEnv* env,
diff --git a/chromium/media/audio/android/opensles_input.cc b/chromium/media/audio/android/opensles_input.cc
index 22d9384cd31..cd265043e98 100644
--- a/chromium/media/audio/android/opensles_input.cc
+++ b/chromium/media/audio/android/opensles_input.cc
@@ -32,16 +32,19 @@ OpenSLESInputStream::OpenSLESInputStream(AudioManagerAndroid* audio_manager,
started_(false),
audio_bus_(media::AudioBus::Create(params)) {
DVLOG(2) << __PRETTY_FUNCTION__;
+
+ const SampleFormat kSampleFormat = kSampleFormatS16;
+
format_.formatType = SL_DATAFORMAT_PCM;
format_.numChannels = static_cast<SLuint32>(params.channels());
// Provides sampling rate in milliHertz to OpenSLES.
format_.samplesPerSec = static_cast<SLuint32>(params.sample_rate() * 1000);
- format_.bitsPerSample = params.bits_per_sample();
- format_.containerSize = params.bits_per_sample();
+ format_.bitsPerSample = format_.containerSize =
+ SampleFormatToBitsPerChannel(kSampleFormat);
format_.endianness = SL_BYTEORDER_LITTLEENDIAN;
format_.channelMask = ChannelCountToSLESChannelMask(params.channels());
- buffer_size_bytes_ = params.GetBytesPerBuffer();
+ buffer_size_bytes_ = params.GetBytesPerBuffer(kSampleFormat);
hardware_delay_ = base::TimeDelta::FromSecondsD(
params.frames_per_buffer() / static_cast<double>(params.sample_rate()));
@@ -189,6 +192,11 @@ bool OpenSLESInputStream::IsMuted() {
return false;
}
+void OpenSLESInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ // Not supported. Do nothing.
+}
+
bool OpenSLESInputStream::CreateRecorder() {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!engine_object_.Get());
@@ -302,9 +310,9 @@ void OpenSLESInputStream::ReadBufferQueue() {
TRACE_EVENT0("audio", "OpenSLESOutputStream::ReadBufferQueue");
// Convert from interleaved format to deinterleaved audio bus format.
- audio_bus_->FromInterleaved(audio_data_[active_buffer_index_],
- audio_bus_->frames(),
- format_.bitsPerSample / 8);
+ audio_bus_->FromInterleaved<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<int16_t*>(audio_data_[active_buffer_index_]),
+ audio_bus_->frames());
// TODO(henrika): Investigate if it is possible to get an accurate
// delay estimation.
diff --git a/chromium/media/audio/android/opensles_input.h b/chromium/media/audio/android/opensles_input.h
index f7385dea8de..f79e3ee63c8 100644
--- a/chromium/media/audio/android/opensles_input.h
+++ b/chromium/media/audio/android/opensles_input.h
@@ -48,6 +48,7 @@ class OpenSLESInputStream : public AudioInputStream {
bool SetAutomaticGainControl(bool enabled) override;
bool GetAutomaticGainControl() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
private:
bool CreateRecorder();
diff --git a/chromium/media/audio/android/opensles_output.cc b/chromium/media/audio/android/opensles_output.cc
index b489392506e..4f017570489 100644
--- a/chromium/media/audio/android/opensles_output.cc
+++ b/chromium/media/audio/android/opensles_output.cc
@@ -39,19 +39,18 @@ OpenSLESOutputStream::OpenSLESOutputStream(AudioManagerAndroid* manager,
muted_(false),
volume_(1.0),
samples_per_second_(params.sample_rate()),
- have_float_output_(
- base::android::BuildInfo::GetInstance()->sdk_int() >=
- base::android::SDK_VERSION_LOLLIPOP &&
- // See http://crbug.com/737188; still shipping Lollipop in 2017, so no
- // idea if later phones will be glitch free; thus blacklist all.
- !base::EqualsCaseInsensitiveASCII(
- base::android::BuildInfo::GetInstance()->manufacturer(),
- "vivo")),
- bytes_per_frame_(have_float_output_ ? params.channels() * sizeof(float)
- : params.GetBytesPerFrame()),
- buffer_size_bytes_(have_float_output_
- ? bytes_per_frame_ * params.frames_per_buffer()
- : params.GetBytesPerBuffer()),
+ sample_format_(
+ (base::android::BuildInfo::GetInstance()->sdk_int() >=
+ base::android::SDK_VERSION_LOLLIPOP &&
+ // See http://crbug.com/737188; still shipping Lollipop in 2017, so
+ // no idea if later phones will be glitch free; thus blacklist all.
+ !base::EqualsCaseInsensitiveASCII(
+ base::android::BuildInfo::GetInstance()->manufacturer(),
+ "vivo"))
+ ? kSampleFormatF32
+ : kSampleFormatS16),
+ bytes_per_frame_(params.GetBytesPerFrame(sample_format_)),
+ buffer_size_bytes_(params.GetBytesPerBuffer(sample_format_)),
performance_mode_(SL_ANDROID_PERFORMANCE_NONE),
delay_calculator_(samples_per_second_) {
DVLOG(2) << "OpenSLESOutputStream::OpenSLESOutputStream("
@@ -66,14 +65,14 @@ OpenSLESOutputStream::OpenSLESOutputStream(AudioManagerAndroid* manager,
audio_bus_ = AudioBus::Create(params);
- if (have_float_output_) {
+ if (sample_format_ == kSampleFormatF32) {
float_format_.formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
float_format_.numChannels = static_cast<SLuint32>(params.channels());
// Despite the name, this field is actually the sampling rate in millihertz.
float_format_.sampleRate =
static_cast<SLuint32>(samples_per_second_ * 1000);
- float_format_.bitsPerSample = 32;
- float_format_.containerSize = 32;
+ float_format_.bitsPerSample = float_format_.containerSize =
+ SampleFormatToBitsPerChannel(sample_format_);
float_format_.endianness = SL_BYTEORDER_LITTLEENDIAN;
float_format_.channelMask =
ChannelCountToSLESChannelMask(params.channels());
@@ -85,8 +84,8 @@ OpenSLESOutputStream::OpenSLESOutputStream(AudioManagerAndroid* manager,
format_.numChannels = static_cast<SLuint32>(params.channels());
// Despite the name, this field is actually the sampling rate in millihertz :|
format_.samplesPerSec = static_cast<SLuint32>(samples_per_second_ * 1000);
- format_.bitsPerSample = params.bits_per_sample();
- format_.containerSize = params.bits_per_sample();
+ format_.bitsPerSample = format_.containerSize =
+ SampleFormatToBitsPerChannel(sample_format_);
format_.endianness = SL_BYTEORDER_LITTLEENDIAN;
format_.channelMask = ChannelCountToSLESChannelMask(params.channels());
}
@@ -281,7 +280,7 @@ bool OpenSLESOutputStream::CreatePlayer() {
SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
static_cast<SLuint32>(kMaxNumOfBuffersInQueue)};
SLDataSource audio_source;
- if (have_float_output_)
+ if (sample_format_ == kSampleFormatF32)
audio_source = {&simple_buffer_queue, &float_format_};
else
audio_source = {&simple_buffer_queue, &format_};
@@ -424,10 +423,12 @@ void OpenSLESOutputStream::FillBufferQueueNoLock() {
// raw float, the data must be clipped and sanitized since it may come
// from an untrusted source such as NaCl.
audio_bus_->Scale(muted_ ? 0.0f : volume_);
- if (!have_float_output_) {
- audio_bus_->ToInterleaved(frames_filled, format_.bitsPerSample / 8,
- audio_data_[active_buffer_index_]);
+ if (sample_format_ == kSampleFormatS16) {
+ audio_bus_->ToInterleaved<SignedInt16SampleTypeTraits>(
+ frames_filled,
+ reinterpret_cast<int16_t*>(audio_data_[active_buffer_index_]));
} else {
+ DCHECK_EQ(sample_format_, kSampleFormatF32);
audio_bus_->ToInterleaved<Float32SampleTypeTraits>(
frames_filled,
reinterpret_cast<float*>(audio_data_[active_buffer_index_]));
diff --git a/chromium/media/audio/android/opensles_output.h b/chromium/media/audio/android/opensles_output.h
index 86d5a8526c6..6de09916315 100644
--- a/chromium/media/audio/android/opensles_output.h
+++ b/chromium/media/audio/android/opensles_output.h
@@ -122,8 +122,9 @@ class OpenSLESOutputStream : public MuteableAudioOutputStream {
int samples_per_second_;
- // On Android 5.0+ we can output directly to float instead of in integer.
- bool have_float_output_;
+ // On Android 5.0+ we can output directly to float instead of in integer, so
+ // there we'll use kSampleFormatF32. If not, this will be kSampleFormatS16.
+ SampleFormat sample_format_;
int bytes_per_frame_;
size_t buffer_size_bytes_;
diff --git a/chromium/media/audio/audio_debug_file_writer_unittest.cc b/chromium/media/audio/audio_debug_file_writer_unittest.cc
index eaa026b09d7..5d2474ab519 100644
--- a/chromium/media/audio/audio_debug_file_writer_unittest.cc
+++ b/chromium/media/audio/audio_debug_file_writer_unittest.cc
@@ -57,7 +57,6 @@ class AudioDebugFileWriterTest
params_(AudioParameters::Format::AUDIO_PCM_LINEAR,
std::get<0>(GetParam()),
std::get<1>(GetParam()),
- kBytesPerSample * 8,
std::get<2>(GetParam())),
writes_(std::get<3>(GetParam())),
source_samples_(params_.frames_per_buffer() * params_.channels() *
diff --git a/chromium/media/audio/audio_debug_recording_helper_unittest.cc b/chromium/media/audio/audio_debug_recording_helper_unittest.cc
index d93475e57eb..9bd581d8f1a 100644
--- a/chromium/media/audio/audio_debug_recording_helper_unittest.cc
+++ b/chromium/media/audio/audio_debug_recording_helper_unittest.cc
@@ -41,7 +41,7 @@ class MockAudioDebugFileWriter : public AudioDebugFileWriter {
~MockAudioDebugFileWriter() override = default;
MOCK_METHOD1(DoStart, void(bool));
- void Start(base::File file) { DoStart(file.IsValid()); }
+ void Start(base::File file) override { DoStart(file.IsValid()); }
MOCK_METHOD0(Stop, void());
// Functions with move-only types as arguments can't be mocked directly, so
@@ -202,7 +202,7 @@ TEST_F(AudioDebugRecordingHelperTest, OnData) {
// AudioBus, the other parameters are ignored.
const int number_of_frames = 100;
const AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR,
- ChannelLayout::CHANNEL_LAYOUT_STEREO, 0, 0,
+ ChannelLayout::CHANNEL_LAYOUT_STEREO, 0,
number_of_frames);
// Setup some data.
diff --git a/chromium/media/audio/audio_device_description.cc b/chromium/media/audio/audio_device_description.cc
index dafe34bd845..ebca4a61b23 100644
--- a/chromium/media/audio/audio_device_description.cc
+++ b/chromium/media/audio/audio_device_description.cc
@@ -6,6 +6,7 @@
#include <utility>
+#include "base/bind.h"
#include "base/logging.h"
#include "media/base/localized_strings.h"
@@ -64,7 +65,8 @@ std::string AudioDeviceDescription::GetCommunicationsDeviceName() {
// static
std::string AudioDeviceDescription::GetDefaultDeviceName(
const std::string& real_device_name) {
- DCHECK(!real_device_name.empty());
+ if (real_device_name.empty())
+ return GetDefaultDeviceName();
// TODO(guidou): Put the names together in a localized manner.
// http://crbug.com/788767
return GetDefaultDeviceName() + " - " + real_device_name;
@@ -73,12 +75,30 @@ std::string AudioDeviceDescription::GetDefaultDeviceName(
// static
std::string AudioDeviceDescription::GetCommunicationsDeviceName(
const std::string& real_device_name) {
- DCHECK(!real_device_name.empty());
+ if (real_device_name.empty())
+ return GetCommunicationsDeviceName();
// TODO(guidou): Put the names together in a localized manner.
// http://crbug.com/788767
return GetCommunicationsDeviceName() + " - " + real_device_name;
}
+// static
+void AudioDeviceDescription::LocalizeDeviceDescriptions(
+ AudioDeviceDescriptions* device_descriptions) {
+ for (auto& description : *device_descriptions) {
+ if (media::AudioDeviceDescription::IsDefaultDevice(description.unique_id)) {
+ description.device_name =
+ media::AudioDeviceDescription::GetDefaultDeviceName(
+ description.device_name);
+ } else if (media::AudioDeviceDescription::IsCommunicationsDevice(
+ description.unique_id)) {
+ description.device_name =
+ media::AudioDeviceDescription::GetCommunicationsDeviceName(
+ description.device_name);
+ }
+ }
+}
+
AudioDeviceDescription::AudioDeviceDescription(std::string device_name,
std::string unique_id,
std::string group_id)
diff --git a/chromium/media/audio/audio_device_description.h b/chromium/media/audio/audio_device_description.h
index f978faeca4e..970ec0e822d 100644
--- a/chromium/media/audio/audio_device_description.h
+++ b/chromium/media/audio/audio_device_description.h
@@ -7,6 +7,7 @@
#include <string>
#include <vector>
+
#include "media/base/media_export.h"
namespace media {
@@ -52,6 +53,8 @@ struct MEDIA_EXPORT AudioDeviceDescription {
static bool UseSessionIdToSelectDevice(int session_id,
const std::string& device_id);
+ // The functions dealing with localization are not reliable in the audio
+ // service, and should be avoided there.
// Returns the localized name of the generic "default" device.
static std::string GetDefaultDeviceName();
@@ -68,6 +71,11 @@ struct MEDIA_EXPORT AudioDeviceDescription {
static std::string GetCommunicationsDeviceName(
const std::string& real_device_name);
+ // This prepends localized "Default" or "Communications" strings to
+ // default and communications device names in |device_descriptions|.
+ static void LocalizeDeviceDescriptions(
+ std::vector<AudioDeviceDescription>* device_descriptions);
+
AudioDeviceDescription() = default;
AudioDeviceDescription(const AudioDeviceDescription& other) = default;
AudioDeviceDescription(std::string device_name,
diff --git a/chromium/media/audio/audio_device_name.cc b/chromium/media/audio/audio_device_name.cc
index 781268842db..c14ba73a9b7 100644
--- a/chromium/media/audio/audio_device_name.cc
+++ b/chromium/media/audio/audio_device_name.cc
@@ -17,13 +17,13 @@ AudioDeviceName::AudioDeviceName(std::string device_name, std::string unique_id)
// static
AudioDeviceName AudioDeviceName::CreateDefault() {
- return AudioDeviceName(AudioDeviceDescription::GetDefaultDeviceName(),
+ return AudioDeviceName(std::string(),
AudioDeviceDescription::kDefaultDeviceId);
}
// static
AudioDeviceName AudioDeviceName::CreateCommunications() {
- return AudioDeviceName(AudioDeviceDescription::GetCommunicationsDeviceName(),
+ return AudioDeviceName(std::string(),
AudioDeviceDescription::kCommunicationsDeviceId);
}
diff --git a/chromium/media/audio/audio_device_name.h b/chromium/media/audio/audio_device_name.h
index 1d74d19d46b..a0ecfb5d7fe 100644
--- a/chromium/media/audio/audio_device_name.h
+++ b/chromium/media/audio/audio_device_name.h
@@ -16,9 +16,13 @@ struct MEDIA_EXPORT AudioDeviceName {
AudioDeviceName(std::string device_name, std::string unique_id);
// Creates default device representation.
+ // Shouldn't be used in the audio service, since the audio service doesn't
+ // have access to localized device names.
static AudioDeviceName CreateDefault();
// Creates communications device representation.
+ // Shouldn't be used in the audio service, since the audio service doesn't
+ // have access to localized device names.
static AudioDeviceName CreateCommunications();
std::string device_name; // Friendly name of the device.
diff --git a/chromium/media/audio/audio_input_controller.cc b/chromium/media/audio/audio_input_controller.cc
index 1de2298e440..1f0f81f19af 100644
--- a/chromium/media/audio/audio_input_controller.cc
+++ b/chromium/media/audio/audio_input_controller.cc
@@ -315,6 +315,20 @@ void AudioInputController::SetVolume(double volume) {
base::BindOnce(&AudioInputController::DoSetVolume, this, volume));
}
+void AudioInputController::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(owning_sequence_);
+
+ if (task_runner_->BelongsToCurrentThread()) {
+ DoSetOutputDeviceForAec(output_device_id);
+ return;
+ }
+
+ task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&AudioInputController::DoSetOutputDeviceForAec,
+ this, output_device_id));
+}
+
void AudioInputController::DoCreate(AudioManager* audio_manager,
const AudioParameters& params,
const std::string& device_id,
@@ -503,6 +517,13 @@ void AudioInputController::DoSetVolume(double volume) {
stream_->SetVolume(max_volume_ * volume);
}
+void AudioInputController::DoSetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ if (stream_)
+ stream_->SetOutputDeviceForAec(output_device_id);
+}
+
void AudioInputController::DoLogAudioLevels(float level_dbfs,
int microphone_volume_percent) {
#if defined(AUDIO_POWER_MONITORING)
@@ -663,7 +684,6 @@ void AudioInputController::CheckMutedState() {
const bool new_state = stream_->IsMuted();
if (new_state != is_muted_) {
is_muted_ = new_state;
- // We don't log OnMuted here, but leave that for AudioInputRendererHost.
handler_->OnMuted(is_muted_);
}
}
diff --git a/chromium/media/audio/audio_input_controller.h b/chromium/media/audio/audio_input_controller.h
index bd98c33256e..9705fccd4ac 100644
--- a/chromium/media/audio/audio_input_controller.h
+++ b/chromium/media/audio/audio_input_controller.h
@@ -213,6 +213,10 @@ class MEDIA_EXPORT AudioInputController
// to muted and 1.0 to maximum volume.
virtual void SetVolume(double volume);
+ // Sets the output device which will be used to cancel audio from, if this
+ // input device supports echo cancellation.
+ virtual void SetOutputDeviceForAec(const std::string& output_device_id);
+
protected:
friend class base::RefCountedThreadSafe<AudioInputController>;
@@ -283,6 +287,7 @@ class MEDIA_EXPORT AudioInputController
void DoReportError();
void DoSetVolume(double volume);
void DoLogAudioLevels(float level_dbfs, int microphone_volume_percent);
+ void DoSetOutputDeviceForAec(const std::string& output_device_id);
#if defined(AUDIO_POWER_MONITORING)
// Updates the silence state, see enum SilenceState above for state
@@ -377,15 +382,13 @@ class MEDIA_EXPORT AudioInputController
// A weak pointer factory that we use when posting tasks to the audio thread
// that we want to be automatically discarded after Close() has been called
// and that we do not want to keep the AudioInputController instance alive
- // beyond what is desired by the user of the instance (e.g.
- // AudioInputRendererHost). An example of where this is important is when
- // we fire error notifications from the hw callback thread, post them to
- // the audio thread. In that case, we do not want the error notification to
- // keep the AudioInputController alive for as long as the error notification
- // is pending and then make a callback from an AudioInputController that has
- // already been closed.
- // The weak_ptr_factory_ and all outstanding weak pointers, are invalidated
- // at the end of DoClose.
+ // beyond what is desired by the user of the instance. An example of where
+ // this is important is when we fire error notifications from the hw callback
+ // thread, post them to the audio thread. In that case, we do not want the
+ // error notification to keep the AudioInputController alive for as long as
+ // the error notification is pending and then make a callback from an
+ // AudioInputController that has already been closed.
+  // All outstanding weak pointers are invalidated at the end of DoClose.
base::WeakPtrFactory<AudioInputController> weak_ptr_factory_;
DISALLOW_COPY_AND_ASSIGN(AudioInputController);
diff --git a/chromium/media/audio/audio_input_controller_unittest.cc b/chromium/media/audio/audio_input_controller_unittest.cc
index 026277fd36c..3bc3203441d 100644
--- a/chromium/media/audio/audio_input_controller_unittest.cc
+++ b/chromium/media/audio/audio_input_controller_unittest.cc
@@ -28,7 +28,6 @@ namespace media {
namespace {
const int kSampleRate = AudioParameters::kAudioCDSampleRate;
-const int kBitsPerSample = 16;
const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
const int kSamplesPerPacket = kSampleRate / 10;
@@ -39,12 +38,12 @@ const double kMaxVolume = 1.0;
constexpr base::TimeDelta kOnMuteWaitTimeout =
base::TimeDelta::FromMilliseconds(1500);
-// Posts base::MessageLoop::QuitWhenIdleClosure() on specified message loop
-// after a certain number of calls given by |limit|.
+// Posts base::RunLoop::QuitCurrentWhenIdleClosureDeprecated() on specified
+// message loop after a certain number of calls given by |limit|.
ACTION_P3(CheckCountAndPostQuitTask, count, limit, loop_or_proxy) {
if (++*count >= limit) {
- loop_or_proxy->PostTask(FROM_HERE,
- base::MessageLoop::QuitWhenIdleClosure());
+ loop_or_proxy->PostTask(
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
}
}
@@ -61,7 +60,7 @@ class MockAudioInputControllerEventHandler
public:
MockAudioInputControllerEventHandler() = default;
- void OnLog(base::StringPiece) {}
+ void OnLog(base::StringPiece) override {}
MOCK_METHOD1(OnCreated, void(bool initially_muted));
MOCK_METHOD1(OnError, void(AudioInputController::ErrorCode error_code));
@@ -87,10 +86,10 @@ class MockUserInputMonitor : public UserInputMonitor {
public:
MockUserInputMonitor() = default;
- size_t GetKeyPressCount() const { return 0; }
+ uint32_t GetKeyPressCount() const override { return 0; }
- MOCK_METHOD0(StartKeyboardMonitoring, void());
- MOCK_METHOD0(StopKeyboardMonitoring, void());
+ MOCK_METHOD0(EnableKeyPressMonitoring, void());
+ MOCK_METHOD0(DisableKeyPressMonitoring, void());
};
class MockAudioInputStream : public AudioInputStream {
@@ -98,14 +97,15 @@ class MockAudioInputStream : public AudioInputStream {
MockAudioInputStream() {}
~MockAudioInputStream() override {}
- void Start(AudioInputCallback*) {}
- void Stop() {}
- void Close() {}
- double GetMaxVolume() { return kMaxVolume; }
- double GetVolume() { return 0; }
- bool SetAutomaticGainControl(bool) { return false; }
- bool GetAutomaticGainControl() { return false; }
- bool IsMuted() { return false; }
+ void Start(AudioInputCallback*) override {}
+ void Stop() override {}
+ void Close() override {}
+ double GetMaxVolume() override { return kMaxVolume; }
+ double GetVolume() override { return 0; }
+ bool SetAutomaticGainControl(bool) override { return false; }
+ bool GetAutomaticGainControl() override { return false; }
+ bool IsMuted() override { return false; }
+ void SetOutputDeviceForAec(const std::string&) override {}
MOCK_METHOD0(Open, bool());
MOCK_METHOD1(SetVolume, void(double));
@@ -120,7 +120,6 @@ class AudioInputControllerTest : public testing::TestWithParam<bool> {
params_(AudioParameters::AUDIO_FAKE,
kChannelLayout,
kSampleRate,
- kBitsPerSample,
kSamplesPerPacket) {}
~AudioInputControllerTest() override {
@@ -142,7 +141,7 @@ class AudioInputControllerTest : public testing::TestWithParam<bool> {
return;
}
- controller_->Close(base::MessageLoop::QuitWhenIdleClosure());
+ controller_->Close(base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
base::RunLoop().Run();
}
@@ -187,13 +186,13 @@ TEST_P(AudioInputControllerTest, CreateRecordAndClose) {
.Times(AtLeast(10))
.WillRepeatedly(
CheckCountAndPostQuitTask(&count, 10, message_loop_.task_runner()));
- EXPECT_CALL(user_input_monitor_, StartKeyboardMonitoring());
+ EXPECT_CALL(user_input_monitor_, EnableKeyPressMonitoring());
controller_->Record();
// Record and wait until ten Write() callbacks are received.
base::RunLoop().Run();
- EXPECT_CALL(user_input_monitor_, StopKeyboardMonitoring());
+ EXPECT_CALL(user_input_monitor_, DisableKeyPressMonitoring());
EXPECT_CALL(sync_writer_, Close());
CloseAudioController();
}
@@ -214,10 +213,10 @@ TEST_P(AudioInputControllerTest, CloseTwice) {
CreateAudioController();
ASSERT_TRUE(controller_.get());
- EXPECT_CALL(user_input_monitor_, StartKeyboardMonitoring());
+ EXPECT_CALL(user_input_monitor_, EnableKeyPressMonitoring());
controller_->Record();
- EXPECT_CALL(user_input_monitor_, StopKeyboardMonitoring());
+ EXPECT_CALL(user_input_monitor_, DisableKeyPressMonitoring());
EXPECT_CALL(sync_writer_, Close());
CloseAudioController();
diff --git a/chromium/media/audio/audio_input_delegate.h b/chromium/media/audio/audio_input_delegate.h
index 5d678eb0203..203c978966d 100644
--- a/chromium/media/audio/audio_input_delegate.h
+++ b/chromium/media/audio/audio_input_delegate.h
@@ -14,7 +14,7 @@
namespace base {
class CancelableSyncSocket;
-class SharedMemory;
+class ReadOnlySharedMemoryRegion;
} // namespace base
namespace media {
@@ -30,7 +30,7 @@ class MEDIA_EXPORT AudioInputDelegate {
// Called when the underlying stream is ready for recording.
virtual void OnStreamCreated(
int stream_id,
- const base::SharedMemory* shared_memory,
+ base::ReadOnlySharedMemoryRegion shared_memory_region,
std::unique_ptr<base::CancelableSyncSocket> socket,
bool initially_muted) = 0;
@@ -48,6 +48,8 @@ class MEDIA_EXPORT AudioInputDelegate {
// Stream control:
virtual void OnRecordStream() = 0;
virtual void OnSetVolume(double volume) = 0;
+ virtual void OnSetOutputDeviceForAec(
+ const std::string& raw_output_device_id) = 0;
};
} // namespace media
diff --git a/chromium/media/audio/audio_input_device.cc b/chromium/media/audio/audio_input_device.cc
index d8777a71603..facb465549b 100644
--- a/chromium/media/audio/audio_input_device.cc
+++ b/chromium/media/audio/audio_input_device.cc
@@ -67,7 +67,7 @@ class AudioInputDevice::AudioThreadCallback
private:
const base::TimeTicks start_time_;
- const double bytes_per_ms_;
+ bool no_callbacks_received_;
size_t current_segment_id_;
uint32_t last_buffer_id_;
std::vector<std::unique_ptr<const media::AudioBus>> audio_buses_;
@@ -84,16 +84,11 @@ class AudioInputDevice::AudioThreadCallback
DISALLOW_COPY_AND_ASSIGN(AudioThreadCallback);
};
-AudioInputDevice::AudioInputDevice(
- std::unique_ptr<AudioInputIPC> ipc,
- const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
- : ScopedTaskRunnerObserver(io_task_runner),
- callback_(NULL),
+AudioInputDevice::AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc)
+ : callback_(nullptr),
ipc_(std::move(ipc)),
state_(IDLE),
- session_id_(0),
- agc_is_enabled_(false),
- stopping_hack_(false) {
+ agc_is_enabled_(false) {
CHECK(ipc_);
// The correctness of the code depends on the relative values assigned in the
@@ -104,44 +99,60 @@ AudioInputDevice::AudioInputDevice(
}
void AudioInputDevice::Initialize(const AudioParameters& params,
- CaptureCallback* callback,
- int session_id) {
- task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&AudioInputDevice::InitializeOnIOThread, this,
- params, callback, session_id));
-}
-
-void AudioInputDevice::InitializeOnIOThread(const AudioParameters& params,
- CaptureCallback* callback,
- int session_id) {
+ CaptureCallback* callback) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(params.IsValid());
DCHECK(!callback_);
- DCHECK_EQ(0, session_id_);
audio_parameters_ = params;
callback_ = callback;
- session_id_ = session_id;
}
void AudioInputDevice::Start() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(callback_) << "Initialize hasn't been called";
TRACE_EVENT0("audio", "AudioInputDevice::Start");
- task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&AudioInputDevice::StartUpOnIOThread, this));
+
+ // Make sure we don't call Start() more than once.
+ if (state_ != IDLE)
+ return;
+
+ state_ = CREATING_STREAM;
+ ipc_->CreateStream(this, audio_parameters_, agc_is_enabled_,
+ kRequestedSharedMemoryCount);
}
void AudioInputDevice::Stop() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT0("audio", "AudioInputDevice::Stop");
- {
- base::AutoLock auto_lock(audio_thread_lock_);
- audio_thread_.reset();
- stopping_hack_ = true;
+ UMA_HISTOGRAM_BOOLEAN(
+ "Media.Audio.Capture.DetectedMissingCallbacks",
+ alive_checker_ ? alive_checker_->DetectedDead() : false);
+
+ UMA_HISTOGRAM_BOOLEAN("Media.Audio.Capture.StreamCallbackError",
+ had_callback_error_);
+
+ // Close the stream, if we haven't already.
+ if (state_ >= CREATING_STREAM) {
+ ipc_->CloseStream();
+ state_ = IDLE;
+ agc_is_enabled_ = false;
}
- task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&AudioInputDevice::ShutDownOnIOThread, this));
+ // We can run into an issue where Stop is called right after
+ // OnStreamCreated is called in cases where Start/Stop are called before we
+ // get the OnStreamCreated callback. To handle that corner case, we call
+  // audio_thread_.reset(). In most cases, the thread will already be stopped.
+ //
+ // |alive_checker_| must outlive |audio_callback_|.
+ base::ScopedAllowBlocking allow_blocking;
+ audio_thread_.reset();
+ audio_callback_.reset();
+ alive_checker_.reset();
}
void AudioInputDevice::SetVolume(double volume) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT1("audio", "AudioInputDevice::SetVolume", "volume", volume);
if (volume < 0 || volume > 1.0) {
@@ -149,26 +160,41 @@ void AudioInputDevice::SetVolume(double volume) {
return;
}
- task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&AudioInputDevice::SetVolumeOnIOThread, this, volume));
+ if (state_ >= CREATING_STREAM)
+ ipc_->SetVolume(volume);
}
void AudioInputDevice::SetAutomaticGainControl(bool enabled) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT1("audio", "AudioInputDevice::SetAutomaticGainControl", "enabled",
enabled);
- task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&AudioInputDevice::SetAutomaticGainControlOnIOThread, this,
- enabled));
+ if (state_ >= CREATING_STREAM) {
+ DLOG(WARNING) << "The AGC state can not be modified after starting.";
+ return;
+ }
+
+ // We simply store the new AGC setting here. This value will be used when
+ // a new stream is initialized and by GetAutomaticGainControl().
+ agc_is_enabled_ = enabled;
+}
+
+void AudioInputDevice::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ TRACE_EVENT1("audio", "AudioInputDevice::SetOutputDeviceForAec",
+ "output_device_id", output_device_id);
+
+ output_device_id_for_aec_ = output_device_id;
+ if (state_ > CREATING_STREAM)
+ ipc_->SetOutputDeviceForAec(output_device_id);
}
void AudioInputDevice::OnStreamCreated(base::SharedMemoryHandle handle,
base::SyncSocket::Handle socket_handle,
bool initially_muted) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT0("audio", "AudioInputDevice::OnStreamCreated");
- DCHECK(task_runner()->BelongsToCurrentThread());
DCHECK(base::SharedMemory::IsHandleValid(handle));
#if defined(OS_WIN)
DCHECK(socket_handle);
@@ -180,22 +206,18 @@ void AudioInputDevice::OnStreamCreated(base::SharedMemoryHandle handle,
if (state_ != CREATING_STREAM)
return;
- base::AutoLock auto_lock(audio_thread_lock_);
- // TODO(miu): See TODO in OnStreamCreated method for AudioOutputDevice.
- // Interface changes need to be made; likely, after AudioInputDevice is merged
- // into AudioOutputDevice (http://crbug.com/179597).
- if (stopping_hack_)
- return;
-
DCHECK(!audio_callback_);
DCHECK(!audio_thread_);
if (initially_muted)
callback_->OnCaptureMuted(true);
+ if (output_device_id_for_aec_)
+ ipc_->SetOutputDeviceForAec(*output_device_id_for_aec_);
+
// Set up checker for detecting missing audio data. We pass a callback which
// holds a reference to this. |alive_checker_| is deleted in
-// ShutDownOnIOThread() which we expect to always be called (see comment in
+// Stop() which we expect to always be called (see comment in
// destructor). Suspend/resume notifications are not supported on Linux and
// there's a risk of false positives when suspending. So on Linux we only detect
// missing audio data until the first audio buffer arrives. Note that there's
@@ -231,8 +253,8 @@ void AudioInputDevice::OnStreamCreated(base::SharedMemoryHandle handle,
}
void AudioInputDevice::OnError() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT0("audio", "AudioInputDevice::OnError");
- DCHECK(task_runner()->BelongsToCurrentThread());
// Do nothing if the stream has been closed.
if (state_ < CREATING_STREAM)
@@ -256,15 +278,14 @@ void AudioInputDevice::OnError() {
// TODO(tommi): Add an explicit contract for clearing the callback
// object. Possibly require calling Initialize again or provide
// a callback object via Start() and clear it in Stop().
- base::AutoLock auto_lock_(audio_thread_lock_);
if (audio_thread_)
callback_->OnCaptureError("IPC delegate state error.");
}
}
void AudioInputDevice::OnMuted(bool is_muted) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT0("audio", "AudioInputDevice::OnMuted");
- DCHECK(task_runner()->BelongsToCurrentThread());
// Do nothing if the stream has been closed.
if (state_ < CREATING_STREAM)
@@ -273,8 +294,8 @@ void AudioInputDevice::OnMuted(bool is_muted) {
}
void AudioInputDevice::OnIPCClosed() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT0("audio", "AudioInputDevice::OnIPCClosed");
- DCHECK(task_runner()->BelongsToCurrentThread());
state_ = IPC_CLOSED;
ipc_.reset();
@@ -283,95 +304,14 @@ void AudioInputDevice::OnIPCClosed() {
AudioInputDevice::~AudioInputDevice() {
#if DCHECK_IS_ON()
// Make sure we've stopped the stream properly before destructing |this|.
- DCHECK(audio_thread_lock_.Try());
DCHECK_LE(state_, IDLE);
DCHECK(!audio_thread_);
DCHECK(!audio_callback_);
DCHECK(!alive_checker_);
- DCHECK(!stopping_hack_);
- audio_thread_lock_.Release();
#endif // DCHECK_IS_ON()
}
-void AudioInputDevice::StartUpOnIOThread() {
- DCHECK(task_runner()->BelongsToCurrentThread());
- DCHECK(callback_) << "Initialize hasn't been called";
-
- // Make sure we don't call Start() more than once.
- if (state_ != IDLE)
- return;
-
- if (session_id_ <= 0) {
- DLOG(WARNING) << "Invalid session id for the input stream " << session_id_;
- return;
- }
-
- state_ = CREATING_STREAM;
- ipc_->CreateStream(this, session_id_, audio_parameters_,
- agc_is_enabled_, kRequestedSharedMemoryCount);
-}
-
-void AudioInputDevice::ShutDownOnIOThread() {
- DCHECK(task_runner()->BelongsToCurrentThread());
-
- UMA_HISTOGRAM_BOOLEAN(
- "Media.Audio.Capture.DetectedMissingCallbacks",
- alive_checker_ ? alive_checker_->DetectedDead() : false);
-
- UMA_HISTOGRAM_BOOLEAN("Media.Audio.Capture.StreamCallbackError",
- had_callback_error_);
-
- // Close the stream, if we haven't already.
- if (state_ >= CREATING_STREAM) {
- ipc_->CloseStream();
- state_ = IDLE;
- agc_is_enabled_ = false;
- }
-
- // We can run into an issue where ShutDownOnIOThread is called right after
- // OnStreamCreated is called in cases where Start/Stop are called before we
- // get the OnStreamCreated callback. To handle that corner case, we call
- // Stop(). In most cases, the thread will already be stopped.
- //
- // Another situation is when the IO thread goes away before Stop() is called
- // in which case, we cannot use the message loop to close the thread handle
- // and can't not rely on the main thread existing either.
- //
- // |alive_checker_| must outlive |audio_callback_|.
- base::AutoLock auto_lock_(audio_thread_lock_);
- base::ThreadRestrictions::ScopedAllowIO allow_io;
- audio_thread_.reset();
- audio_callback_.reset();
- alive_checker_.reset();
- stopping_hack_ = false;
-}
-
-void AudioInputDevice::SetVolumeOnIOThread(double volume) {
- DCHECK(task_runner()->BelongsToCurrentThread());
- if (state_ >= CREATING_STREAM)
- ipc_->SetVolume(volume);
-}
-
-void AudioInputDevice::SetAutomaticGainControlOnIOThread(bool enabled) {
- DCHECK(task_runner()->BelongsToCurrentThread());
-
- if (state_ >= CREATING_STREAM) {
- DLOG(WARNING) << "The AGC state can not be modified after starting.";
- return;
- }
-
- // We simply store the new AGC setting here. This value will be used when
- // a new stream is initialized and by GetAutomaticGainControl().
- agc_is_enabled_ = enabled;
-}
-
-void AudioInputDevice::WillDestroyCurrentMessageLoop() {
- LOG(ERROR) << "IO loop going away before the input device has been stopped";
- ShutDownOnIOThread();
-}
-
void AudioInputDevice::DetectedDeadInputStream() {
- DCHECK(task_runner()->BelongsToCurrentThread());
callback_->OnCaptureError("No audio received from audio capture device.");
}
@@ -389,8 +329,7 @@ AudioInputDevice::AudioThreadCallback::AudioThreadCallback(
ComputeAudioInputBufferSize(audio_parameters, 1u),
total_segments),
start_time_(base::TimeTicks::Now()),
- bytes_per_ms_(static_cast<double>(audio_parameters.GetBytesPerSecond()) /
- base::Time::kMillisecondsPerSecond),
+ no_callbacks_received_(true),
current_segment_id_(0u),
last_buffer_id_(UINT32_MAX),
capture_callback_(capture_callback),
@@ -426,6 +365,13 @@ void AudioInputDevice::AudioThreadCallback::MapSharedMemory() {
void AudioInputDevice::AudioThreadCallback::Process(uint32_t pending_data) {
TRACE_EVENT_BEGIN0("audio", "AudioInputDevice::AudioThreadCallback::Process");
+
+ if (no_callbacks_received_) {
+ UMA_HISTOGRAM_TIMES("Media.Audio.Render.InputDeviceStartTime",
+ base::TimeTicks::Now() - start_time_);
+ no_callbacks_received_ = false;
+ }
+
// The shared memory represents parameters, size of the data buffer and the
// actual data buffer containing audio data. Map the memory into this
// structure and parse out parameters and the data area.
diff --git a/chromium/media/audio/audio_input_device.h b/chromium/media/audio/audio_input_device.h
index 6d3b8474c0d..bc6bfb43022 100644
--- a/chromium/media/audio/audio_input_device.h
+++ b/chromium/media/audio/audio_input_device.h
@@ -3,7 +3,7 @@
// found in the LICENSE file.
// Low-latency audio capturing class utilizing audio input stream provided
-// by a server (browser) process by use of an IPC interface.
+// by a server process by use of an IPC interface.
//
// Relationship of classes:
//
@@ -11,8 +11,8 @@
// ^ ^
// | |
// v IPC v
-// AudioInputRendererHost <-----------> AudioInputIPC
-// ^ (AudioInputMessageFilter)
+// MojoAudioInputStream <-----------> AudioInputIPC
+// ^ (MojoAudioInputIPC)
// |
// v
// AudioInputDeviceManager
@@ -22,30 +22,22 @@
// The AudioInputDevice user registers an AudioInputDevice::CaptureCallback by
// calling Initialize(). The callback will be called with recorded audio from
// the underlying audio layers.
-// The session ID is used by the AudioInputRendererHost to start the device
-// referenced by this ID.
+// The session ID is used by the RenderFrameAudioInputStreamFactory to start
+// the device referenced by this ID.
//
// State sequences:
//
-// Start -> InitializeOnIOThread -> CreateStream ->
+// Start -> CreateStream ->
// <- OnStreamCreated <-
-// -> StartOnIOThread -> PlayStream ->
-//
+// -> RecordStream ->
//
// AudioInputDevice::Capture => low latency audio transport on audio thread =>
-// |
-// Stop --> ShutDownOnIOThread ------> CloseStream -> Close
//
-// This class depends on two threads to function:
+// Stop -> CloseStream -> Close
//
-// 1. An IO thread.
-// This thread is used to asynchronously process Start/Stop etc operations
-// that are available via the public interface. The public methods are
-// asynchronous and simply post a task to the IO thread to actually perform
-// the work.
-// 2. Audio transport thread.
-// Responsible for calling the CaptureCallback and feed audio samples from
-// the server side audio layer using a socket and shared memory.
+// This class depends on the audio transport thread. That thread is responsible
+// for calling the CaptureCallback and feeding it audio samples from the server
+// side audio layer using a socket and shared memory.
//
// Implementation notes:
// - The user must call Stop() before deleting the class instance.
@@ -59,41 +51,32 @@
#include "base/compiler_specific.h"
#include "base/macros.h"
#include "base/memory/shared_memory.h"
+#include "base/optional.h"
+#include "base/sequence_checker.h"
#include "base/time/time.h"
-#include "base/timer/timer.h"
#include "media/audio/alive_checker.h"
#include "media/audio/audio_device_thread.h"
#include "media/audio/audio_input_ipc.h"
-#include "media/audio/scoped_task_runner_observer.h"
#include "media/base/audio_capturer_source.h"
#include "media/base/audio_parameters.h"
#include "media/base/media_export.h"
namespace media {
-// TODO(henrika): This class is based on the AudioOutputDevice class and it has
-// many components in common. Investigate potential for re-factoring.
-// See http://crbug.com/179597.
-// TODO(henrika): Add support for event handling (e.g. OnStateChanged,
-// OnCaptureStopped etc.) and ensure that we can deliver these notifications
-// to any clients using this class.
class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
- public AudioInputIPCDelegate,
- public ScopedTaskRunnerObserver {
+ public AudioInputIPCDelegate {
public:
// NOTE: Clients must call Initialize() before using.
- AudioInputDevice(
- std::unique_ptr<AudioInputIPC> ipc,
- const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
+ AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc);
// AudioCapturerSource implementation.
void Initialize(const AudioParameters& params,
- CaptureCallback* callback,
- int session_id) override;
+ CaptureCallback* callback) override;
void Start() override;
void Stop() override;
void SetVolume(double volume) override;
void SetAutomaticGainControl(bool enabled) override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
private:
friend class base::RefCountedThreadSafe<AudioInputDevice>;
@@ -111,7 +94,6 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
~AudioInputDevice() override;
- // Methods called on IO thread ----------------------------------------------
// AudioInputIPCDelegate implementation.
void OnStreamCreated(base::SharedMemoryHandle handle,
base::SyncSocket::Handle socket_handle,
@@ -120,24 +102,8 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
void OnMuted(bool is_muted) override;
void OnIPCClosed() override;
- // Methods called on IO thread ----------------------------------------------
- // The following methods are tasks posted on the IO thread that needs to
- // be executed on that thread. They interact with AudioInputMessageFilter and
- // sends IPC messages on that thread.
- void InitializeOnIOThread(const AudioParameters& params,
- CaptureCallback* callback,
- int session_id);
- void StartUpOnIOThread();
- void ShutDownOnIOThread();
- void SetVolumeOnIOThread(double volume);
- void SetAutomaticGainControlOnIOThread(bool enabled);
-
- // base::MessageLoop::DestructionObserver implementation for the IO loop.
- // If the IO loop dies before we do, we shut down the audio thread from here.
- void WillDestroyCurrentMessageLoop() override;
-
// This is called by |alive_checker_| if it detects that the input stream is
- // dead. Called on the IO thread.
+ // dead.
void DetectedDeadInputStream();
AudioParameters audio_parameters_;
@@ -145,46 +111,30 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
CaptureCallback* callback_;
// A pointer to the IPC layer that takes care of sending requests over to
- // the AudioInputRendererHost. Only valid when state_ != IPC_CLOSED and must
- // only be accessed on the IO thread.
+ // the stream implementation. Only valid when state_ != IPC_CLOSED.
std::unique_ptr<AudioInputIPC> ipc_;
- // Current state (must only be accessed from the IO thread). See comments for
- // State enum above.
+ // Current state. See comments for State enum above.
State state_;
- // For UMA stats. May only be accessed on the IO thread.
+ // For UMA stats.
bool had_callback_error_ = false;
- // The media session ID used to identify which input device to be started.
- // Only modified in Initialize() and ShutDownOnIOThread().
- int session_id_;
-
// Stores the Automatic Gain Control state. Default is false.
- // Only modified on the IO thread.
bool agc_is_enabled_;
- // In order to avoid a race between OnStreamCreated and Stop(), we use this
- // guard to control stopping and starting the audio thread.
- base::Lock audio_thread_lock_;
-
// Checks regularly that the input stream is alive and notifies us if it
- // isn't by calling DetectedDeadInputStream(). Created and deleted on the IO
- // thread. Must outlive |audio_callback_|.
+ // isn't by calling DetectedDeadInputStream(). Must outlive |audio_callback_|.
std::unique_ptr<AliveChecker> alive_checker_;
- // Created and deleted on the IO thread, with the exception of in Stop(),
- // where |audio_thread_| is reset (see comment on |audio_thread_lock_| above).
std::unique_ptr<AudioInputDevice::AudioThreadCallback> audio_callback_;
std::unique_ptr<AudioDeviceThread> audio_thread_;
- // Temporary hack to ignore OnStreamCreated() due to the user calling Stop()
- // so we don't start the audio thread pointing to a potentially freed
- // |callback_|.
- //
- // TODO(miu): Replace this by changing AudioCapturerSource to accept the
- // callback via Start(). See http://crbug.com/151051 for details.
- bool stopping_hack_;
+ SEQUENCE_CHECKER(sequence_checker_);
+
+  // Cache the output device used for AEC in case SetOutputDeviceForAec() is
+  // called before the stream is created.
+ base::Optional<std::string> output_device_id_for_aec_;
DISALLOW_IMPLICIT_CONSTRUCTORS(AudioInputDevice);
};
diff --git a/chromium/media/audio/audio_input_device_unittest.cc b/chromium/media/audio/audio_input_device_unittest.cc
index 7861aa3347f..ad5c30e412a 100644
--- a/chromium/media/audio/audio_input_device_unittest.cc
+++ b/chromium/media/audio/audio_input_device_unittest.cc
@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include "media/audio/audio_input_device.h"
#include "base/memory/ptr_util.h"
#include "base/memory/shared_memory.h"
#include "base/message_loop/message_loop.h"
@@ -9,7 +10,7 @@
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/sync_socket.h"
-#include "media/audio/audio_input_device.h"
+#include "base/test/scoped_task_environment.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gmock_mutant.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -32,14 +33,14 @@ class MockAudioInputIPC : public AudioInputIPC {
MockAudioInputIPC() = default;
~MockAudioInputIPC() override = default;
- MOCK_METHOD5(CreateStream,
+ MOCK_METHOD4(CreateStream,
void(AudioInputIPCDelegate* delegate,
- int session_id,
const AudioParameters& params,
bool automatic_gain_control,
uint32_t total_segments));
MOCK_METHOD0(RecordStream, void());
MOCK_METHOD1(SetVolume, void(double volume));
+ MOCK_METHOD1(SetOutputDeviceForAec, void(const std::string&));
MOCK_METHOD0(CloseStream, void());
};
@@ -59,12 +60,6 @@ class MockCaptureCallback : public AudioCapturerSource::CaptureCallback {
MOCK_METHOD1(OnCaptureMuted, void(bool is_muted));
};
-// Used to terminate a loop from a different thread than the loop belongs to.
-// |task_runner| should be a SingleThreadTaskRunner.
-ACTION_P(QuitLoop, task_runner) {
- task_runner->PostTask(FROM_HERE, base::MessageLoop::QuitWhenIdleClosure());
-}
-
} // namespace.
// Regular construction.
@@ -72,7 +67,7 @@ TEST(AudioInputDeviceTest, Noop) {
base::MessageLoopForIO io_loop;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
scoped_refptr<AudioInputDevice> device(
- new AudioInputDevice(base::WrapUnique(input_ipc), io_loop.task_runner()));
+ new AudioInputDevice(base::WrapUnique(input_ipc)));
}
ACTION_P(ReportStateChange, device) {
@@ -82,22 +77,18 @@ ACTION_P(ReportStateChange, device) {
// Verify that we get an OnCaptureError() callback if CreateStream fails.
TEST(AudioInputDeviceTest, FailToCreateStream) {
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 48000, 16, 480);
+ CHANNEL_LAYOUT_STEREO, 48000, 480);
- base::MessageLoopForIO io_loop;
MockCaptureCallback callback;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
scoped_refptr<AudioInputDevice> device(
- new AudioInputDevice(base::WrapUnique(input_ipc), io_loop.task_runner()));
- device->Initialize(params, &callback, 1);
- device->Start();
- EXPECT_CALL(*input_ipc, CreateStream(_, _, _, _, _))
+ new AudioInputDevice(base::WrapUnique(input_ipc)));
+ device->Initialize(params, &callback);
+ EXPECT_CALL(*input_ipc, CreateStream(_, _, _, _))
.WillOnce(ReportStateChange(device.get()));
- EXPECT_CALL(callback, OnCaptureError(_))
- .WillOnce(QuitLoop(io_loop.task_runner()));
- base::RunLoop().Run();
+ EXPECT_CALL(callback, OnCaptureError(_));
+ device->Start();
device->Stop();
- base::RunLoop().RunUntilIdle();
}
ACTION_P3(ReportOnStreamCreated, device, handle, socket) {
@@ -107,7 +98,7 @@ ACTION_P3(ReportOnStreamCreated, device, handle, socket) {
TEST(AudioInputDeviceTest, CreateStream) {
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 48000, 16, 480);
+ CHANNEL_LAYOUT_STEREO, 48000, 480);
SharedMemory shared_memory;
CancelableSyncSocket browser_socket;
CancelableSyncSocket renderer_socket;
@@ -127,24 +118,23 @@ TEST(AudioInputDeviceTest, CreateStream) {
shared_memory.handle().Duplicate();
ASSERT_TRUE(duplicated_memory_handle.IsValid());
- base::MessageLoopForIO io_loop;
+ base::test::ScopedTaskEnvironment ste;
MockCaptureCallback callback;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
scoped_refptr<AudioInputDevice> device(
- new AudioInputDevice(base::WrapUnique(input_ipc), io_loop.task_runner()));
- device->Initialize(params, &callback, 1);
- device->Start();
+ new AudioInputDevice(base::WrapUnique(input_ipc)));
+ device->Initialize(params, &callback);
- EXPECT_CALL(*input_ipc, CreateStream(_, _, _, _, _))
+ EXPECT_CALL(*input_ipc, CreateStream(_, _, _, _))
.WillOnce(ReportOnStreamCreated(
device.get(), duplicated_memory_handle,
SyncSocket::UnwrapHandle(audio_device_socket_descriptor)));
EXPECT_CALL(*input_ipc, RecordStream());
- EXPECT_CALL(callback, OnCaptureStarted())
- .WillOnce(QuitLoop(io_loop.task_runner()));
- base::RunLoop().Run();
+
+ EXPECT_CALL(callback, OnCaptureStarted());
+ device->Start();
+ EXPECT_CALL(*input_ipc, CloseStream());
device->Stop();
- base::RunLoop().RunUntilIdle();
duplicated_memory_handle.Close();
}
diff --git a/chromium/media/audio/audio_input_ipc.h b/chromium/media/audio/audio_input_ipc.h
index bc3dc272ee9..231b7e08845 100644
--- a/chromium/media/audio/audio_input_ipc.h
+++ b/chromium/media/audio/audio_input_ipc.h
@@ -20,13 +20,8 @@ namespace media {
class MEDIA_EXPORT AudioInputIPCDelegate {
public:
// Called when an AudioInputController has been created.
- // The shared memory |handle| points to a memory section that's used to
- // transfer data between the AudioInputDevice and AudioInputController
- // objects. The implementation of OnStreamCreated takes ownership.
- // The |socket_handle| is used by the AudioInputController to signal
- // notifications that more data is available and can optionally provide
- // parameter changes back. The AudioInputDevice must read from this socket
- // and process the shared memory whenever data is read from the socket.
+ // See media/mojo/interfaces/audio_data_pipe.mojom for documentation of
+ // |handle| and |socket_handle|.
virtual void OnStreamCreated(base::SharedMemoryHandle handle,
base::SyncSocket::Handle socket_handle,
bool initially_muted) = 0;
@@ -61,7 +56,6 @@ class MEDIA_EXPORT AudioInputIPC {
// memory buffer. Once the stream has been created, the implementation will
// notify |delegate| by calling OnStreamCreated().
virtual void CreateStream(AudioInputIPCDelegate* delegate,
- int session_id,
const AudioParameters& params,
bool automatic_gain_control,
uint32_t total_segments) = 0;
@@ -72,6 +66,11 @@ class MEDIA_EXPORT AudioInputIPC {
// Sets the volume of the audio stream.
virtual void SetVolume(double volume) = 0;
+ // Sets the output device from which to cancel echo, if supported. The
+ // |output_device_id| can be gotten from a device enumeration. Must not be
+ // called before the stream has been successfully created.
+ virtual void SetOutputDeviceForAec(const std::string& output_device_id) = 0;
+
// Closes the audio stream, which should shut down the corresponding
// AudioInputController in the peer process.
virtual void CloseStream() = 0;
diff --git a/chromium/media/audio/audio_input_stream_data_interceptor.cc b/chromium/media/audio/audio_input_stream_data_interceptor.cc
index 24c248c1746..b50303851f5 100644
--- a/chromium/media/audio/audio_input_stream_data_interceptor.cc
+++ b/chromium/media/audio/audio_input_stream_data_interceptor.cc
@@ -80,6 +80,12 @@ bool AudioInputStreamDataInterceptor::GetAutomaticGainControl() {
return stream_->GetAutomaticGainControl();
}
+void AudioInputStreamDataInterceptor::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ return stream_->SetOutputDeviceForAec(output_device_id);
+}
+
void AudioInputStreamDataInterceptor::OnData(const AudioBus* source,
base::TimeTicks capture_time,
double volume) {
diff --git a/chromium/media/audio/audio_input_stream_data_interceptor.h b/chromium/media/audio/audio_input_stream_data_interceptor.h
index 42719f4a024..495f3798033 100644
--- a/chromium/media/audio/audio_input_stream_data_interceptor.h
+++ b/chromium/media/audio/audio_input_stream_data_interceptor.h
@@ -46,6 +46,7 @@ class MEDIA_EXPORT AudioInputStreamDataInterceptor
bool IsMuted() override;
bool SetAutomaticGainControl(bool enabled) override;
bool GetAutomaticGainControl() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
// Implementation of AudioInputCallback
void OnData(const AudioBus* source,
diff --git a/chromium/media/audio/audio_input_stream_data_interceptor_unittest.cc b/chromium/media/audio/audio_input_stream_data_interceptor_unittest.cc
index 05b8357c965..dee8e9d9b1e 100644
--- a/chromium/media/audio/audio_input_stream_data_interceptor_unittest.cc
+++ b/chromium/media/audio/audio_input_stream_data_interceptor_unittest.cc
@@ -28,7 +28,7 @@ const double kVolume = 0.3456;
class MockStream : public AudioInputStream {
public:
MockStream() = default;
- ~MockStream() = default;
+ ~MockStream() override = default;
MOCK_METHOD0(Open, bool());
MOCK_METHOD1(Start, void(AudioInputStream::AudioInputCallback*));
MOCK_METHOD0(Stop, void());
@@ -39,19 +39,20 @@ class MockStream : public AudioInputStream {
MOCK_METHOD1(SetAutomaticGainControl, bool(bool));
MOCK_METHOD0(GetAutomaticGainControl, bool());
MOCK_METHOD0(IsMuted, bool());
+ MOCK_METHOD1(SetOutputDeviceForAec, void(const std::string&));
};
class MockDebugRecorder : public AudioDebugRecorder {
public:
MockDebugRecorder() = default;
- ~MockDebugRecorder() = default;
+ ~MockDebugRecorder() override = default;
MOCK_METHOD1(OnData, void(const AudioBus* source));
};
class MockCallback : public AudioInputStream::AudioInputCallback {
public:
MockCallback() = default;
- ~MockCallback() = default;
+ ~MockCallback() override = default;
MOCK_METHOD3(OnData, void(const AudioBus*, base::TimeTicks, double));
MOCK_METHOD0(OnError, void());
diff --git a/chromium/media/audio/audio_input_sync_writer.cc b/chromium/media/audio/audio_input_sync_writer.cc
index 55f13c34cf0..cb1484fabf5 100644
--- a/chromium/media/audio/audio_input_sync_writer.cc
+++ b/chromium/media/audio/audio_input_sync_writer.cc
@@ -8,7 +8,6 @@
#include <utility>
#include "base/format_macros.h"
-#include "base/memory/shared_memory.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/trace_event.h"
@@ -45,33 +44,36 @@ operator=(AudioInputSyncWriter::OverflowData&& other) = default;
AudioInputSyncWriter::AudioInputSyncWriter(
base::RepeatingCallback<void(const std::string&)> log_callback,
- std::unique_ptr<base::SharedMemory> shared_memory,
+ base::MappedReadOnlyRegion shared_memory,
std::unique_ptr<base::CancelableSyncSocket> socket,
uint32_t shared_memory_segment_count,
const AudioParameters& params)
: log_callback_(std::move(log_callback)),
socket_(std::move(socket)),
- shared_memory_(std::move(shared_memory)),
+ shared_memory_region_(std::move(shared_memory.region)),
+ shared_memory_mapping_(std::move(shared_memory.mapping)),
shared_memory_segment_size_(
(CHECK(shared_memory_segment_count > 0),
- shared_memory_->requested_size() / shared_memory_segment_count)),
+ shared_memory_mapping_.size() / shared_memory_segment_count)),
creation_time_(base::TimeTicks::Now()),
audio_bus_memory_size_(AudioBus::CalculateMemorySize(params)) {
// We use CHECKs since this class is used for IPC.
+ DCHECK(log_callback_);
CHECK(socket_);
- CHECK(shared_memory_);
+ CHECK(shared_memory_region_.IsValid());
+ CHECK(shared_memory_mapping_.IsValid());
CHECK_EQ(shared_memory_segment_size_ * shared_memory_segment_count,
- shared_memory_->requested_size());
+ shared_memory_mapping_.size());
CHECK_EQ(shared_memory_segment_size_,
audio_bus_memory_size_ + sizeof(AudioInputBufferParameters));
- DVLOG(1) << "shared memory size: " << shared_memory_->requested_size();
+ DVLOG(1) << "shared memory size: " << shared_memory_mapping_.size();
DVLOG(1) << "shared memory segment count: " << shared_memory_segment_count;
DVLOG(1) << "audio bus memory size: " << audio_bus_memory_size_;
audio_buses_.resize(shared_memory_segment_count);
// Create vector of audio buses by wrapping existing blocks of memory.
- uint8_t* ptr = static_cast<uint8_t*>(shared_memory_->memory());
+ uint8_t* ptr = static_cast<uint8_t*>(shared_memory_mapping_.memory());
CHECK(ptr);
for (auto& bus : audio_buses_) {
CHECK_EQ(0U, reinterpret_cast<uintptr_t>(ptr) &
@@ -142,14 +144,10 @@ std::unique_ptr<AudioInputSyncWriter> AudioInputSyncWriter::Create(
return nullptr;
// Make sure we can share the memory read-only with the client.
- base::SharedMemoryCreateOptions shmem_options;
- shmem_options.size = requested_memory_size.ValueOrDie();
- shmem_options.share_read_only = true;
- auto shared_memory = std::make_unique<base::SharedMemory>();
- if (!shared_memory->Create(shmem_options) ||
- !shared_memory->Map(shmem_options.size)) {
+ auto shared_memory = base::ReadOnlySharedMemoryRegion::Create(
+ requested_memory_size.ValueOrDie());
+ if (!shared_memory.IsValid())
return nullptr;
- }
auto socket = std::make_unique<base::CancelableSyncSocket>();
if (!base::CancelableSyncSocket::CreatePair(socket.get(), foreign_socket)) {
@@ -161,6 +159,12 @@ std::unique_ptr<AudioInputSyncWriter> AudioInputSyncWriter::Create(
shared_memory_segment_count, params);
}
+base::ReadOnlySharedMemoryRegion
+AudioInputSyncWriter::TakeSharedMemoryRegion() {
+ DCHECK(shared_memory_region_.IsValid());
+ return std::move(shared_memory_region_);
+}
+
void AudioInputSyncWriter::Write(const AudioBus* data,
double volume,
bool key_pressed,
@@ -342,7 +346,7 @@ void AudioInputSyncWriter::WriteParametersToCurrentSegment(
base::TimeTicks capture_time) {
TRACE_EVENT1("audio", "WriteParametersToCurrentSegment", "capture time (ms)",
(capture_time - base::TimeTicks()).InMillisecondsF());
- uint8_t* ptr = static_cast<uint8_t*>(shared_memory_->memory());
+ uint8_t* ptr = static_cast<uint8_t*>(shared_memory_mapping_.memory());
CHECK_LT(current_segment_id_, audio_buses_.size());
ptr += current_segment_id_ * shared_memory_segment_size_;
AudioInputBuffer* buffer = reinterpret_cast<AudioInputBuffer*>(ptr);
diff --git a/chromium/media/audio/audio_input_sync_writer.h b/chromium/media/audio/audio_input_sync_writer.h
index 20f1b271c41..5c579c3a402 100644
--- a/chromium/media/audio/audio_input_sync_writer.h
+++ b/chromium/media/audio/audio_input_sync_writer.h
@@ -15,6 +15,7 @@
#include "base/containers/circular_deque.h"
#include "base/gtest_prod_util.h"
#include "base/macros.h"
+#include "base/memory/read_only_shared_memory_region.h"
#include "base/sync_socket.h"
#include "base/time/time.h"
#include "build/build_config.h"
@@ -27,10 +28,6 @@
#include "base/file_descriptor_posix.h"
#endif
-namespace base {
-class SharedMemory;
-}
-
namespace media {
// A AudioInputController::SyncWriter implementation using SyncSocket. This
@@ -48,7 +45,7 @@ class MEDIA_EXPORT AudioInputSyncWriter
// and should be strongly preferred over calling the constructor directly!
AudioInputSyncWriter(
base::RepeatingCallback<void(const std::string&)> log_callback,
- std::unique_ptr<base::SharedMemory> shared_memory,
+ base::MappedReadOnlyRegion shared_memory,
std::unique_ptr<base::CancelableSyncSocket> socket,
uint32_t shared_memory_segment_count,
const AudioParameters& params);
@@ -61,9 +58,9 @@ class MEDIA_EXPORT AudioInputSyncWriter
const AudioParameters& params,
base::CancelableSyncSocket* foreign_socket);
- const base::SharedMemory* shared_memory() const {
- return shared_memory_.get();
- }
+ // Transfers shared memory region ownership to a caller. It shouldn't be
+ // called more than once.
+ base::ReadOnlySharedMemoryRegion TakeSharedMemoryRegion();
size_t shared_memory_segment_count() const { return audio_buses_.size(); }
@@ -111,7 +108,8 @@ class MEDIA_EXPORT AudioInputSyncWriter
const std::unique_ptr<base::CancelableSyncSocket> socket_;
// Shared memory for audio data and associated metadata.
- const std::unique_ptr<base::SharedMemory> shared_memory_;
+ base::ReadOnlySharedMemoryRegion shared_memory_region_;
+ const base::WritableSharedMemoryMapping shared_memory_mapping_;
// The size in bytes of a single audio segment in the shared memory.
const uint32_t shared_memory_segment_size_;
diff --git a/chromium/media/audio/audio_input_sync_writer_unittest.cc b/chromium/media/audio/audio_input_sync_writer_unittest.cc
index 61e01d6ccc8..c887c331c1b 100644
--- a/chromium/media/audio/audio_input_sync_writer_unittest.cc
+++ b/chromium/media/audio/audio_input_sync_writer_unittest.cc
@@ -14,7 +14,7 @@
#include "base/compiler_specific.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/read_only_shared_memory_region.h"
#include "base/sync_socket.h"
#include "base/test/mock_callback.h"
#include "base/test/scoped_task_environment.h"
@@ -107,15 +107,14 @@ class AudioInputSyncWriterTest : public testing::Test {
AudioInputSyncWriterTest() {
const int sampling_frequency_hz = 16000;
const int frames = sampling_frequency_hz / 100; // 10 ms
- const int bits_per_sample = 16;
- const AudioParameters audio_params(
- AudioParameters::AUDIO_FAKE, CHANNEL_LAYOUT_MONO, sampling_frequency_hz,
- bits_per_sample, frames);
+ const AudioParameters audio_params(AudioParameters::AUDIO_FAKE,
+ CHANNEL_LAYOUT_MONO,
+ sampling_frequency_hz, frames);
const uint32_t data_size =
ComputeAudioInputBufferSize(audio_params, kSegments);
- auto shared_memory = std::make_unique<base::SharedMemory>();
- EXPECT_TRUE(shared_memory->CreateAndMapAnonymous(data_size));
+ auto shared_memory = base::ReadOnlySharedMemoryRegion::Create(data_size);
+ EXPECT_TRUE(shared_memory.IsValid());
auto socket = std::make_unique<MockCancelableSyncSocket>(kSegments);
socket_ = socket.get();
diff --git a/chromium/media/audio/audio_io.h b/chromium/media/audio/audio_io.h
index a781f7e6aab..ed22a2944a7 100644
--- a/chromium/media/audio/audio_io.h
+++ b/chromium/media/audio/audio_io.h
@@ -177,6 +177,11 @@ class MEDIA_EXPORT AudioInputStream {
// Returns the current muting state for the microphone.
virtual bool IsMuted() = 0;
+
+ // Sets the output device from which to cancel echo, if echo cancellation is
+ // supported by this stream. E.g. called by WebRTC when it changes playback
+ // devices.
+ virtual void SetOutputDeviceForAec(const std::string& output_device_id) = 0;
};
} // namespace media
diff --git a/chromium/media/audio/audio_low_latency_input_output_unittest.cc b/chromium/media/audio/audio_low_latency_input_output_unittest.cc
index 9ab51dea6f1..cb2100829a5 100644
--- a/chromium/media/audio/audio_low_latency_input_output_unittest.cc
+++ b/chromium/media/audio/audio_low_latency_input_output_unittest.cc
@@ -126,7 +126,7 @@ class FullDuplexAudioSinkSource
// Get complete file path to output file in the directory containing
// media_unittests.exe. Example: src/build/Debug/audio_delay_values_ms.txt.
base::FilePath file_name;
- EXPECT_TRUE(PathService::Get(base::DIR_EXE, &file_name));
+ EXPECT_TRUE(base::PathService::Get(base::DIR_EXE, &file_name));
file_name = file_name.AppendASCII(kDelayValuesFileName);
FILE* text_file = base::OpenFile(file_name, "wt");
@@ -279,15 +279,14 @@ class StreamWrapper {
typedef typename StreamTraits::StreamType StreamType;
explicit StreamWrapper(AudioManager* audio_manager)
- :
- audio_manager_(audio_manager),
+ : audio_manager_(audio_manager),
format_(AudioParameters::AUDIO_PCM_LOW_LATENCY),
#if defined(OS_ANDROID)
- channel_layout_(CHANNEL_LAYOUT_MONO),
+ channel_layout_(CHANNEL_LAYOUT_MONO)
#else
- channel_layout_(CHANNEL_LAYOUT_STEREO),
+ channel_layout_(CHANNEL_LAYOUT_STEREO)
#endif
- bits_per_sample_(16) {
+ {
// Use the preferred sample rate.
const AudioParameters& params =
StreamTraits::GetDefaultAudioStreamParameters(audio_manager_);
@@ -309,15 +308,14 @@ class StreamWrapper {
int channels() const {
return ChannelLayoutToChannelCount(channel_layout_);
}
- int bits_per_sample() const { return bits_per_sample_; }
int sample_rate() const { return sample_rate_; }
int samples_per_packet() const { return samples_per_packet_; }
private:
StreamType* CreateStream() {
- StreamType* stream = StreamTraits::CreateStream(audio_manager_,
- AudioParameters(format_, channel_layout_, sample_rate_,
- bits_per_sample_, samples_per_packet_));
+ StreamType* stream = StreamTraits::CreateStream(
+ audio_manager_, AudioParameters(format_, channel_layout_, sample_rate_,
+ samples_per_packet_));
EXPECT_TRUE(stream);
return stream;
}
@@ -325,7 +323,6 @@ class StreamWrapper {
AudioManager* audio_manager_;
AudioParameters::Format format_;
ChannelLayout channel_layout_;
- int bits_per_sample_;
int sample_rate_;
int samples_per_packet_;
};
@@ -364,8 +361,7 @@ TEST_F(AudioLowLatencyInputOutputTest, DISABLED_FullDuplexDelayMeasurement) {
// buffer sizes for input and output.
if (aisw.sample_rate() != aosw.sample_rate() ||
aisw.samples_per_packet() != aosw.samples_per_packet() ||
- aisw.channels()!= aosw.channels() ||
- aisw.bits_per_sample() != aosw.bits_per_sample()) {
+ aisw.channels() != aosw.channels()) {
LOG(ERROR) << "This test requires symmetric input and output parameters. "
"Ensure that sample rate and number of channels are identical in "
"both directions";
@@ -392,7 +388,7 @@ TEST_F(AudioLowLatencyInputOutputTest, DISABLED_FullDuplexDelayMeasurement) {
// in loop back during this time. At the same time, delay recordings are
// performed and stored in the output text file.
message_loop()->task_runner()->PostDelayedTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure(),
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated(),
TestTimeouts::action_timeout());
base::RunLoop().Run();
diff --git a/chromium/media/audio/audio_manager.cc b/chromium/media/audio/audio_manager.cc
index 0dd98875b14..ee635709921 100644
--- a/chromium/media/audio/audio_manager.cc
+++ b/chromium/media/audio/audio_manager.cc
@@ -13,7 +13,6 @@
#include "base/command_line.h"
#include "base/logging.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram_macros.h"
#include "base/power_monitor/power_monitor.h"
#include "base/single_thread_task_runner.h"
@@ -286,9 +285,7 @@ std::unique_ptr<AudioManager> AudioManager::Create(
AudioLogFactory* audio_log_factory) {
std::unique_ptr<AudioManager> manager =
CreateAudioManager(std::move(audio_thread), audio_log_factory);
-#if BUILDFLAG(ENABLE_WEBRTC)
manager->InitializeDebugRecording();
-#endif
return manager;
}
diff --git a/chromium/media/audio/audio_manager.h b/chromium/media/audio/audio_manager.h
index 12d9e0c7fa5..6376261d219 100644
--- a/chromium/media/audio/audio_manager.h
+++ b/chromium/media/audio/audio_manager.h
@@ -132,7 +132,6 @@ class MEDIA_EXPORT AudioManager {
// Factory to create audio recording streams.
// |channels| can be 1 or 2.
// |sample_rate| is in hertz and can be any value supported by the platform.
- // |bits_per_sample| can be any value supported by the platform.
// |samples_per_packet| is in hertz as well and can be 0 to |sample_rate|,
// with 0 suggesting that the implementation use a default value for that
// platform.
diff --git a/chromium/media/audio/audio_manager_base.cc b/chromium/media/audio/audio_manager_base.cc
index d9dbf264304..f4e67c240ea 100644
--- a/chromium/media/audio/audio_manager_base.cc
+++ b/chromium/media/audio/audio_manager_base.cc
@@ -25,9 +25,7 @@
#include "media/audio/fake_audio_output_stream.h"
#include "media/base/media_switches.h"
-#if BUILDFLAG(ENABLE_WEBRTC)
#include "media/audio/audio_input_stream_data_interceptor.h"
-#endif // BUILDFLAG(ENABLE_WEBRTC)
namespace media {
@@ -41,7 +39,7 @@ const int kDefaultMaxOutputStreams = 16;
// Default maximum number of input streams that can be open simultaneously
// for all platforms.
-const int kDefaultMaxInputStreams = 16;
+const int kMaxInputStreams = 16;
const int kMaxInputChannels = 3;
@@ -108,7 +106,6 @@ AudioManagerBase::AudioManagerBase(std::unique_ptr<AudioThread> audio_thread,
AudioLogFactory* audio_log_factory)
: AudioManager(std::move(audio_thread)),
max_num_output_streams_(kDefaultMaxOutputStreams),
- max_num_input_streams_(kDefaultMaxInputStreams),
num_output_streams_(0),
// TODO(dalecurtis): Switch this to an base::ObserverListThreadSafe, so we
// don't block the UI thread when swapping devices.
@@ -167,26 +164,14 @@ void AudioManagerBase::GetAudioDeviceDescriptions(
}
for (auto& name : device_names) {
- // The |device_name| field as returned by get_device_names() contains a
- // a generic string such as "Default" or "Communications" for the default
- // and communications devices. If the names of the real devices mapped to
- // the default or communications devices were found, append the name of
- // the real devices to the corresponding entries.
- // It is possible that the real names were not found if a new device was
- // plugged in and designated as default/communications device after
- // get_device_names() returns and before get_default_device_id() or
- // get_communications_device_id() is called.
- std::string device_name = std::move(name.device_name);
- if (AudioDeviceDescription::IsDefaultDevice(name.unique_id) &&
- !real_default_name.empty()) {
- device_name += " - " + real_default_name;
- } else if (AudioDeviceDescription::IsCommunicationsDevice(name.unique_id) &&
- !real_communications_name.empty()) {
- device_name += " - " + real_communications_name;
- }
+ if (AudioDeviceDescription::IsDefaultDevice(name.unique_id))
+ name.device_name = real_default_name;
+ else if (AudioDeviceDescription::IsCommunicationsDevice(name.unique_id))
+ name.device_name = real_communications_name;
std::string group_id = (this->*get_group_id)(name.unique_id);
- device_descriptions->emplace_back(
- std::move(device_name), std::move(name.unique_id), std::move(group_id));
+ device_descriptions->emplace_back(std::move(name.device_name),
+ std::move(name.unique_id),
+ std::move(group_id));
}
}
@@ -196,9 +181,14 @@ AudioOutputStream* AudioManagerBase::MakeAudioOutputStream(
const LogCallback& log_callback) {
CHECK(GetTaskRunner()->BelongsToCurrentThread());
+ if (base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kFailAudioStreamCreation)) {
+ return nullptr;
+ }
+
if (!params.IsValid()) {
DLOG(ERROR) << "Audio parameters are invalid";
- return NULL;
+ return nullptr;
}
// Limit the number of audio streams opened. This is to prevent using
@@ -210,7 +200,7 @@ AudioOutputStream* AudioManagerBase::MakeAudioOutputStream(
<< num_output_streams_
<< " exceed the max allowed number "
<< max_num_output_streams_;
- return NULL;
+ return nullptr;
}
AudioOutputStream* stream;
@@ -231,7 +221,7 @@ AudioOutputStream* AudioManagerBase::MakeAudioOutputStream(
stream = FakeAudioOutputStream::MakeFakeStream(this, params);
break;
default:
- stream = NULL;
+ stream = nullptr;
break;
}
@@ -255,17 +245,22 @@ AudioInputStream* AudioManagerBase::MakeAudioInputStream(
const LogCallback& log_callback) {
CHECK(GetTaskRunner()->BelongsToCurrentThread());
+ if (base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kFailAudioStreamCreation)) {
+ return nullptr;
+ }
+
if (!params.IsValid() || (params.channels() > kMaxInputChannels) ||
device_id.empty()) {
DLOG(ERROR) << "Audio parameters are invalid for device " << device_id;
- return NULL;
+ return nullptr;
}
- if (input_stream_count() >= max_num_input_streams_) {
+ if (input_stream_count() >= kMaxInputStreams) {
DLOG(ERROR) << "Number of opened input audio streams "
<< input_stream_count() << " exceed the max allowed number "
- << max_num_input_streams_;
- return NULL;
+ << kMaxInputStreams;
+ return nullptr;
}
DVLOG(2) << "Creating a new AudioInputStream with buffer size = "
@@ -283,14 +278,13 @@ AudioInputStream* AudioManagerBase::MakeAudioInputStream(
stream = FakeAudioInputStream::MakeFakeStream(this, params);
break;
default:
- stream = NULL;
+ stream = nullptr;
break;
}
if (stream) {
input_streams_.insert(stream);
-#if BUILDFLAG(ENABLE_WEBRTC)
if (!params.IsBitstreamFormat() && debug_recording_manager_) {
// Using unretained for |debug_recording_manager_| is safe since it
// outlives the audio thread, on which streams are operated.
@@ -305,7 +299,6 @@ AudioInputStream* AudioManagerBase::MakeAudioInputStream(
AudioDebugRecordingStreamType::kInput, params),
stream);
}
-#endif // BUILDFLAG(ENABLE_WEBRTC)
}
return stream;
@@ -366,11 +359,7 @@ AudioOutputStream* AudioManagerBase::MakeAudioOutputStreamProxy(
// output device based on the input parameters. This may happen if the OS
// provided us junk values for the hardware configuration.
LOG(ERROR) << "Invalid audio output parameters received; using fake "
- << "audio path. Channels: " << output_params.channels() << ", "
- << "Sample Rate: " << output_params.sample_rate() << ", "
- << "Bits Per Sample: " << output_params.bits_per_sample()
- << ", Frames Per Buffer: "
- << output_params.frames_per_buffer();
+ << "audio path: " << output_params.AsHumanReadableString();
// Tell the AudioManager to create a fake output device.
output_params = params;
@@ -544,6 +533,16 @@ std::string AudioManagerBase::GetGroupIDInput(
return GetGroupIDOutput(output_device_id);
}
+void AudioManagerBase::CloseAllInputStreams() {
+ for (auto iter = input_streams_.begin(); iter != input_streams_.end();) {
+ // Note: Closing the stream will invalidate the iterator.
+ // Increment the iterator before closing the stream.
+ AudioInputStream* stream = *iter++;
+ stream->Close();
+ }
+ CHECK(input_streams_.empty());
+}
+
std::string AudioManagerBase::GetDefaultInputDeviceID() {
return std::string();
}
@@ -603,10 +602,4 @@ AudioDebugRecordingManager* AudioManagerBase::GetAudioDebugRecordingManager() {
return debug_recording_manager_.get();
}
-void AudioManagerBase::SetMaxStreamCountForTesting(int max_input,
- int max_output) {
- max_num_output_streams_ = max_output;
- max_num_input_streams_ = max_input;
-}
-
} // namespace media
diff --git a/chromium/media/audio/audio_manager_base.h b/chromium/media/audio/audio_manager_base.h
index e8568d978aa..6dc353903c6 100644
--- a/chromium/media/audio/audio_manager_base.h
+++ b/chromium/media/audio/audio_manager_base.h
@@ -33,6 +33,8 @@ class AudioOutputDispatcher;
// AudioManagerBase provides AudioManager functions common for all platforms.
class MEDIA_EXPORT AudioManagerBase : public AudioManager {
public:
+ enum class VoiceProcessingMode { kDisabled = 0, kEnabled = 1 };
+
~AudioManagerBase() override;
AudioOutputStream* MakeAudioOutputStream(
@@ -55,8 +57,6 @@ class MEDIA_EXPORT AudioManagerBase : public AudioManager {
AudioLogFactory::AudioComponent component,
int component_id) override;
- void SetMaxStreamCountForTesting(int max_input, int max_output) final;
-
// AudioManagerBase:
// Called internally by the audio stream when it has been closed.
@@ -168,6 +168,9 @@ class MEDIA_EXPORT AudioManagerBase : public AudioManager {
virtual std::string GetGroupIDOutput(const std::string& output_device_id);
virtual std::string GetGroupIDInput(const std::string& input_device_id);
+ // Closes all currently open input streams.
+ void CloseAllInputStreams();
+
private:
FRIEND_TEST_ALL_PREFIXES(AudioManagerTest, AudioDebugRecording);
@@ -190,9 +193,6 @@ class MEDIA_EXPORT AudioManagerBase : public AudioManager {
// SetMaxOutputStreamsAllowed().
int max_num_output_streams_;
- // Max number of open input streams.
- int max_num_input_streams_;
-
// Number of currently open output streams.
int num_output_streams_;
diff --git a/chromium/media/audio/audio_manager_unittest.cc b/chromium/media/audio/audio_manager_unittest.cc
index 71db2ec42d3..9cfbb127f2c 100644
--- a/chromium/media/audio/audio_manager_unittest.cc
+++ b/chromium/media/audio/audio_manager_unittest.cc
@@ -218,7 +218,7 @@ class AudioManagerTest : public ::testing::Test {
public:
void HandleDefaultDeviceIDsTest() {
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 48000, 16, 2048);
+ CHANNEL_LAYOUT_STEREO, 48000, 2048);
// Create a stream with the default device id "".
AudioOutputStream* stream =
@@ -288,9 +288,6 @@ class AudioManagerTest : public ::testing::Test {
AudioDeviceDescriptions::const_iterator it = device_descriptions.begin();
// The first device in the list should always be the default device.
- EXPECT_TRUE(base::StartsWith(
- it->device_name, AudioDeviceDescription::GetDefaultDeviceName(),
- base::CompareCase::SENSITIVE));
EXPECT_EQ(std::string(AudioDeviceDescription::kDefaultDeviceId),
it->unique_id);
++it;
diff --git a/chromium/media/audio/audio_output_controller_unittest.cc b/chromium/media/audio/audio_output_controller_unittest.cc
index f654b07bcca..5847d481396 100644
--- a/chromium/media/audio/audio_output_controller_unittest.cc
+++ b/chromium/media/audio/audio_output_controller_unittest.cc
@@ -46,7 +46,6 @@ using ::testing::Mock;
namespace media {
static const int kSampleRate = AudioParameters::kAudioCDSampleRate;
-static const int kBitsPerSample = 16;
static const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
static const int kSamplesPerPacket = kSampleRate / 1000;
static const double kTestVolume = 0.25;
@@ -54,7 +53,7 @@ static const float kBufferNonZeroData = 1.0f;
AudioParameters AOCTestParams() {
return AudioParameters(AudioParameters::AUDIO_FAKE, kChannelLayout,
- kSampleRate, kBitsPerSample, kSamplesPerPacket);
+ kSampleRate, kSamplesPerPacket);
}
class MockAudioOutputControllerEventHandler
@@ -66,7 +65,7 @@ class MockAudioOutputControllerEventHandler
MOCK_METHOD0(OnControllerPlaying, void());
MOCK_METHOD0(OnControllerPaused, void());
MOCK_METHOD0(OnControllerError, void());
- void OnLog(base::StringPiece) {}
+ void OnLog(base::StringPiece) override {}
private:
DISALLOW_COPY_AND_ASSIGN(MockAudioOutputControllerEventHandler);
diff --git a/chromium/media/audio/audio_output_delegate.h b/chromium/media/audio/audio_output_delegate.h
index 447221c7fcc..8cd95aa9d40 100644
--- a/chromium/media/audio/audio_output_delegate.h
+++ b/chromium/media/audio/audio_output_delegate.h
@@ -13,7 +13,7 @@
#include "media/base/media_export.h"
namespace base {
-class SharedMemory;
+class UnsafeSharedMemoryRegion;
class CancelableSyncSocket;
}
@@ -30,7 +30,7 @@ class MEDIA_EXPORT AudioOutputDelegate {
// Called when the underlying stream is ready for playout.
virtual void OnStreamCreated(
int stream_id,
- const base::SharedMemory* shared_memory,
+ base::UnsafeSharedMemoryRegion shared_memory_region,
std::unique_ptr<base::CancelableSyncSocket> socket) = 0;
// Called if stream encounters an error and has become unusable.
diff --git a/chromium/media/audio/audio_output_device.cc b/chromium/media/audio/audio_output_device.cc
index c683bbb367f..575a3455ce1 100644
--- a/chromium/media/audio/audio_output_device.cc
+++ b/chromium/media/audio/audio_output_device.cc
@@ -13,6 +13,7 @@
#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
+#include "base/single_thread_task_runner.h"
#include "base/threading/thread_restrictions.h"
#include "base/timer/timer.h"
#include "base/trace_event/trace_event.h"
@@ -43,8 +44,16 @@ class AudioOutputDevice::AudioThreadCallback
// Will always return true if DCHECKs are not enabled.
bool CurrentThreadIsAudioDeviceThread();
+ // Sets |first_play_start_time_| to the current time unless it's already set,
+ // in which case it's a no-op. The first call to this method MUST have
+ // completed by the time we recieve our first Process() callback to avoid
+ // data races.
+ void InitializePlayStartTime();
+
private:
const base::TimeTicks start_time_;
+ // If set, this is used to record the startup duration UMA stat.
+ base::Optional<base::TimeTicks> first_play_start_time_;
AudioRendererSink::RenderCallback* render_callback_;
std::unique_ptr<AudioBus> output_bus_;
uint64_t callback_num_;
@@ -57,39 +66,26 @@ AudioOutputDevice::AudioOutputDevice(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner,
int session_id,
const std::string& device_id,
- const url::Origin& security_origin,
base::TimeDelta authorization_timeout)
- : ScopedTaskRunnerObserver(io_task_runner),
+ : io_task_runner_(io_task_runner),
callback_(NULL),
ipc_(std::move(ipc)),
state_(IDLE),
- start_on_authorized_(false),
- play_on_start_(true),
session_id_(session_id),
device_id_(device_id),
- security_origin_(security_origin),
stopping_hack_(false),
did_receive_auth_(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED),
output_params_(AudioParameters::UnavailableDeviceParams()),
device_status_(OUTPUT_DEVICE_STATUS_ERROR_INTERNAL),
auth_timeout_(authorization_timeout) {
- CHECK(ipc_);
-
- // The correctness of the code depends on the relative values assigned in the
- // State enum.
- static_assert(IPC_CLOSED < IDLE, "invalid enum value assignment 0");
- static_assert(IDLE < AUTHORIZING, "invalid enum value assignment 1");
- static_assert(AUTHORIZING < AUTHORIZED, "invalid enum value assignment 2");
- static_assert(AUTHORIZED < CREATING_STREAM,
- "invalid enum value assignment 3");
- static_assert(CREATING_STREAM < PAUSED, "invalid enum value assignment 4");
- static_assert(PAUSED < PLAYING, "invalid enum value assignment 5");
+ DCHECK(ipc_);
+ DCHECK(io_task_runner_);
}
void AudioOutputDevice::Initialize(const AudioParameters& params,
RenderCallback* callback) {
- task_runner()->PostTask(
+ io_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&AudioOutputDevice::InitializeOnIOThread, this,
params, callback));
}
@@ -106,7 +102,7 @@ AudioOutputDevice::~AudioOutputDevice() {
#if DCHECK_IS_ON()
// Make sure we've stopped the stream properly before destructing |this|.
DCHECK(audio_thread_lock_.Try());
- DCHECK_LE(state_, IDLE);
+ DCHECK_EQ(state_, IDLE);
DCHECK(!audio_thread_);
DCHECK(!audio_callback_);
DCHECK(!stopping_hack_);
@@ -116,7 +112,7 @@ AudioOutputDevice::~AudioOutputDevice() {
void AudioOutputDevice::RequestDeviceAuthorization() {
TRACE_EVENT0("audio", "AudioOutputDevice::RequestDeviceAuthorization");
- task_runner()->PostTask(
+ io_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&AudioOutputDevice::RequestDeviceAuthorizationOnIOThread,
this));
@@ -124,7 +120,7 @@ void AudioOutputDevice::RequestDeviceAuthorization() {
void AudioOutputDevice::Start() {
TRACE_EVENT0("audio", "AudioOutputDevice::Start");
- task_runner()->PostTask(
+ io_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&AudioOutputDevice::CreateStreamOnIOThread, this));
}
@@ -137,19 +133,19 @@ void AudioOutputDevice::Stop() {
stopping_hack_ = true;
}
- task_runner()->PostTask(
+ io_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&AudioOutputDevice::ShutDownOnIOThread, this));
}
void AudioOutputDevice::Play() {
TRACE_EVENT0("audio", "AudioOutputDevice::Play");
- task_runner()->PostTask(
+ io_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&AudioOutputDevice::PlayOnIOThread, this));
}
void AudioOutputDevice::Pause() {
TRACE_EVENT0("audio", "AudioOutputDevice::Pause");
- task_runner()->PostTask(
+ io_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&AudioOutputDevice::PauseOnIOThread, this));
}
@@ -159,14 +155,14 @@ bool AudioOutputDevice::SetVolume(double volume) {
if (volume < 0 || volume > 1.0)
return false;
- return task_runner()->PostTask(
+ return io_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&AudioOutputDevice::SetVolumeOnIOThread, this, volume));
}
OutputDeviceInfo AudioOutputDevice::GetOutputDeviceInfo() {
TRACE_EVENT0("audio", "AudioOutputDevice::GetOutputDeviceInfo");
- DCHECK(!task_runner()->BelongsToCurrentThread());
+ DCHECK(!io_task_runner_->BelongsToCurrentThread());
did_receive_auth_.Wait();
return OutputDeviceInfo(AudioDeviceDescription::UseSessionIdToSelectDevice(
@@ -188,13 +184,11 @@ bool AudioOutputDevice::CurrentThreadIsRenderingThread() {
}
void AudioOutputDevice::RequestDeviceAuthorizationOnIOThread() {
- DCHECK(task_runner()->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, IDLE);
- state_ = AUTHORIZING;
- auth_start_time_ = base::TimeTicks::Now();
- ipc_->RequestDeviceAuthorization(this, session_id_, device_id_,
- security_origin_);
+ state_ = AUTHORIZATION_REQUESTED;
+ ipc_->RequestDeviceAuthorization(this, session_id_, device_id_);
if (auth_timeout_ > base::TimeDelta()) {
// Create the timer on the thread it's used on. It's guaranteed to be
@@ -211,77 +205,47 @@ void AudioOutputDevice::RequestDeviceAuthorizationOnIOThread() {
void AudioOutputDevice::CreateStreamOnIOThread() {
TRACE_EVENT0("audio", "AudioOutputDevice::Create");
- DCHECK(task_runner()->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
DCHECK(callback_) << "Initialize hasn't been called";
- switch (state_) {
- case IPC_CLOSED:
- // We must make sure to not access |callback_| in case Stop() has already
- // been called.
- NotifyRenderCallbackOfError();
- break;
-
- case IDLE:
- if (did_receive_auth_.IsSignaled() && device_id_.empty() &&
- security_origin_.unique()) {
- state_ = CREATING_STREAM;
- ipc_->CreateStream(this, audio_parameters_);
- } else {
- RequestDeviceAuthorizationOnIOThread();
- start_on_authorized_ = true;
- }
- break;
-
- case AUTHORIZING:
- start_on_authorized_ = true;
- break;
-
- case AUTHORIZED:
- state_ = CREATING_STREAM;
- ipc_->CreateStream(this, audio_parameters_);
- start_on_authorized_ = false;
- break;
-
- case CREATING_STREAM:
- case PAUSED:
- case PLAYING:
- NOTREACHED();
- break;
+ DCHECK_NE(state_, STREAM_CREATION_REQUESTED);
+
+ if (!ipc_) {
+ NotifyRenderCallbackOfError();
+ return;
}
+
+ if (state_ == IDLE && !(did_receive_auth_.IsSignaled() && device_id_.empty()))
+ RequestDeviceAuthorizationOnIOThread();
+
+ ipc_->CreateStream(this, audio_parameters_);
+ // By default, start playing right away.
+ ipc_->PlayStream();
+ state_ = STREAM_CREATION_REQUESTED;
}
void AudioOutputDevice::PlayOnIOThread() {
- DCHECK(task_runner()->BelongsToCurrentThread());
- if (state_ == PAUSED) {
- TRACE_EVENT_ASYNC_BEGIN0(
- "audio", "StartingPlayback", audio_callback_.get());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
+ if (audio_callback_)
+ audio_callback_->InitializePlayStartTime();
+
+ if (ipc_)
ipc_->PlayStream();
- state_ = PLAYING;
- play_on_start_ = false;
- } else {
- play_on_start_ = true;
- }
}
void AudioOutputDevice::PauseOnIOThread() {
- DCHECK(task_runner()->BelongsToCurrentThread());
- if (state_ == PLAYING) {
- TRACE_EVENT_ASYNC_END0(
- "audio", "StartingPlayback", audio_callback_.get());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
+
+ if (ipc_)
ipc_->PauseStream();
- state_ = PAUSED;
- }
- play_on_start_ = false;
}
void AudioOutputDevice::ShutDownOnIOThread() {
- DCHECK(task_runner()->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
- // Close the stream, if we haven't already.
- if (state_ >= AUTHORIZING) {
+ if (ipc_)
ipc_->CloseStream();
- state_ = IDLE;
- }
- start_on_authorized_ = false;
+
+ state_ = IDLE;
// Destoy the timer on the thread it's used on.
auth_timeout_action_.reset();
@@ -305,18 +269,18 @@ void AudioOutputDevice::ShutDownOnIOThread() {
}
void AudioOutputDevice::SetVolumeOnIOThread(double volume) {
- DCHECK(task_runner()->BelongsToCurrentThread());
- if (state_ >= CREATING_STREAM)
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
+ if (ipc_)
ipc_->SetVolume(volume);
}
void AudioOutputDevice::OnError() {
TRACE_EVENT0("audio", "AudioOutputDevice::OnError");
- DCHECK(task_runner()->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
// Do nothing if the stream has been closed.
- if (state_ < CREATING_STREAM)
+ if (state_ == IDLE)
return;
had_callback_error_ = true;
@@ -333,18 +297,12 @@ void AudioOutputDevice::OnDeviceAuthorized(
OutputDeviceStatus device_status,
const media::AudioParameters& output_params,
const std::string& matched_device_id) {
- DCHECK(task_runner()->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
auth_timeout_action_.reset();
- // Times over 15 s should be very rare, so we don't lose interesting data by
- // making it the upper limit.
- UMA_HISTOGRAM_CUSTOM_TIMES("Media.Audio.Render.OutputDeviceAuthorizationTime",
- base::TimeTicks::Now() - auth_start_time_,
- base::TimeDelta::FromMilliseconds(1),
- base::TimeDelta::FromSeconds(15), 100);
// Do nothing if late authorization is received after timeout.
- if (state_ == IPC_CLOSED)
+ if (!ipc_)
return;
UMA_HISTOGRAM_BOOLEAN("Media.Audio.Render.OutputDeviceAuthorizationTimedOut",
@@ -352,16 +310,14 @@ void AudioOutputDevice::OnDeviceAuthorized(
LOG_IF(WARNING, device_status == OUTPUT_DEVICE_STATUS_ERROR_TIMED_OUT)
<< "Output device authorization timed out";
- DCHECK_EQ(state_, AUTHORIZING);
-
// It may happen that a second authorization is received as a result to a
// call to Start() after Stop(). If the status for the second authorization
// differs from the first, it will not be reflected in |device_status_|
// to avoid a race.
// This scenario is unlikely. If it occurs, the new value will be
// different from OUTPUT_DEVICE_STATUS_OK, so the AudioOutputDevice
- // will enter the IPC_CLOSED state anyway, which is the safe thing to do.
- // This is preferable to holding a lock.
+ // will enter the |ipc_| == nullptr state anyway, which is the safe thing to
+ // do. This is preferable to holding a lock.
if (!did_receive_auth_.IsSignaled()) {
device_status_ = device_status;
UMA_HISTOGRAM_ENUMERATION("Media.Audio.Render.OutputDeviceStatus",
@@ -371,7 +327,6 @@ void AudioOutputDevice::OnDeviceAuthorized(
if (device_status == OUTPUT_DEVICE_STATUS_OK) {
TRACE_EVENT0("audio", "AudioOutputDevice authorized");
- state_ = AUTHORIZED;
if (!did_receive_auth_.IsSignaled()) {
output_params_ = output_params;
@@ -389,8 +344,6 @@ void AudioOutputDevice::OnDeviceAuthorized(
did_receive_auth_.Signal();
}
- if (start_on_authorized_)
- CreateStreamOnIOThread();
} else {
TRACE_EVENT1("audio", "AudioOutputDevice not authorized", "auth status",
device_status_);
@@ -404,12 +357,12 @@ void AudioOutputDevice::OnDeviceAuthorized(
}
}
-void AudioOutputDevice::OnStreamCreated(
- base::SharedMemoryHandle handle,
- base::SyncSocket::Handle socket_handle) {
+void AudioOutputDevice::OnStreamCreated(base::SharedMemoryHandle handle,
+ base::SyncSocket::Handle socket_handle,
+ bool playing_automatically) {
TRACE_EVENT0("audio", "AudioOutputDevice::OnStreamCreated")
- DCHECK(task_runner()->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
DCHECK(base::SharedMemory::IsHandleValid(handle));
#if defined(OS_WIN)
DCHECK(socket_handle);
@@ -418,7 +371,7 @@ void AudioOutputDevice::OnStreamCreated(
#endif
DCHECK_GT(handle.GetSize(), 0u);
- if (state_ != CREATING_STREAM)
+ if (state_ != STREAM_CREATION_REQUESTED)
return;
// We can receive OnStreamCreated() on the IO thread after the client has
@@ -443,23 +396,19 @@ void AudioOutputDevice::OnStreamCreated(
audio_callback_.reset(new AudioOutputDevice::AudioThreadCallback(
audio_parameters_, handle, callback_));
+ if (playing_automatically)
+ audio_callback_->InitializePlayStartTime();
audio_thread_.reset(new AudioDeviceThread(
audio_callback_.get(), socket_handle, "AudioOutputDevice"));
- state_ = PAUSED;
-
- // We handle the case where Play() and/or Pause() may have been called
- // multiple times before OnStreamCreated() gets called.
- if (play_on_start_)
- PlayOnIOThread();
}
}
void AudioOutputDevice::OnIPCClosed() {
TRACE_EVENT0("audio", "AudioOutputDevice::OnIPCClosed");
- DCHECK(task_runner()->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
- state_ = IPC_CLOSED;
ipc_.reset();
+ state_ = IDLE;
// Signal to unblock any blocked threads waiting for parameters
did_receive_auth_.Signal();
@@ -467,7 +416,7 @@ void AudioOutputDevice::OnIPCClosed() {
void AudioOutputDevice::NotifyRenderCallbackOfError() {
TRACE_EVENT0("audio", "AudioOutputDevice::NotifyRenderCallbackOfError");
- DCHECK(task_runner()->BelongsToCurrentThread());
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
base::AutoLock auto_lock(audio_thread_lock_);
// Avoid signaling error if Initialize() hasn't been called yet, or if
@@ -476,11 +425,6 @@ void AudioOutputDevice::NotifyRenderCallbackOfError() {
callback_->OnRenderError();
}
-void AudioOutputDevice::WillDestroyCurrentMessageLoop() {
- LOG(ERROR) << "IO loop going away before the audio device has been stopped";
- ShutDownOnIOThread();
-}
-
// AudioOutputDevice::AudioThreadCallback
AudioOutputDevice::AudioThreadCallback::AudioThreadCallback(
@@ -494,6 +438,7 @@ AudioOutputDevice::AudioThreadCallback::AudioThreadCallback(
ComputeAudioOutputBufferSize(audio_parameters),
/*segment count*/ 1),
start_time_(base::TimeTicks::Now()),
+ first_play_start_time_(base::nullopt),
render_callback_(render_callback),
callback_num_(0) {}
@@ -538,8 +483,13 @@ void AudioOutputDevice::AudioThreadCallback::Process(uint32_t control_signal) {
// When playback starts, we get an immediate callback to Process to make sure
// that we have some data, we'll get another one after the device is awake and
// ingesting data, which is what we want to track with this trace.
- if (callback_num_ == 2)
+ if (callback_num_ == 2) {
+ if (first_play_start_time_) {
+ UMA_HISTOGRAM_TIMES("Media.Audio.Render.OutputDeviceStartTime",
+ base::TimeTicks::Now() - *first_play_start_time_);
+ }
TRACE_EVENT_ASYNC_END0("audio", "StartingPlayback", this);
+ }
// Update the audio-delay measurement, inform about the number of skipped
// frames, and ask client to render audio. Since |output_bus_| is wrapping
@@ -563,4 +513,9 @@ bool AudioOutputDevice::AudioThreadCallback::
return thread_checker_.CalledOnValidThread();
}
+void AudioOutputDevice::AudioThreadCallback::InitializePlayStartTime() {
+ if (!first_play_start_time_.has_value())
+ first_play_start_time_ = base::TimeTicks::Now();
+}
+
} // namespace media
diff --git a/chromium/media/audio/audio_output_device.h b/chromium/media/audio/audio_output_device.h
index 679d7dc366d..43f95ae46f2 100644
--- a/chromium/media/audio/audio_output_device.h
+++ b/chromium/media/audio/audio_output_device.h
@@ -8,10 +8,10 @@
// Relationship of classes.
//
// AudioOutputController AudioOutputDevice
-// ^ ^
-// | |
-// v IPC v
-// AudioRendererHost <---------> AudioOutputIPC (AudioMessageFilter)
+// ^ ^
+// | |
+// v IPC v
+// MojoAudioOutputStream <---------> AudioOutputIPC (MojoAudioOutputIPC)
//
// Transportation of audio samples from the render to the browser process
// is done by using shared memory in combination with a sync socket pair
@@ -72,7 +72,6 @@
#include "base/time/time.h"
#include "media/audio/audio_device_thread.h"
#include "media/audio/audio_output_ipc.h"
-#include "media/audio/scoped_task_runner_observer.h"
#include "media/base/audio_parameters.h"
#include "media/base/audio_renderer_sink.h"
#include "media/base/media_export.h"
@@ -80,13 +79,13 @@
namespace base {
class OneShotTimer;
+class SingleThreadTaskRunner;
}
namespace media {
class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
- public AudioOutputIPCDelegate,
- public ScopedTaskRunnerObserver {
+ public AudioOutputIPCDelegate {
public:
// NOTE: Clients must call Initialize() before using.
AudioOutputDevice(
@@ -94,7 +93,6 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner,
int session_id,
const std::string& device_id,
- const url::Origin& security_origin,
base::TimeDelta authorization_timeout);
// Request authorization to use the device specified in the constructor.
@@ -119,7 +117,8 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
const media::AudioParameters& output_params,
const std::string& matched_device_id) override;
void OnStreamCreated(base::SharedMemoryHandle handle,
- base::SyncSocket::Handle socket_handle) override;
+ base::SyncSocket::Handle socket_handle,
+ bool play_automatically) override;
void OnIPCClosed() override;
protected:
@@ -129,15 +128,12 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
~AudioOutputDevice() override;
private:
- // Note: The ordering of members in this enum is critical to correct behavior!
- enum State {
- IPC_CLOSED, // No more IPCs can take place.
- IDLE, // Not started.
- AUTHORIZING, // Sent device authorization request, waiting for reply.
- AUTHORIZED, // Successful device authorization received.
- CREATING_STREAM, // Waiting for OnStreamCreated() to be called back.
- PAUSED, // Paused. OnStreamCreated() has been called. Can Play()/Stop().
- PLAYING, // Playing back. Can Pause()/Stop().
+ enum StartupState {
+ IDLE, // Authorization not requested.
+ AUTHORIZATION_REQUESTED, // Sent (possibly completed) device
+ // authorization request.
+ STREAM_CREATION_REQUESTED, // Sent (possibly completed) device creation
+ // request. Can Play()/Pause()/Stop().
};
// Methods called on IO thread ----------------------------------------------
@@ -162,31 +158,25 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
void NotifyRenderCallbackOfError();
- // base::MessageLoop::DestructionObserver implementation for the IO loop.
- // If the IO loop dies before we do, we shut down the audio thread from here.
- void WillDestroyCurrentMessageLoop() override;
+ const scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
AudioParameters audio_parameters_;
RenderCallback* callback_;
// A pointer to the IPC layer that takes care of sending requests over to
- // the AudioRendererHost. Only valid when state_ != IPC_CLOSED and must only
- // be accessed on the IO thread.
+ // the implementation. May be set to nullptr after errors.
std::unique_ptr<AudioOutputIPC> ipc_;
// Current state (must only be accessed from the IO thread). See comments for
// State enum above.
- State state_;
-
- // State of Start() calls before OnDeviceAuthorized() is called.
- bool start_on_authorized_;
+ StartupState state_;
// For UMA stats. May only be accessed on the IO thread.
bool had_callback_error_ = false;
- // State of Play() / Pause() calls before OnStreamCreated() is called.
- bool play_on_start_;
+ // Last set volume.
+ double volume_ = 1.0;
// The media session ID used to identify which input device to be started.
// Only used by Unified IO.
@@ -194,7 +184,6 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
// ID of hardware output device to be used (provided |session_id_| is zero)
const std::string device_id_;
- const url::Origin security_origin_;
// If |device_id_| is empty and |session_id_| is not, |matched_device_id_| is
// received in OnDeviceAuthorized().
@@ -224,9 +213,6 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
const base::TimeDelta auth_timeout_;
std::unique_ptr<base::OneShotTimer> auth_timeout_action_;
- // Set when authorization starts, for UMA stats.
- base::TimeTicks auth_start_time_;
-
DISALLOW_COPY_AND_ASSIGN(AudioOutputDevice);
};
diff --git a/chromium/media/audio/audio_output_device_unittest.cc b/chromium/media/audio/audio_output_device_unittest.cc
index 6a0aee311c4..1c8dc659447 100644
--- a/chromium/media/audio/audio_output_device_unittest.cc
+++ b/chromium/media/audio/audio_output_device_unittest.cc
@@ -2,30 +2,27 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include "media/audio/audio_output_device.h"
+
#include <stdint.h>
+#include <utility>
#include <vector>
-#include "base/at_exit.h"
#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/memory/shared_memory.h"
-#include "base/message_loop/message_loop.h"
-#include "base/process/process_handle.h"
-#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/sync_socket.h"
#include "base/task_runner.h"
-#include "base/test/test_timeouts.h"
-#include "base/threading/thread.h"
+#include "base/test/scoped_task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
-#include "media/audio/audio_output_device.h"
-#include "media/base/sample_rates.h"
-#include "media/base/test_helpers.h"
+#include "build/build_config.h"
+#include "media/audio/audio_sync_reader.h"
+#include "mojo/public/cpp/system/platform_handle.h"
#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gmock_mutant.h"
#include "testing/gtest/include/gtest/gtest.h"
using base::CancelableSyncSocket;
@@ -35,26 +32,32 @@ using testing::_;
using testing::DoAll;
using testing::Invoke;
using testing::Return;
-using testing::WithArgs;
+using testing::WithArg;
using testing::StrictMock;
-using testing::Values;
+using testing::NiceMock;
+using testing::NotNull;
+using testing::Mock;
namespace media {
namespace {
-const char kDefaultDeviceId[] = "";
-const char kNonDefaultDeviceId[] = "valid-nondefault-device-id";
-const char kUnauthorizedDeviceId[] = "unauthorized-device-id";
-const int kAuthTimeoutForTestingMs = 500;
-const int kOutputDelayMs = 20;
-const uint32_t kBitstreamFrames = 1024;
-const size_t kBitstreamDataSize = 512;
+constexpr char kDefaultDeviceId[] = "";
+constexpr char kNonDefaultDeviceId[] = "valid-nondefault-device-id";
+constexpr char kUnauthorizedDeviceId[] = "unauthorized-device-id";
+constexpr float kAudioData = 0.618;
+constexpr base::TimeDelta kAuthTimeout =
+ base::TimeDelta::FromMilliseconds(10000);
+constexpr base::TimeDelta kDelay = base::TimeDelta::FromMicroseconds(123);
+constexpr int kFramesSkipped = 456;
+constexpr int kFrames = 789;
+constexpr int kBitstreamFrames = 101;
+constexpr size_t kBitstreamDataSize = 512;
class MockRenderCallback : public AudioRendererSink::RenderCallback {
public:
MockRenderCallback() = default;
- virtual ~MockRenderCallback() = default;
+ ~MockRenderCallback() override = default;
MOCK_METHOD4(Render,
int(base::TimeDelta delay,
@@ -64,26 +67,15 @@ class MockRenderCallback : public AudioRendererSink::RenderCallback {
MOCK_METHOD0(OnRenderError, void());
};
-void RenderAudioBus(base::TimeDelta delay,
- base::TimeTicks timestamp,
- int prior_frames_skipped,
- AudioBus* dest) {
- if (dest->is_bitstream_format()) {
- dest->SetBitstreamFrames(kBitstreamFrames);
- dest->SetBitstreamDataSize(kBitstreamDataSize);
- }
-}
-
class MockAudioOutputIPC : public AudioOutputIPC {
public:
MockAudioOutputIPC() = default;
- virtual ~MockAudioOutputIPC() = default;
+ ~MockAudioOutputIPC() override = default;
- MOCK_METHOD4(RequestDeviceAuthorization,
+ MOCK_METHOD3(RequestDeviceAuthorization,
void(AudioOutputIPCDelegate* delegate,
int session_id,
- const std::string& device_id,
- const url::Origin& security_origin));
+ const std::string& device_id));
MOCK_METHOD2(CreateStream,
void(AudioOutputIPCDelegate* delegate,
const AudioParameters& params));
@@ -93,23 +85,23 @@ class MockAudioOutputIPC : public AudioOutputIPC {
MOCK_METHOD1(SetVolume, void(double volume));
};
-ACTION_P2(RequestMoreData, socket, shared_memory) {
- AudioOutputBuffer* buffer =
- reinterpret_cast<AudioOutputBuffer*>(shared_memory->memory());
- buffer->params.frames_skipped = 0;
- buffer->params.delay_us =
- base::TimeDelta::FromMilliseconds(kOutputDelayMs).InMicroseconds();
- buffer->params.delay_timestamp_us =
- (base::TimeTicks::Now() - base::TimeTicks()).InMicroseconds();
-
- constexpr int kControlSignal = 0;
- socket->Send(&kControlSignal, sizeof(kControlSignal));
-}
-
-// Used to terminate a loop from a different thread than the loop belongs to.
-// |task_runner| should be a SingleThreadTaskRunner.
-ACTION_P(QuitLoop, task_runner) {
- task_runner->PostTask(FROM_HERE, base::MessageLoop::QuitWhenIdleClosure());
+// Converts a new-style shared memory region to an old-style shared memory
+// handle using a mojo::ScopedSharedBufferHandle that supports both types.
+// TODO(https://crbug.com/844508): get rid of this when AudioOutputDevice shared
+// memory refactor is done.
+base::SharedMemoryHandle ToSharedMemoryHandle(
+ base::UnsafeSharedMemoryRegion region) {
+ mojo::ScopedSharedBufferHandle buffer_handle =
+ mojo::WrapUnsafeSharedMemoryRegion(std::move(region));
+ base::SharedMemoryHandle memory_handle;
+ mojo::UnwrappedSharedMemoryHandleProtection protection;
+ size_t memory_length = 0;
+ auto result = mojo::UnwrapSharedMemoryHandle(
+ std::move(buffer_handle), &memory_handle, &memory_length, &protection);
+ DCHECK_EQ(result, MOJO_RESULT_OK);
+ DCHECK_EQ(protection,
+ mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite);
+ return memory_handle;
}
} // namespace.
@@ -117,26 +109,19 @@ ACTION_P(QuitLoop, task_runner) {
class AudioOutputDeviceTest : public testing::Test {
public:
AudioOutputDeviceTest();
- ~AudioOutputDeviceTest();
+ ~AudioOutputDeviceTest() override;
- void SetupBitstreamParameters();
void ReceiveAuthorization(OutputDeviceStatus device_status);
void StartAudioDevice();
- void CreateStream();
- void ExpectRenderCallback();
- void WaitUntilRenderCallback();
- void WaitForAudioThreadCallbackProcessCompletion();
+ void CallOnStreamCreated();
void StopAudioDevice();
void CreateDevice(const std::string& device_id);
void SetDevice(const std::string& device_id);
void CheckDeviceStatus(OutputDeviceStatus device_status);
- void VerifyBitstreamFields();
protected:
- // Used to clean up TLS pointers that the test(s) will initialize.
- // Must remain the first member of this class.
- base::ShadowingAtExitManager at_exit_manager_;
- base::MessageLoopForIO io_loop_;
+ base::test::ScopedTaskEnvironment task_env_{
+ base::test::ScopedTaskEnvironment::MainThreadType::MOCK_TIME};
AudioParameters default_audio_parameters_;
StrictMock<MockRenderCallback> callback_;
MockAudioOutputIPC* audio_output_ipc_; // owned by audio_device_
@@ -156,12 +141,12 @@ class AudioOutputDeviceTest : public testing::Test {
AudioOutputDeviceTest::AudioOutputDeviceTest()
: device_status_(OUTPUT_DEVICE_STATUS_ERROR_INTERNAL) {
default_audio_parameters_.Reset(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 48000, 16, 1024);
+ CHANNEL_LAYOUT_STEREO, 48000, 1024);
SetDevice(kDefaultDeviceId);
}
AudioOutputDeviceTest::~AudioOutputDeviceTest() {
- audio_device_ = NULL;
+ audio_device_ = nullptr;
}
void AudioOutputDeviceTest::CreateDevice(const std::string& device_id) {
@@ -169,19 +154,18 @@ void AudioOutputDeviceTest::CreateDevice(const std::string& device_id) {
if (audio_device_)
StopAudioDevice();
- audio_output_ipc_ = new MockAudioOutputIPC();
- audio_device_ = new AudioOutputDevice(
- base::WrapUnique(audio_output_ipc_), io_loop_.task_runner(), 0, device_id,
- url::Origin(),
- base::TimeDelta::FromMilliseconds(kAuthTimeoutForTestingMs));
+ audio_output_ipc_ = new NiceMock<MockAudioOutputIPC>();
+ audio_device_ = new AudioOutputDevice(base::WrapUnique(audio_output_ipc_),
+ task_env_.GetMainThreadTaskRunner(), 0,
+ device_id, kAuthTimeout);
}
void AudioOutputDeviceTest::SetDevice(const std::string& device_id) {
CreateDevice(device_id);
EXPECT_CALL(*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0, device_id, _));
+ RequestDeviceAuthorization(audio_device_.get(), 0, device_id));
audio_device_->RequestDeviceAuthorization();
- base::RunLoop().RunUntilIdle();
+ task_env_.FastForwardBy(base::TimeDelta());
// Simulate response from browser
OutputDeviceStatus device_status =
@@ -195,7 +179,7 @@ void AudioOutputDeviceTest::SetDevice(const std::string& device_id) {
}
void AudioOutputDeviceTest::CheckDeviceStatus(OutputDeviceStatus status) {
- DCHECK(!io_loop_.task_runner()->BelongsToCurrentThread());
+ DCHECK(!task_env_.GetMainThreadTaskRunner()->BelongsToCurrentThread());
EXPECT_EQ(status, audio_device_->GetOutputDeviceInfo().device_status());
}
@@ -206,7 +190,7 @@ void AudioOutputDeviceTest::ReceiveAuthorization(OutputDeviceStatus status) {
audio_device_->OnDeviceAuthorized(device_status_, default_audio_parameters_,
kDefaultDeviceId);
- base::RunLoop().RunUntilIdle();
+ task_env_.FastForwardBy(base::TimeDelta());
}
void AudioOutputDeviceTest::StartAudioDevice() {
@@ -216,10 +200,10 @@ void AudioOutputDeviceTest::StartAudioDevice() {
EXPECT_CALL(callback_, OnRenderError());
audio_device_->Start();
- base::RunLoop().RunUntilIdle();
+ task_env_.FastForwardBy(base::TimeDelta());
}
-void AudioOutputDeviceTest::CreateStream() {
+void AudioOutputDeviceTest::CallOnStreamCreated() {
const uint32_t kMemorySize =
ComputeAudioOutputBufferSize(default_audio_parameters_);
@@ -243,47 +227,9 @@ void AudioOutputDeviceTest::CreateStream() {
// https://crbug.com/640840.
audio_device_->OnStreamCreated(
duplicated_memory_handle,
- SyncSocket::UnwrapHandle(audio_device_socket_descriptor));
- base::RunLoop().RunUntilIdle();
-}
-
-void AudioOutputDeviceTest::ExpectRenderCallback() {
- // We should get a 'play' notification when we call OnStreamCreated().
- // Respond by asking for some audio data. This should ask our callback
- // to provide some audio data that AudioOutputDevice then writes into the
- // shared memory section.
- EXPECT_CALL(*audio_output_ipc_, PlayStream())
- .WillOnce(RequestMoreData(&browser_socket_, &shared_memory_));
-
- // We expect calls to our audio renderer callback, which returns the number
- // of frames written to the memory section.
- // Here's the second place where it gets hacky: There's no way for us to
- // know (without using a sleep loop!) when the AudioOutputDevice has finished
- // writing the interleaved audio data into the shared memory section.
- // So, for the sake of this test, we consider the call to Render a sign
- // of success and quit the loop.
- const int kNumberOfFramesToProcess = 0;
- EXPECT_CALL(
- callback_,
- Render(base::TimeDelta::FromMilliseconds(kOutputDelayMs), _, _, _))
- .WillOnce(DoAll(Invoke(RenderAudioBus), QuitLoop(io_loop_.task_runner()),
- Return(kNumberOfFramesToProcess)));
-}
-
-void AudioOutputDeviceTest::WaitUntilRenderCallback() {
- // Don't hang the test if we never get the Render() callback.
- io_loop_.task_runner()->PostDelayedTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure(),
- TestTimeouts::action_timeout());
- base::RunLoop().Run();
-}
-
-void AudioOutputDeviceTest::WaitForAudioThreadCallbackProcessCompletion() {
- uint32_t buffer_index;
- size_t bytes_read = browser_socket_.ReceiveWithTimeout(
- &buffer_index, sizeof(buffer_index),
- base::TimeDelta::FromMilliseconds(900));
- EXPECT_EQ(bytes_read, sizeof(buffer_index));
+ SyncSocket::UnwrapHandle(audio_device_socket_descriptor),
+ /*playing_automatically*/ false);
+ task_env_.FastForwardBy(base::TimeDelta());
}
void AudioOutputDeviceTest::StopAudioDevice() {
@@ -291,20 +237,7 @@ void AudioOutputDeviceTest::StopAudioDevice() {
EXPECT_CALL(*audio_output_ipc_, CloseStream());
audio_device_->Stop();
- base::RunLoop().RunUntilIdle();
-}
-
-void AudioOutputDeviceTest::SetupBitstreamParameters() {
- default_audio_parameters_.Reset(AudioParameters::AUDIO_BITSTREAM_EAC3,
- CHANNEL_LAYOUT_STEREO, 48000, 16, 1024);
- SetDevice(kNonDefaultDeviceId);
-}
-
-void AudioOutputDeviceTest::VerifyBitstreamFields() {
- AudioOutputBuffer* buffer =
- reinterpret_cast<AudioOutputBuffer*>(shared_memory_.memory());
- EXPECT_EQ(kBitstreamDataSize, buffer->params.bitstream_data_size);
- EXPECT_EQ(kBitstreamFrames, buffer->params.bitstream_frames);
+ task_env_.FastForwardBy(base::TimeDelta());
}
TEST_F(AudioOutputDeviceTest, Initialize) {
@@ -339,26 +272,7 @@ TEST_F(AudioOutputDeviceTest, StopBeforeRender) {
// Expect us to shutdown IPC but not to render anything despite the stream
// getting created.
EXPECT_CALL(*audio_output_ipc_, CloseStream());
- CreateStream();
-}
-
-// Full test with output only.
-TEST_F(AudioOutputDeviceTest, CreateStream) {
- StartAudioDevice();
- ExpectRenderCallback();
- CreateStream();
- WaitUntilRenderCallback();
- StopAudioDevice();
-}
-
-// Full test with output only with nondefault device.
-TEST_F(AudioOutputDeviceTest, NonDefaultCreateStream) {
- SetDevice(kNonDefaultDeviceId);
- StartAudioDevice();
- ExpectRenderCallback();
- CreateStream();
- WaitUntilRenderCallback();
- StopAudioDevice();
+ CallOnStreamCreated();
}
// Multiple start/stop with nondefault device
@@ -368,7 +282,7 @@ TEST_F(AudioOutputDeviceTest, NonDefaultStartStopStartStop) {
StopAudioDevice();
EXPECT_CALL(*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0, _, _));
+ RequestDeviceAuthorization(audio_device_.get(), 0, _));
StartAudioDevice();
// Simulate reply from browser
ReceiveAuthorization(OUTPUT_DEVICE_STATUS_OK);
@@ -395,18 +309,17 @@ TEST_F(AudioOutputDeviceTest,
TEST_F(AudioOutputDeviceTest, AuthorizationFailsBeforeInitialize_NoError) {
// Clear audio device set by fixture.
StopAudioDevice();
- audio_output_ipc_ = new MockAudioOutputIPC();
- audio_device_ = new AudioOutputDevice(
- base::WrapUnique(audio_output_ipc_), io_loop_.task_runner(), 0,
- kDefaultDeviceId, url::Origin(),
- base::TimeDelta::FromMilliseconds(kAuthTimeoutForTestingMs));
+ audio_output_ipc_ = new NiceMock<MockAudioOutputIPC>();
+ audio_device_ = new AudioOutputDevice(base::WrapUnique(audio_output_ipc_),
+ task_env_.GetMainThreadTaskRunner(), 0,
+ kDefaultDeviceId, kAuthTimeout);
EXPECT_CALL(
*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0, kDefaultDeviceId, _));
+ RequestDeviceAuthorization(audio_device_.get(), 0, kDefaultDeviceId));
audio_device_->RequestDeviceAuthorization();
audio_device_->Initialize(default_audio_parameters_, &callback_);
- base::RunLoop().RunUntilIdle();
+ task_env_.FastForwardBy(base::TimeDelta());
audio_device_->Stop();
// We've stopped, so accessing |callback_| isn't ok.
@@ -414,49 +327,225 @@ TEST_F(AudioOutputDeviceTest, AuthorizationFailsBeforeInitialize_NoError) {
audio_device_->OnDeviceAuthorized(OUTPUT_DEVICE_STATUS_ERROR_NOT_AUTHORIZED,
default_audio_parameters_,
kDefaultDeviceId);
- base::RunLoop().RunUntilIdle();
+ task_env_.FastForwardBy(base::TimeDelta());
}
TEST_F(AudioOutputDeviceTest, AuthorizationTimedOut) {
- base::Thread thread("DeviceInfo");
- thread.Start();
-
CreateDevice(kNonDefaultDeviceId);
- EXPECT_CALL(*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0,
- kNonDefaultDeviceId, _));
+ EXPECT_CALL(
+ *audio_output_ipc_,
+ RequestDeviceAuthorization(audio_device_.get(), 0, kNonDefaultDeviceId));
EXPECT_CALL(*audio_output_ipc_, CloseStream());
// Request authorization; no reply from the browser.
audio_device_->RequestDeviceAuthorization();
- media::WaitableMessageLoopEvent event;
+ // Advance time until we hit the timeout.
+ task_env_.FastForwardUntilNoTasksRemain();
- // Request device info on another thread.
- thread.task_runner()->PostTaskAndReply(
- FROM_HERE,
- base::Bind(&AudioOutputDeviceTest::CheckDeviceStatus,
- base::Unretained(this), OUTPUT_DEVICE_STATUS_ERROR_TIMED_OUT),
- event.GetClosure());
+ audio_device_->Stop();
+ task_env_.FastForwardBy(base::TimeDelta());
+}
+
+namespace {
- base::RunLoop().RunUntilIdle();
+// This struct collects useful stuff without doing anything magical. It is used
+// below, where the test fixture is too inflexible.
+struct TestEnvironment {
+ explicit TestEnvironment(const AudioParameters& params) {
+ const uint32_t memory_size = ComputeAudioOutputBufferSize(params);
+ auto shared_memory_region =
+ base::UnsafeSharedMemoryRegion::Create(memory_size);
+ auto shared_memory_mapping = shared_memory_region.Map();
+ CHECK(shared_memory_region.IsValid());
+ CHECK(shared_memory_mapping.IsValid());
+ auto browser_socket = std::make_unique<base::CancelableSyncSocket>();
+ CHECK(CancelableSyncSocket::CreatePair(browser_socket.get(),
+ &renderer_socket));
+ reader = std::make_unique<AudioSyncReader>(
+ /*log callback*/ base::DoNothing(), params,
+ std::move(shared_memory_region), std::move(shared_memory_mapping),
+ std::move(browser_socket));
+ time_stamp = base::TimeTicks::Now();
+
+#if defined(OS_FUCHSIA)
+ // Raise the timeout limits to reduce bot flakiness.
+ // Fuchsia's task scheduler suffers from bad jitter on systems running many
+ // tests simultaneously on nested virtualized deployments (e.g. test bots),
+ // leading some read operations to randomly timeout.
+ reader->set_max_wait_timeout_for_test(
+ base::TimeDelta::FromMilliseconds(50));
+#endif
+ }
- // Runs the loop and waits for |thread| to call event's closure.
- event.RunAndWait();
+ base::CancelableSyncSocket renderer_socket;
+ StrictMock<MockRenderCallback> callback;
+ std::unique_ptr<AudioSyncReader> reader;
+ base::TimeTicks time_stamp;
+};
- audio_device_->Stop();
- base::RunLoop().RunUntilIdle();
+} // namespace
+
+TEST_F(AudioOutputDeviceTest, VerifyDataFlow) {
+ // The test fixture isn't used in this test, but we still have to clean up
+ // after it.
+ StopAudioDevice();
+
+ auto params = AudioParameters::UnavailableDeviceParams();
+ params.set_frames_per_buffer(kFrames);
+ ASSERT_EQ(2, params.channels());
+ TestEnvironment env(params);
+ auto* ipc = new MockAudioOutputIPC(); // owned by |audio_device|.
+ auto audio_device = base::MakeRefCounted<AudioOutputDevice>(
+ base::WrapUnique(ipc), task_env_.GetMainThreadTaskRunner(), 0,
+ kDefaultDeviceId, kAuthTimeout);
+
+ // Start a stream.
+ audio_device->RequestDeviceAuthorization();
+ audio_device->Initialize(params, &env.callback);
+ audio_device->Start();
+ EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(), 0,
+ kDefaultDeviceId));
+ EXPECT_CALL(*ipc, CreateStream(audio_device.get(), _));
+ EXPECT_CALL(*ipc, PlayStream());
+ task_env_.RunUntilIdle();
+ Mock::VerifyAndClear(ipc);
+ audio_device->OnDeviceAuthorized(OUTPUT_DEVICE_STATUS_OK, params,
+ kDefaultDeviceId);
+ audio_device->OnStreamCreated(
+ ToSharedMemoryHandle(env.reader->TakeSharedMemoryRegion()),
+ env.renderer_socket.Release(), /*playing_automatically*/ false);
+
+ task_env_.RunUntilIdle();
+ // At this point, the callback thread should be running. Send some data over
+ // and verify that it's propagated to |env.callback|. Do it a few times.
+ auto test_bus = AudioBus::Create(params);
+ for (int i = 0; i < 10; ++i) {
+ test_bus->Zero();
+ EXPECT_CALL(env.callback,
+ Render(kDelay, env.time_stamp, kFramesSkipped, NotNull()))
+ .WillOnce(WithArg<3>(Invoke([](AudioBus* renderer_bus) -> int {
+ // Place some test data in the bus so that we can check that it was
+ // copied to the browser side.
+ std::fill_n(renderer_bus->channel(0), renderer_bus->frames(),
+ kAudioData);
+ std::fill_n(renderer_bus->channel(1), renderer_bus->frames(),
+ kAudioData);
+ return renderer_bus->frames();
+ })));
+ env.reader->RequestMoreData(kDelay, env.time_stamp, kFramesSkipped);
+ env.reader->Read(test_bus.get());
+
+ Mock::VerifyAndClear(&env.callback);
+ for (int i = 0; i < kFrames; ++i) {
+ EXPECT_EQ(kAudioData, test_bus->channel(0)[i]);
+ EXPECT_EQ(kAudioData, test_bus->channel(1)[i]);
+ }
+ }
+
+ audio_device->Stop();
+ EXPECT_CALL(*ipc, CloseStream());
+ task_env_.RunUntilIdle();
}
-TEST_F(AudioOutputDeviceTest, BitstreamFormatTest) {
- SetupBitstreamParameters();
- StartAudioDevice();
- ExpectRenderCallback();
- CreateStream();
- WaitUntilRenderCallback();
- WaitForAudioThreadCallbackProcessCompletion();
- VerifyBitstreamFields();
+TEST_F(AudioOutputDeviceTest, CreateNondefaultDevice) {
+ // The test fixture isn't used in this test, but we still have to clean up
+ // after it.
StopAudioDevice();
+
+ auto params = AudioParameters::UnavailableDeviceParams();
+ params.set_frames_per_buffer(kFrames);
+ ASSERT_EQ(2, params.channels());
+ TestEnvironment env(params);
+ auto* ipc = new MockAudioOutputIPC(); // owned by |audio_device|.
+ auto audio_device = base::MakeRefCounted<AudioOutputDevice>(
+ base::WrapUnique(ipc), task_env_.GetMainThreadTaskRunner(), 0,
+ kNonDefaultDeviceId, kAuthTimeout);
+
+ audio_device->RequestDeviceAuthorization();
+ audio_device->Initialize(params, &env.callback);
+ audio_device->Start();
+ EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(), 0,
+ kNonDefaultDeviceId));
+ EXPECT_CALL(*ipc, CreateStream(audio_device.get(), _));
+ EXPECT_CALL(*ipc, PlayStream());
+ task_env_.RunUntilIdle();
+ Mock::VerifyAndClear(ipc);
+ audio_device->OnDeviceAuthorized(OUTPUT_DEVICE_STATUS_OK, params,
+ kNonDefaultDeviceId);
+ audio_device->OnStreamCreated(
+ ToSharedMemoryHandle(env.reader->TakeSharedMemoryRegion()),
+ env.renderer_socket.Release(), /*playing_automatically*/ false);
+
+ audio_device->Stop();
+ EXPECT_CALL(*ipc, CloseStream());
+ task_env_.RunUntilIdle();
+}
+
+TEST_F(AudioOutputDeviceTest, CreateBitStreamStream) {
+ // The test fixture isn't used in this test, but we still have to clean up
+ // after it.
+ StopAudioDevice();
+
+ const int kAudioParameterFrames = 4321;
+ AudioParameters params(AudioParameters::AUDIO_BITSTREAM_EAC3,
+ CHANNEL_LAYOUT_STEREO, 48000, kAudioParameterFrames);
+
+ TestEnvironment env(params);
+ auto* ipc = new MockAudioOutputIPC(); // owned by |audio_device|.
+ auto audio_device = base::MakeRefCounted<AudioOutputDevice>(
+ base::WrapUnique(ipc), task_env_.GetMainThreadTaskRunner(), 0,
+ kNonDefaultDeviceId, kAuthTimeout);
+
+ // Start a stream.
+ audio_device->RequestDeviceAuthorization();
+ audio_device->Initialize(params, &env.callback);
+ audio_device->Start();
+ EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(), 0,
+ kNonDefaultDeviceId));
+ EXPECT_CALL(*ipc, CreateStream(audio_device.get(), _));
+ EXPECT_CALL(*ipc, PlayStream());
+ task_env_.RunUntilIdle();
+ Mock::VerifyAndClear(ipc);
+ audio_device->OnDeviceAuthorized(OUTPUT_DEVICE_STATUS_OK, params,
+ kNonDefaultDeviceId);
+ audio_device->OnStreamCreated(
+ ToSharedMemoryHandle(env.reader->TakeSharedMemoryRegion()),
+ env.renderer_socket.Release(), /*playing_automatically*/ false);
+
+ task_env_.RunUntilIdle();
+ // At this point, the callback thread should be running. Send some data over
+ // and verify that it's propagated to |env.callback|. Do it a few times.
+ auto test_bus = AudioBus::Create(params);
+ for (int i = 0; i < 10; ++i) {
+ test_bus->Zero();
+ EXPECT_CALL(env.callback,
+ Render(kDelay, env.time_stamp, kFramesSkipped, NotNull()))
+ .WillOnce(WithArg<3>(Invoke([](AudioBus* renderer_bus) -> int {
+ EXPECT_TRUE(renderer_bus->is_bitstream_format());
+ // Place some test data in the bus so that we can check that it was
+ // copied to the browser side.
+ std::fill_n(renderer_bus->channel(0),
+ kBitstreamDataSize / sizeof(float), kAudioData);
+ renderer_bus->SetBitstreamFrames(kBitstreamFrames);
+ renderer_bus->SetBitstreamDataSize(kBitstreamDataSize);
+ return renderer_bus->frames();
+ })));
+ env.reader->RequestMoreData(kDelay, env.time_stamp, kFramesSkipped);
+ env.reader->Read(test_bus.get());
+
+ Mock::VerifyAndClear(&env.callback);
+ EXPECT_TRUE(test_bus->is_bitstream_format());
+ EXPECT_EQ(kBitstreamFrames, test_bus->GetBitstreamFrames());
+ EXPECT_EQ(kBitstreamDataSize, test_bus->GetBitstreamDataSize());
+ for (size_t i = 0; i < kBitstreamDataSize / sizeof(float); ++i) {
+ EXPECT_EQ(kAudioData, test_bus->channel(0)[i]);
+ }
+ }
+
+ audio_device->Stop();
+ EXPECT_CALL(*ipc, CloseStream());
+ task_env_.RunUntilIdle();
}
} // namespace media.
diff --git a/chromium/media/audio/audio_output_ipc.h b/chromium/media/audio/audio_output_ipc.h
index 159a089a862..897f7825ad6 100644
--- a/chromium/media/audio/audio_output_ipc.h
+++ b/chromium/media/audio/audio_output_ipc.h
@@ -12,7 +12,6 @@
#include "media/base/audio_parameters.h"
#include "media/base/media_export.h"
#include "media/base/output_device_info.h"
-#include "url/origin.h"
namespace media {
@@ -32,15 +31,13 @@ class MEDIA_EXPORT AudioOutputIPCDelegate {
const std::string& matched_device_id) = 0;
// Called when an audio stream has been created.
- // The shared memory |handle| points to a memory section that's used to
- // transfer audio buffers from the AudioOutputIPCDelegate back to the
- // AudioRendererHost. The implementation of OnStreamCreated takes ownership.
- // The |socket_handle| is used by AudioRendererHost to signal requests for
- // audio data to be written into the shared memory. The AudioOutputIPCDelegate
- // must read from this socket and provide audio whenever data (search for
- // "pending_bytes") is received.
+ // See media/mojo/interfaces/audio_data_pipe.mojom for documentation of
+ // |handle| and |socket_handle|. |playing_automatically| indicates if the
+ // AudioOutputIPCDelegate is playing right away due to an earlier call to
+ // Play().
virtual void OnStreamCreated(base::SharedMemoryHandle handle,
- base::SyncSocket::Handle socket_handle) = 0;
+ base::SyncSocket::Handle socket_handle,
+ bool playing_automatically) = 0;
// Called when the AudioOutputIPC object is going away and/or when the IPC
// channel has been closed and no more ipc requests can be made.
@@ -72,11 +69,9 @@ class MEDIA_EXPORT AudioOutputIPC {
// the default device.
// Once the authorization process is complete, the implementation will
// notify |delegate| by calling OnDeviceAuthorized().
- virtual void RequestDeviceAuthorization(
- AudioOutputIPCDelegate* delegate,
- int session_id,
- const std::string& device_id,
- const url::Origin& security_origin) = 0;
+ virtual void RequestDeviceAuthorization(AudioOutputIPCDelegate* delegate,
+ int session_id,
+ const std::string& device_id) = 0;
// Sends a request to create an AudioOutputController object in the peer
// process and configures it to use the specified audio |params| including
diff --git a/chromium/media/audio/audio_output_proxy.cc b/chromium/media/audio/audio_output_proxy.cc
index c3551b1096b..f5dd3a12d87 100644
--- a/chromium/media/audio/audio_output_proxy.cc
+++ b/chromium/media/audio/audio_output_proxy.cc
@@ -5,7 +5,6 @@
#include "media/audio/audio_output_proxy.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "media/audio/audio_manager.h"
#include "media/audio/audio_output_dispatcher.h"
diff --git a/chromium/media/audio/audio_output_proxy_unittest.cc b/chromium/media/audio/audio_output_proxy_unittest.cc
index 81d463e6735..ff493b671db 100644
--- a/chromium/media/audio/audio_output_proxy_unittest.cc
+++ b/chromium/media/audio/audio_output_proxy_unittest.cc
@@ -70,17 +70,17 @@ class MockAudioOutputStream : public AudioOutputStream {
FakeAudioOutputStream::MakeFakeStream(manager, params_)) {
}
- void Start(AudioSourceCallback* callback) {
+ void Start(AudioSourceCallback* callback) override {
start_called_ = true;
fake_output_stream_->Start(callback);
}
- void Stop() {
+ void Stop() override {
stop_called_ = true;
fake_output_stream_->Stop();
}
- ~MockAudioOutputStream() = default;
+ ~MockAudioOutputStream() override = default;
bool start_called() { return start_called_; }
bool stop_called() { return stop_called_; }
@@ -171,7 +171,7 @@ class AudioOutputProxyTest : public testing::Test {
// FakeAudioOutputStream will keep the message loop busy indefinitely; i.e.,
// RunUntilIdle() will never terminate.
params_ = AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 8000, 16, 2048);
+ CHANNEL_LAYOUT_STEREO, 8000, 2048);
InitDispatcher(base::TimeDelta::FromMilliseconds(kTestCloseDelayMs));
}
@@ -501,9 +501,8 @@ class AudioOutputResamplerTest : public AudioOutputProxyTest {
// Use a low sample rate and large buffer size when testing otherwise the
// FakeAudioOutputStream will keep the message loop busy indefinitely; i.e.,
// RunUntilIdle() will never terminate.
- resampler_params_ = AudioParameters(
- AudioParameters::AUDIO_PCM_LOW_LATENCY, CHANNEL_LAYOUT_STEREO,
- 16000, 16, 1024);
+ resampler_params_ = AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ CHANNEL_LAYOUT_STEREO, 16000, 1024);
resampler_ = std::make_unique<AudioOutputResampler>(
&manager(), params_, resampler_params_, std::string(), close_delay,
base::BindRepeating(&RegisterDebugRecording));
diff --git a/chromium/media/audio/audio_output_resampler.cc b/chromium/media/audio/audio_output_resampler.cc
index 7da0eb67f74..bb2e6766b0e 100644
--- a/chromium/media/audio/audio_output_resampler.cc
+++ b/chromium/media/audio/audio_output_resampler.cc
@@ -7,7 +7,9 @@
#include <stdint.h>
#include <algorithm>
+#include <memory>
#include <string>
+#include <utility>
#include "base/bind.h"
#include "base/bind_helpers.h"
@@ -60,10 +62,6 @@ class OnMoreDataConverter
// AudioConverter::InputCallback implementation.
double ProvideInput(AudioBus* audio_bus, uint32_t frames_delayed) override;
- // Ratio of input bytes to output bytes used to correct playback delay with
- // regard to buffering and resampling.
- const double io_ratio_;
-
// Source callback.
AudioOutputStream::AudioSourceCallback* source_callback_;
@@ -96,9 +94,6 @@ namespace {
// Record UMA statistics for hardware output configuration.
static void RecordStats(const AudioParameters& output_params) {
- UMA_HISTOGRAM_EXACT_LINEAR("Media.HardwareAudioBitsPerChannel",
- output_params.bits_per_sample(),
- static_cast<int>(limits::kMaxBitsPerSample));
UMA_HISTOGRAM_ENUMERATION(
"Media.HardwareAudioChannelLayout", output_params.channel_layout(),
CHANNEL_LAYOUT_MAX + 1);
@@ -120,9 +115,6 @@ static void RecordStats(const AudioParameters& output_params) {
// Record UMA statistics for hardware output configuration after fallback.
static void RecordFallbackStats(const AudioParameters& output_params) {
UMA_HISTOGRAM_BOOLEAN("Media.FallbackToHighLatencyAudioPath", true);
- UMA_HISTOGRAM_EXACT_LINEAR("Media.FallbackHardwareAudioBitsPerChannel",
- output_params.bits_per_sample(),
- static_cast<int>(limits::kMaxBitsPerSample));
UMA_HISTOGRAM_ENUMERATION(
"Media.FallbackHardwareAudioChannelLayout",
output_params.channel_layout(), CHANNEL_LAYOUT_MAX + 1);
@@ -211,7 +203,6 @@ AudioParameters GetFallbackOutputParams(
return AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
original_output_params.channel_layout(),
original_output_params.sample_rate(),
- original_output_params.bits_per_sample(),
frames_per_buffer);
}
#endif
@@ -285,7 +276,6 @@ AudioOutputResampler::AudioOutputResampler(
// Record UMA statistics for the hardware configuration.
RecordStats(output_params);
-
}
AudioOutputResampler::~AudioOutputResampler() {
@@ -408,7 +398,7 @@ bool AudioOutputResampler::OpenStream() {
}
// Resetting the malfunctioning dispatcher.
- dispatcher_.reset();
+ Reinitialize();
UMA_HISTOGRAM_ENUMERATION("Media.AudioOutputResampler.OpenLowLatencyStream",
OPEN_STREAM_FAIL, OPEN_STREAM_MAX + 1);
return false;
@@ -503,9 +493,7 @@ OnMoreDataConverter::OnMoreDataConverter(
const AudioParameters& input_params,
const AudioParameters& output_params,
std::unique_ptr<AudioDebugRecorder> debug_recorder)
- : io_ratio_(static_cast<double>(input_params.GetBytesPerSecond()) /
- output_params.GetBytesPerSecond()),
- source_callback_(nullptr),
+ : source_callback_(nullptr),
input_samples_per_second_(input_params.sample_rate()),
audio_converter_(input_params, output_params, false),
error_occurred_(false),
diff --git a/chromium/media/audio/audio_sync_reader.cc b/chromium/media/audio/audio_sync_reader.cc
index 560eb5c92f3..6d297b36dc1 100644
--- a/chromium/media/audio/audio_sync_reader.cc
+++ b/chromium/media/audio/audio_sync_reader.cc
@@ -12,7 +12,6 @@
#include "base/command_line.h"
#include "base/format_macros.h"
#include "base/memory/ptr_util.h"
-#include "base/memory/shared_memory.h"
#include "base/metrics/histogram_macros.h"
#include "base/numerics/safe_conversions.h"
#include "base/strings/stringprintf.h"
@@ -44,10 +43,12 @@ namespace media {
AudioSyncReader::AudioSyncReader(
base::RepeatingCallback<void(const std::string&)> log_callback,
const AudioParameters& params,
- std::unique_ptr<base::SharedMemory> shared_memory,
+ base::UnsafeSharedMemoryRegion shared_memory_region,
+ base::WritableSharedMemoryMapping shared_memory_mapping,
std::unique_ptr<base::CancelableSyncSocket> socket)
: log_callback_(std::move(log_callback)),
- shared_memory_(std::move(shared_memory)),
+ shared_memory_region_(std::move(shared_memory_region)),
+ shared_memory_mapping_(std::move(shared_memory_mapping)),
mute_audio_(base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kMuteAudio)),
had_socket_error_(false),
@@ -66,10 +67,10 @@ AudioSyncReader::AudioSyncReader(
maximum_wait_time_(base::TimeDelta::FromMilliseconds(20)),
#endif
buffer_index_(0) {
- DCHECK_EQ(base::checked_cast<uint32_t>(shared_memory_->requested_size()),
+ DCHECK_EQ(base::checked_cast<uint32_t>(shared_memory_mapping_.size()),
ComputeAudioOutputBufferSize(params));
AudioOutputBuffer* buffer =
- reinterpret_cast<AudioOutputBuffer*>(shared_memory_->memory());
+ reinterpret_cast<AudioOutputBuffer*>(shared_memory_mapping_.memory());
output_bus_ = AudioBus::WrapMemory(params, buffer->audio);
output_bus_->Zero();
output_bus_->set_is_bitstream_format(params.IsBitstreamFormat());
@@ -122,19 +123,30 @@ std::unique_ptr<AudioSyncReader> AudioSyncReader::Create(
base::CancelableSyncSocket* foreign_socket) {
base::CheckedNumeric<size_t> memory_size =
ComputeAudioOutputBufferSizeChecked(params);
+ if (!memory_size.IsValid())
+ return nullptr;
- auto shared_memory = std::make_unique<base::SharedMemory>();
- auto socket = std::make_unique<base::CancelableSyncSocket>();
+ auto shared_memory_region =
+ base::UnsafeSharedMemoryRegion::Create(memory_size.ValueOrDie());
+ if (!shared_memory_region.IsValid())
+ return nullptr;
- if (!memory_size.IsValid() ||
- !shared_memory->CreateAndMapAnonymous(memory_size.ValueOrDie()) ||
- !base::CancelableSyncSocket::CreatePair(socket.get(), foreign_socket)) {
+ auto shared_memory_mapping = shared_memory_region.Map();
+ if (!shared_memory_mapping.IsValid())
return nullptr;
- }
- return std::make_unique<AudioSyncReader>(std::move(log_callback), params,
- std::move(shared_memory),
- std::move(socket));
+ auto socket = std::make_unique<base::CancelableSyncSocket>();
+ if (!base::CancelableSyncSocket::CreatePair(socket.get(), foreign_socket))
+ return nullptr;
+
+ return std::make_unique<AudioSyncReader>(
+ std::move(log_callback), params, std::move(shared_memory_region),
+ std::move(shared_memory_mapping), std::move(socket));
+}
+
+base::UnsafeSharedMemoryRegion AudioSyncReader::TakeSharedMemoryRegion() {
+ DCHECK(shared_memory_region_.IsValid());
+ return std::move(shared_memory_region_);
}
// AudioOutputController::SyncReader implementations.
@@ -145,7 +157,7 @@ void AudioSyncReader::RequestMoreData(base::TimeDelta delay,
// bytes might lead to being descheduled. The reading side will zero
// them when consumed.
AudioOutputBuffer* buffer =
- reinterpret_cast<AudioOutputBuffer*>(shared_memory_->memory());
+ reinterpret_cast<AudioOutputBuffer*>(shared_memory_mapping_.memory());
// Increase the number of skipped frames stored in shared memory.
buffer->params.frames_skipped += prior_frames_skipped;
buffer->params.delay_us = delay.InMicroseconds();
@@ -211,7 +223,7 @@ void AudioSyncReader::Read(AudioBus* dest) {
if (output_bus_->is_bitstream_format()) {
// For bitstream formats, we need the real data size and PCM frame count.
AudioOutputBuffer* buffer =
- reinterpret_cast<AudioOutputBuffer*>(shared_memory_->memory());
+ reinterpret_cast<AudioOutputBuffer*>(shared_memory_mapping_.memory());
uint32_t data_size = buffer->params.bitstream_data_size;
uint32_t bitstream_frames = buffer->params.bitstream_frames;
// |bitstream_frames| is cast to int below, so it must fit.
diff --git a/chromium/media/audio/audio_sync_reader.h b/chromium/media/audio/audio_sync_reader.h
index a36c4a01cd7..5709b089884 100644
--- a/chromium/media/audio/audio_sync_reader.h
+++ b/chromium/media/audio/audio_sync_reader.h
@@ -13,6 +13,8 @@
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/macros.h"
+#include "base/memory/shared_memory_mapping.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/process/process.h"
#include "base/sync_socket.h"
#include "base/time/time.h"
@@ -25,10 +27,6 @@
#include "base/file_descriptor_posix.h"
#endif
-namespace base {
-class SharedMemory;
-}
-
namespace media {
// A AudioOutputController::SyncReader implementation using SyncSocket. This
@@ -42,7 +40,8 @@ class MEDIA_EXPORT AudioSyncReader : public AudioOutputController::SyncReader {
AudioSyncReader(
base::RepeatingCallback<void(const std::string&)> log_callback,
const AudioParameters& params,
- std::unique_ptr<base::SharedMemory> shared_memory,
+ base::UnsafeSharedMemoryRegion shared_memory_region,
+ base::WritableSharedMemoryMapping shared_memory_mapping,
std::unique_ptr<base::CancelableSyncSocket> socket);
~AudioSyncReader() override;
@@ -53,8 +52,12 @@ class MEDIA_EXPORT AudioSyncReader : public AudioOutputController::SyncReader {
const AudioParameters& params,
base::CancelableSyncSocket* foreign_socket);
- const base::SharedMemory* shared_memory() const {
- return shared_memory_.get();
+ // Transfers shared memory region ownership to a caller. It shouldn't be
+ // called more than once.
+ base::UnsafeSharedMemoryRegion TakeSharedMemoryRegion();
+
+ void set_max_wait_timeout_for_test(base::TimeDelta time) {
+ maximum_wait_time_ = time;
}
// AudioOutputController::SyncReader implementations.
@@ -71,7 +74,8 @@ class MEDIA_EXPORT AudioSyncReader : public AudioOutputController::SyncReader {
const base::RepeatingCallback<void(const std::string&)> log_callback_;
- std::unique_ptr<base::SharedMemory> shared_memory_;
+ base::UnsafeSharedMemoryRegion shared_memory_region_;
+ base::WritableSharedMemoryMapping shared_memory_mapping_;
// Mutes all incoming samples. This is used to prevent audible sound
// during automated testing.
@@ -97,7 +101,7 @@ class MEDIA_EXPORT AudioSyncReader : public AudioOutputController::SyncReader {
// The maximum amount of time to wait for data from the renderer. Calculated
// from the parameters given at construction.
- const base::TimeDelta maximum_wait_time_;
+ base::TimeDelta maximum_wait_time_;
// The index of the audio buffer we're expecting to be sent from the renderer;
// used to block with timeout for audio data.
diff --git a/chromium/media/audio/audio_sync_reader_unittest.cc b/chromium/media/audio/audio_sync_reader_unittest.cc
index 14101691d78..7735a5dc1e9 100644
--- a/chromium/media/audio/audio_sync_reader_unittest.cc
+++ b/chromium/media/audio/audio_sync_reader_unittest.cc
@@ -53,19 +53,18 @@ class AudioSyncReaderBitstreamTest : public TestWithParam<OverflowTestCase> {
TEST_P(AudioSyncReaderBitstreamTest, BitstreamBufferOverflow_DoesNotWriteOOB) {
const int kSampleRate = 44100;
- const int kBitsPerSample = 32;
const int kFramesPerBuffer = 1;
AudioParameters params(AudioParameters::AUDIO_BITSTREAM_AC3,
- CHANNEL_LAYOUT_STEREO, kSampleRate, kBitsPerSample,
- kFramesPerBuffer);
+ CHANNEL_LAYOUT_STEREO, kSampleRate, kFramesPerBuffer);
auto socket = std::make_unique<base::CancelableSyncSocket>();
std::unique_ptr<AudioBus> output_bus = AudioBus::Create(params);
std::unique_ptr<AudioSyncReader> reader = AudioSyncReader::Create(
base::BindRepeating(&NoLog), params, socket.get());
- const base::SharedMemory* shmem = reader->shared_memory();
+ const base::WritableSharedMemoryMapping shmem =
+ reader->TakeSharedMemoryRegion().Map();
AudioOutputBuffer* buffer =
- reinterpret_cast<AudioOutputBuffer*>(shmem->memory());
+ reinterpret_cast<AudioOutputBuffer*>(shmem.memory());
reader->RequestMoreData(base::TimeDelta(), base::TimeTicks(), 0);
uint32_t signal;
@@ -80,15 +79,15 @@ TEST_P(AudioSyncReaderBitstreamTest, BitstreamBufferOverflow_DoesNotWriteOOB) {
break;
case kNoOverflow:
buffer->params.bitstream_data_size =
- shmem->mapped_size() - sizeof(AudioOutputBufferParameters);
+ shmem.mapped_size() - sizeof(AudioOutputBufferParameters);
break;
case kOverflowByOne:
buffer->params.bitstream_data_size =
- shmem->mapped_size() - sizeof(AudioOutputBufferParameters) + 1;
+ shmem.mapped_size() - sizeof(AudioOutputBufferParameters) + 1;
break;
case kOverflowByOneThousand:
buffer->params.bitstream_data_size =
- shmem->mapped_size() - sizeof(AudioOutputBufferParameters) + 1000;
+ shmem.mapped_size() - sizeof(AudioOutputBufferParameters) + 1000;
break;
case kOverflowByMax:
buffer->params.bitstream_data_size = std::numeric_limits<decltype(
diff --git a/chromium/media/audio/audio_system.cc b/chromium/media/audio/audio_system.cc
new file mode 100644
index 00000000000..81c58aa4a3a
--- /dev/null
+++ b/chromium/media/audio/audio_system.cc
@@ -0,0 +1,29 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/audio/audio_system.h"
+
+#include <utility>
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "media/audio/audio_device_description.h"
+
+namespace media {
+
+// static
+AudioSystem::OnDeviceDescriptionsCallback
+AudioSystem::WrapCallbackWithDeviceNameLocalization(
+ OnDeviceDescriptionsCallback callback) {
+ return base::BindOnce(
+ [](OnDeviceDescriptionsCallback cb,
+ media::AudioDeviceDescriptions descriptions) {
+ media::AudioDeviceDescription::LocalizeDeviceDescriptions(
+ &descriptions);
+ std::move(cb).Run(std::move(descriptions));
+ },
+ std::move(callback));
+}
+
+} // namespace media
diff --git a/chromium/media/audio/audio_system.h b/chromium/media/audio/audio_system.h
index 48b0a924eaf..c01f8d75e84 100644
--- a/chromium/media/audio/audio_system.h
+++ b/chromium/media/audio/audio_system.h
@@ -75,8 +75,14 @@ class MEDIA_EXPORT AudioSystem {
virtual void GetInputDeviceInfo(
const std::string& input_device_id,
OnInputDeviceInfoCallback on_input_device_info_cb) = 0;
+
+ // This function wraps |callback| with a call to
+ // AudioDeviceDescription::LocalizeDeviceDescriptions for convenience. This is
+ // typically used by AudioSystem implementations, not AudioSystem clients.
+ static OnDeviceDescriptionsCallback WrapCallbackWithDeviceNameLocalization(
+ OnDeviceDescriptionsCallback callback);
};
} // namespace media
-#endif // MEDIA_AUDIO_AUDIO_SYSTEM_H_s
+#endif // MEDIA_AUDIO_AUDIO_SYSTEM_H_
diff --git a/chromium/media/audio/audio_system_impl.cc b/chromium/media/audio/audio_system_impl.cc
index a41c183efaf..74718054e4e 100644
--- a/chromium/media/audio/audio_system_impl.cc
+++ b/chromium/media/audio/audio_system_impl.cc
@@ -4,9 +4,12 @@
#include "media/audio/audio_system_impl.h"
+#include <utility>
+
#include "base/memory/ptr_util.h"
#include "base/single_thread_task_runner.h"
#include "base/task_runner_util.h"
+#include "media/audio/audio_device_description.h"
#include "media/audio/audio_manager.h"
#include "media/base/bind_to_current_loop.h"
@@ -144,10 +147,11 @@ void AudioSystemImpl::GetDeviceDescriptions(
OnDeviceDescriptionsCallback on_descriptions_cb) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
audio_manager_->GetTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&GetDeviceDescriptionsOnAudioThread,
- base::Unretained(audio_manager_), for_input,
- MaybeBindToCurrentLoop(std::move(on_descriptions_cb))));
+ FROM_HERE, base::BindOnce(&GetDeviceDescriptionsOnAudioThread,
+ base::Unretained(audio_manager_), for_input,
+ MaybeBindToCurrentLoop(
+ WrapCallbackWithDeviceNameLocalization(
+ std::move(on_descriptions_cb)))));
}
void AudioSystemImpl::GetAssociatedOutputDeviceID(
diff --git a/chromium/media/audio/audio_system_impl.h b/chromium/media/audio/audio_system_impl.h
index ee947d55c60..59fc179bdeb 100644
--- a/chromium/media/audio/audio_system_impl.h
+++ b/chromium/media/audio/audio_system_impl.h
@@ -5,6 +5,10 @@
#ifndef MEDIA_AUDIO_AUDIO_SYSTEM_IMPL_H_
#define MEDIA_AUDIO_AUDIO_SYSTEM_IMPL_H_
+#include <memory>
+#include <string>
+
+#include "base/callback.h"
#include "base/threading/thread_checker.h"
#include "media/audio/audio_system.h"
#include "media/audio/audio_system_helper.h"
diff --git a/chromium/media/audio/audio_system_test_util.h b/chromium/media/audio/audio_system_test_util.h
index 26aaf626ad8..222015311c0 100644
--- a/chromium/media/audio/audio_system_test_util.h
+++ b/chromium/media/audio/audio_system_test_util.h
@@ -8,7 +8,6 @@
#include "base/bind.h"
#include "base/location.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/optional.h"
#include "base/run_loop.h"
#include "base/threading/thread_checker.h"
@@ -100,15 +99,15 @@ class AudioSystemTestTemplate : public T {
T::SetUp();
input_params_ =
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kTelephoneSampleRate, 16,
+ AudioParameters::kTelephoneSampleRate,
AudioParameters::kTelephoneSampleRate / 10);
output_params_ =
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kTelephoneSampleRate, 16,
+ AudioParameters::kTelephoneSampleRate,
AudioParameters::kTelephoneSampleRate / 20);
default_output_params_ =
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kTelephoneSampleRate, 16,
+ AudioParameters::kTelephoneSampleRate,
AudioParameters::kTelephoneSampleRate / 30);
audio_manager()->SetInputStreamParameters(input_params_);
audio_manager()->SetOutputStreamParameters(output_params_);
diff --git a/chromium/media/audio/audio_thread_impl.cc b/chromium/media/audio/audio_thread_impl.cc
index f7c52863279..1a64cc14186 100644
--- a/chromium/media/audio/audio_thread_impl.cc
+++ b/chromium/media/audio/audio_thread_impl.cc
@@ -5,6 +5,7 @@
#include "media/audio/audio_thread_impl.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "build/build_config.h"
namespace media {
diff --git a/chromium/media/audio/cras/audio_manager_cras.cc b/chromium/media/audio/cras/audio_manager_cras.cc
index a4794c44516..be523527018 100644
--- a/chromium/media/audio/cras/audio_manager_cras.cc
+++ b/chromium/media/audio/cras/audio_manager_cras.cc
@@ -179,7 +179,7 @@ AudioParameters AudioManagerCras::GetInputStreamParameters(
// TODO(hshi): Fine-tune audio parameters based on |device_id|. The optimal
// parameters for the loopback stream may differ from the default.
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, kDefaultSampleRate, 16,
+ CHANNEL_LAYOUT_STEREO, kDefaultSampleRate,
buffer_size);
chromeos::AudioDeviceList devices;
GetAudioDevices(&devices);
@@ -316,10 +316,8 @@ AudioParameters AudioManagerCras::GetPreferredOutputStreamParameters(
ChannelLayout channel_layout = CHANNEL_LAYOUT_STEREO;
int sample_rate = kDefaultSampleRate;
int buffer_size = GetDefaultOutputBufferSizePerBoard();
- int bits_per_sample = 16;
if (input_params.IsValid()) {
sample_rate = input_params.sample_rate();
- bits_per_sample = input_params.bits_per_sample();
channel_layout = input_params.channel_layout();
buffer_size =
std::min(static_cast<int>(limits::kMaxAudioBufferSize),
@@ -332,7 +330,7 @@ AudioParameters AudioManagerCras::GetPreferredOutputStreamParameters(
buffer_size = user_buffer_size;
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- sample_rate, bits_per_sample, buffer_size);
+ sample_rate, buffer_size);
}
AudioOutputStream* AudioManagerCras::MakeOutputStream(
@@ -346,21 +344,6 @@ AudioInputStream* AudioManagerCras::MakeInputStream(
return new CrasInputStream(params, this, device_id);
}
-snd_pcm_format_t AudioManagerCras::BitsToFormat(int bits_per_sample) {
- switch (bits_per_sample) {
- case 8:
- return SND_PCM_FORMAT_U8;
- case 16:
- return SND_PCM_FORMAT_S16;
- case 24:
- return SND_PCM_FORMAT_S24;
- case 32:
- return SND_PCM_FORMAT_S32;
- default:
- return SND_PCM_FORMAT_UNKNOWN;
- }
-}
-
bool AudioManagerCras::IsDefault(const std::string& device_id, bool is_input) {
AudioDeviceNames device_names;
GetAudioDeviceNamesImpl(is_input, &device_names);
diff --git a/chromium/media/audio/cras/audio_manager_cras.h b/chromium/media/audio/cras/audio_manager_cras.h
index c3215edbeb4..2bce9b04f30 100644
--- a/chromium/media/audio/cras/audio_manager_cras.h
+++ b/chromium/media/audio/cras/audio_manager_cras.h
@@ -58,8 +58,6 @@ class MEDIA_EXPORT AudioManagerCras : public AudioManagerBase {
const std::string& device_id,
const LogCallback& log_callback) override;
- static snd_pcm_format_t BitsToFormat(int bits_per_sample);
-
// Checks if |device_id| corresponds to the default device.
// Set |is_input| to true for capture devices, false for output.
bool IsDefault(const std::string& device_id, bool is_input);
diff --git a/chromium/media/audio/cras/cras_input.cc b/chromium/media/audio/cras/cras_input.cc
index d13facf5449..435c9101bd1 100644
--- a/chromium/media/audio/cras/cras_input.cc
+++ b/chromium/media/audio/cras/cras_input.cc
@@ -62,13 +62,6 @@ bool CrasInputStream::Open() {
return false;
}
- snd_pcm_format_t pcm_format =
- AudioManagerCras::BitsToFormat(params_.bits_per_sample());
- if (pcm_format == SND_PCM_FORMAT_UNKNOWN) {
- DLOG(WARNING) << "Unsupported bits/sample: " << params_.bits_per_sample();
- return false;
- }
-
// Create the client and connect to the CRAS server.
if (cras_client_create(&client_) < 0) {
DLOG(WARNING) << "Couldn't create CRAS client.\n";
@@ -161,9 +154,7 @@ void CrasInputStream::Start(AudioInputCallback* callback) {
// Prepare |audio_format| and |stream_params| for the stream we
// will create.
cras_audio_format* audio_format = cras_audio_format_create(
- AudioManagerCras::BitsToFormat(params_.bits_per_sample()),
- params_.sample_rate(),
- params_.channels());
+ SND_PCM_FORMAT_S16, params_.sample_rate(), params_.channels());
if (!audio_format) {
DLOG(WARNING) << "Error setting up audio parameters.";
callback_->OnError();
@@ -300,8 +291,8 @@ void CrasInputStream::ReadAudio(size_t frames,
DCHECK_EQ(base::TimeTicks::GetClock(),
base::TimeTicks::Clock::LINUX_CLOCK_MONOTONIC);
- audio_bus_->FromInterleaved(buffer, audio_bus_->frames(),
- params_.bits_per_sample() / 8);
+ audio_bus_->FromInterleaved<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<int16_t*>(buffer), audio_bus_->frames());
callback_->OnData(audio_bus_.get(), capture_time, normalized_volume);
}
@@ -346,6 +337,11 @@ bool CrasInputStream::IsMuted() {
return false;
}
+void CrasInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ // Not supported. Do nothing.
+}
+
double CrasInputStream::GetVolumeRatioFromDecibels(double dB) const {
return pow(10, dB / 20.0);
}
diff --git a/chromium/media/audio/cras/cras_input.h b/chromium/media/audio/cras/cras_input.h
index 9ab548ec3f5..e7917e564d7 100644
--- a/chromium/media/audio/cras/cras_input.h
+++ b/chromium/media/audio/cras/cras_input.h
@@ -47,6 +47,7 @@ class MEDIA_EXPORT CrasInputStream : public AgcAudioStream<AudioInputStream> {
void SetVolume(double volume) override;
double GetVolume() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
private:
// Handles requests to get samples from the provided buffer. This will be
diff --git a/chromium/media/audio/cras/cras_input_unittest.cc b/chromium/media/audio/cras/cras_input_unittest.cc
index 8b1db70db6b..acaa675eaca 100644
--- a/chromium/media/audio/cras/cras_input_unittest.cc
+++ b/chromium/media/audio/cras/cras_input_unittest.cc
@@ -90,7 +90,6 @@ class CrasInputStreamTest : public testing::Test {
AudioParameters params(kTestFormat,
layout,
kTestSampleRate,
- kTestBitsPerSample,
samples_per_packet);
return new CrasInputStream(params, mock_manager_.get(), device_id);
}
@@ -121,7 +120,6 @@ class CrasInputStreamTest : public testing::Test {
test_stream->Close();
}
- static const unsigned int kTestBitsPerSample;
static const unsigned int kTestCaptureDurationMs;
static const ChannelLayout kTestChannelLayout;
static const AudioParameters::Format kTestFormat;
@@ -135,7 +133,6 @@ class CrasInputStreamTest : public testing::Test {
DISALLOW_COPY_AND_ASSIGN(CrasInputStreamTest);
};
-const unsigned int CrasInputStreamTest::kTestBitsPerSample = 16;
const unsigned int CrasInputStreamTest::kTestCaptureDurationMs = 250;
const ChannelLayout CrasInputStreamTest::kTestChannelLayout =
CHANNEL_LAYOUT_STEREO;
@@ -156,24 +153,10 @@ TEST_F(CrasInputStreamTest, OpenStereo) {
test_stream->Close();
}
-TEST_F(CrasInputStreamTest, BadBitsPerSample) {
- AudioParameters bad_bps_params(kTestFormat,
- kTestChannelLayout,
- kTestSampleRate,
- kTestBitsPerSample - 1,
- kTestFramesPerPacket);
- CrasInputStream* test_stream =
- new CrasInputStream(bad_bps_params, mock_manager_.get(),
- AudioDeviceDescription::kDefaultDeviceId);
- EXPECT_FALSE(test_stream->Open());
- test_stream->Close();
-}
-
TEST_F(CrasInputStreamTest, BadSampleRate) {
AudioParameters bad_rate_params(kTestFormat,
kTestChannelLayout,
0,
- kTestBitsPerSample,
kTestFramesPerPacket);
CrasInputStream* test_stream =
new CrasInputStream(bad_rate_params, mock_manager_.get(),
@@ -208,7 +191,6 @@ TEST_F(CrasInputStreamTest, CaptureFrames) {
AudioParameters params_mono(kTestFormat,
CHANNEL_LAYOUT_MONO,
rates[i],
- kTestBitsPerSample,
kTestFramesPerPacket);
CaptureSomeFrames(params_mono, kTestCaptureDurationMs);
}
@@ -218,7 +200,6 @@ TEST_F(CrasInputStreamTest, CaptureFrames) {
AudioParameters params_stereo(kTestFormat,
CHANNEL_LAYOUT_STEREO,
rates[i],
- kTestBitsPerSample,
kTestFramesPerPacket);
CaptureSomeFrames(params_stereo, kTestCaptureDurationMs);
}
diff --git a/chromium/media/audio/cras/cras_unified.cc b/chromium/media/audio/cras/cras_unified.cc
index e829561043f..9687e633834 100644
--- a/chromium/media/audio/cras/cras_unified.cc
+++ b/chromium/media/audio/cras/cras_unified.cc
@@ -94,12 +94,6 @@ bool CrasUnifiedStream::Open() {
return false;
}
- if (AudioManagerCras::BitsToFormat(params_.bits_per_sample()) ==
- SND_PCM_FORMAT_UNKNOWN) {
- LOG(WARNING) << "Unsupported pcm format";
- return false;
- }
-
// Create the client and connect to the CRAS server.
if (cras_client_create(&client_)) {
LOG(WARNING) << "Couldn't create CRAS client.\n";
@@ -165,9 +159,7 @@ void CrasUnifiedStream::Start(AudioSourceCallback* callback) {
// Prepare |audio_format| and |stream_params| for the stream we
// will create.
cras_audio_format* audio_format = cras_audio_format_create(
- AudioManagerCras::BitsToFormat(params_.bits_per_sample()),
- params_.sample_rate(),
- params_.channels());
+ SND_PCM_FORMAT_S16, params_.sample_rate(), params_.channels());
if (!audio_format) {
LOG(WARNING) << "Error setting up audio parameters.";
callback->OnError();
@@ -315,8 +307,8 @@ uint32_t CrasUnifiedStream::WriteAudio(size_t frames,
// Note: If this ever changes to output raw float the data must be clipped and
// sanitized since it may come from an untrusted source such as NaCl.
- output_bus_->ToInterleaved(
- frames_filled, bytes_per_frame_ / params_.channels(), buffer);
+ output_bus_->ToInterleaved<SignedInt16SampleTypeTraits>(
+ frames_filled, reinterpret_cast<int16_t*>(buffer));
return frames_filled;
}
diff --git a/chromium/media/audio/cras/cras_unified_unittest.cc b/chromium/media/audio/cras/cras_unified_unittest.cc
index b47b4692d11..486550c80f8 100644
--- a/chromium/media/audio/cras/cras_unified_unittest.cc
+++ b/chromium/media/audio/cras/cras_unified_unittest.cc
@@ -77,7 +77,7 @@ class CrasUnifiedStreamTest : public testing::Test {
CrasUnifiedStream* CreateStream(ChannelLayout layout,
int32_t samples_per_packet) {
AudioParameters params(kTestFormat, layout, kTestSampleRate,
- kTestBitsPerSample, samples_per_packet);
+ samples_per_packet);
return new CrasUnifiedStream(params, mock_manager_.get(),
AudioDeviceDescription::kDefaultDeviceId);
}
@@ -88,7 +88,6 @@ class CrasUnifiedStreamTest : public testing::Test {
static const ChannelLayout kTestChannelLayout;
static const int kTestSampleRate;
- static const int kTestBitsPerSample;
static const AudioParameters::Format kTestFormat;
static const uint32_t kTestFramesPerPacket;
@@ -103,7 +102,6 @@ const ChannelLayout CrasUnifiedStreamTest::kTestChannelLayout =
CHANNEL_LAYOUT_STEREO;
const int CrasUnifiedStreamTest::kTestSampleRate =
AudioParameters::kAudioCDSampleRate;
-const int CrasUnifiedStreamTest::kTestBitsPerSample = 16;
const AudioParameters::Format CrasUnifiedStreamTest::kTestFormat =
AudioParameters::AUDIO_PCM_LINEAR;
const uint32_t CrasUnifiedStreamTest::kTestFramesPerPacket = 1000;
@@ -123,18 +121,9 @@ TEST_F(CrasUnifiedStreamTest, ConstructedState) {
EXPECT_TRUE(test_stream->Open());
test_stream->Close();
- // Bad bits per sample.
- AudioParameters bad_bps_params(kTestFormat, kTestChannelLayout,
- kTestSampleRate, kTestBitsPerSample - 1,
- kTestFramesPerPacket);
- test_stream = new CrasUnifiedStream(bad_bps_params, mock_manager_.get(),
- AudioDeviceDescription::kDefaultDeviceId);
- EXPECT_FALSE(test_stream->Open());
- test_stream->Close();
-
// Bad sample rate.
- AudioParameters bad_rate_params(kTestFormat, kTestChannelLayout,
- 0, kTestBitsPerSample, kTestFramesPerPacket);
+ AudioParameters bad_rate_params(kTestFormat, kTestChannelLayout, 0,
+ kTestFramesPerPacket);
test_stream = new CrasUnifiedStream(bad_rate_params, mock_manager_.get(),
AudioDeviceDescription::kDefaultDeviceId);
EXPECT_FALSE(test_stream->Open());
diff --git a/chromium/media/audio/fake_audio_input_stream.cc b/chromium/media/audio/fake_audio_input_stream.cc
index 969eb3e157f..16b5457f516 100644
--- a/chromium/media/audio/fake_audio_input_stream.cc
+++ b/chromium/media/audio/fake_audio_input_stream.cc
@@ -97,6 +97,11 @@ bool FakeAudioInputStream::GetAutomaticGainControl() {
return false;
}
+void FakeAudioInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ // Not supported. Do nothing.
+}
+
void FakeAudioInputStream::ReadAudioFromSource() {
DCHECK(audio_manager_->GetWorkerTaskRunner()->BelongsToCurrentThread());
DCHECK(callback_);
diff --git a/chromium/media/audio/fake_audio_input_stream.h b/chromium/media/audio/fake_audio_input_stream.h
index 0be1b7f6fbf..61c41428e73 100644
--- a/chromium/media/audio/fake_audio_input_stream.h
+++ b/chromium/media/audio/fake_audio_input_stream.h
@@ -41,6 +41,7 @@ class MEDIA_EXPORT FakeAudioInputStream
bool IsMuted() override;
bool SetAutomaticGainControl(bool enabled) override;
bool GetAutomaticGainControl() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
// Generate one beep sound. This method is called by FakeVideoCaptureDevice to
// test audio/video synchronization. This is a static method because
diff --git a/chromium/media/audio/fake_audio_manager.cc b/chromium/media/audio/fake_audio_manager.cc
index 7460a0121cd..cc6ea938ca0 100644
--- a/chromium/media/audio/fake_audio_manager.cc
+++ b/chromium/media/audio/fake_audio_manager.cc
@@ -67,23 +67,21 @@ AudioParameters FakeAudioManager::GetPreferredOutputStreamParameters(
ChannelLayout channel_layout = CHANNEL_LAYOUT_STEREO;
int sample_rate = kDefaultSampleRate;
int buffer_size = kDefaultOutputBufferSize;
- int bits_per_sample = 16;
if (input_params.IsValid()) {
sample_rate = input_params.sample_rate();
- bits_per_sample = input_params.bits_per_sample();
channel_layout = input_params.channel_layout();
buffer_size = std::min(input_params.frames_per_buffer(), buffer_size);
}
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- sample_rate, bits_per_sample, buffer_size);
+ sample_rate, buffer_size);
}
AudioParameters FakeAudioManager::GetInputStreamParameters(
const std::string& device_id) {
- return AudioParameters(
- AudioParameters::AUDIO_PCM_LOW_LATENCY, CHANNEL_LAYOUT_STEREO,
- kDefaultSampleRate, 16, kDefaultInputBufferSize);
+ return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ CHANNEL_LAYOUT_STEREO, kDefaultSampleRate,
+ kDefaultInputBufferSize);
}
} // namespace media
diff --git a/chromium/media/audio/fuchsia/audio_manager_fuchsia.cc b/chromium/media/audio/fuchsia/audio_manager_fuchsia.cc
index 05b9f8dd14b..45d7ed093d6 100644
--- a/chromium/media/audio/fuchsia/audio_manager_fuchsia.cc
+++ b/chromium/media/audio/fuchsia/audio_manager_fuchsia.cc
@@ -103,7 +103,7 @@ AudioParameters AudioManagerFuchsia::GetPreferredOutputStreamParameters(
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
GuessChannelLayout(device_params.num_channels),
- sample_rate, 24, device_params.buffer_size);
+ sample_rate, device_params.buffer_size);
}
const char* AudioManagerFuchsia::GetName() {
diff --git a/chromium/media/audio/mac/audio_input_mac.cc b/chromium/media/audio/mac/audio_input_mac.cc
index b7b341e78e3..02082d90cc9 100644
--- a/chromium/media/audio/mac/audio_input_mac.cc
+++ b/chromium/media/audio/mac/audio_input_mac.cc
@@ -38,21 +38,24 @@ PCMQueueInAudioInputStream::PCMQueueInAudioInputStream(
audio_bus_(media::AudioBus::Create(params)) {
// We must have a manager.
DCHECK(manager_);
+
+ const SampleFormat kSampleFormat = kSampleFormatS16;
+
// A frame is one sample across all channels. In interleaved audio the per
// frame fields identify the set of n |channels|. In uncompressed audio, a
// packet is always one frame.
format_.mSampleRate = params.sample_rate();
format_.mFormatID = kAudioFormatLinearPCM;
- format_.mFormatFlags = kLinearPCMFormatFlagIsPacked |
- kLinearPCMFormatFlagIsSignedInteger;
- format_.mBitsPerChannel = params.bits_per_sample();
+ format_.mFormatFlags =
+ kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsSignedInteger;
+ format_.mBitsPerChannel = SampleFormatToBitsPerChannel(kSampleFormat);
format_.mChannelsPerFrame = params.channels();
format_.mFramesPerPacket = 1;
- format_.mBytesPerPacket = (params.bits_per_sample() * params.channels()) / 8;
- format_.mBytesPerFrame = format_.mBytesPerPacket;
+ format_.mBytesPerPacket = format_.mBytesPerFrame =
+ params.GetBytesPerFrame(kSampleFormat);
format_.mReserved = 0;
- buffer_size_bytes_ = params.GetBytesPerBuffer();
+ buffer_size_bytes_ = params.GetBytesPerBuffer(kSampleFormat);
}
PCMQueueInAudioInputStream::~PCMQueueInAudioInputStream() {
@@ -180,6 +183,11 @@ bool PCMQueueInAudioInputStream::GetAutomaticGainControl() {
return false;
}
+void PCMQueueInAudioInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ // Not supported. Do nothing.
+}
+
void PCMQueueInAudioInputStream::HandleError(OSStatus err) {
if (callback_)
callback_->OnError();
@@ -272,8 +280,9 @@ void PCMQueueInAudioInputStream::HandleInputBuffer(
: base::TimeTicks::Now();
uint8_t* audio_data = reinterpret_cast<uint8_t*>(audio_buffer->mAudioData);
- audio_bus_->FromInterleaved(audio_data, audio_bus_->frames(),
- format_.mBitsPerChannel / 8);
+ DCHECK_EQ(format_.mBitsPerChannel, 16u);
+ audio_bus_->FromInterleaved<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<int16_t*>(audio_data), audio_bus_->frames());
callback_->OnData(audio_bus_.get(), capture_time, 0.0);
last_fill_ = base::TimeTicks::Now();
diff --git a/chromium/media/audio/mac/audio_input_mac.h b/chromium/media/audio/mac/audio_input_mac.h
index 7cc37308a15..f496a286c25 100644
--- a/chromium/media/audio/mac/audio_input_mac.h
+++ b/chromium/media/audio/mac/audio_input_mac.h
@@ -45,6 +45,7 @@ class PCMQueueInAudioInputStream : public AudioInputStream {
bool SetAutomaticGainControl(bool enabled) override;
bool GetAutomaticGainControl() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
private:
// Issue the OnError to |callback_|;
diff --git a/chromium/media/audio/mac/audio_low_latency_input_mac.cc b/chromium/media/audio/mac/audio_low_latency_input_mac.cc
index 173167b54df..53586b888d8 100644
--- a/chromium/media/audio/mac/audio_low_latency_input_mac.cc
+++ b/chromium/media/audio/mac/audio_low_latency_input_mac.cc
@@ -12,15 +12,18 @@
#include "base/bind.h"
#include "base/logging.h"
+#include "base/mac/foundation_util.h"
#include "base/mac/mac_logging.h"
#include "base/mac/mac_util.h"
+#include "base/mac/scoped_cftyperef.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/stringprintf.h"
+#include "base/strings/sys_string_conversions.h"
#include "base/sys_info.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
-#include "media/audio/mac/audio_manager_mac.h"
+#include "media/audio/mac/core_audio_util_mac.h"
#include "media/audio/mac/scoped_audio_unit.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_timestamp_helper.h"
@@ -36,6 +39,13 @@ OSStatus AudioDeviceDuck(AudioDeviceID inDevice,
Float32 inRampDuration) __attribute__((weak_import));
}
+void UndoDucking(AudioDeviceID output_device_id) {
+ if (AudioDeviceDuck != nullptr) {
+ // Ramp the volume back up over half a second.
+ AudioDeviceDuck(output_device_id, 1.0, nullptr, 0.5);
+ }
+}
+
} // namespace
namespace media {
@@ -156,6 +166,45 @@ static void AddSystemInfoToUMA(bool is_on_battery, int num_resumes) {
DVLOG(1) << "resume events: " << num_resumes;
}
+// Finds the first subdevice, in an aggregate device, with output streams.
+static AudioDeviceID FindFirstOutputSubdevice(
+ AudioDeviceID aggregate_device_id) {
+ const AudioObjectPropertyAddress property_address = {
+ kAudioAggregateDevicePropertyFullSubDeviceList,
+ kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster};
+ base::ScopedCFTypeRef<CFArrayRef> subdevices;
+ UInt32 size = sizeof(subdevices);
+ OSStatus result = AudioObjectGetPropertyData(
+ aggregate_device_id, &property_address, 0 /* inQualifierDataSize */,
+ nullptr /* inQualifierData */, &size, subdevices.InitializeInto());
+
+ if (result != noErr) {
+ OSSTATUS_LOG(WARNING, result)
+ << "Failed to read property "
+ << kAudioAggregateDevicePropertyFullSubDeviceList << " for device "
+ << aggregate_device_id;
+ return kAudioObjectUnknown;
+ }
+
+ AudioDeviceID output_subdevice_id = kAudioObjectUnknown;
+ DCHECK_EQ(CFGetTypeID(subdevices), CFArrayGetTypeID());
+ const CFIndex count = CFArrayGetCount(subdevices);
+ for (CFIndex i = 0; i != count; ++i) {
+ CFStringRef value =
+ base::mac::CFCast<CFStringRef>(CFArrayGetValueAtIndex(subdevices, i));
+ if (value) {
+ std::string uid = base::SysCFStringRefToUTF8(value);
+ output_subdevice_id = AudioManagerMac::GetAudioDeviceIdByUId(false, uid);
+ if (output_subdevice_id != kAudioObjectUnknown &&
+ core_audio_mac::GetNumStreams(output_subdevice_id, false) > 0) {
+ break;
+ }
+ }
+ }
+
+ return output_subdevice_id;
+}
+
// See "Technical Note TN2091 - Device input using the HAL Output Audio Unit"
// http://developer.apple.com/library/mac/#technotes/tn2091/_index.html
// for more details and background regarding this implementation.
@@ -165,7 +214,7 @@ AUAudioInputStream::AUAudioInputStream(
const AudioParameters& input_params,
AudioDeviceID audio_device_id,
const AudioManager::LogCallback& log_callback,
- VoiceProcessingMode voice_processing_mode)
+ AudioManagerBase::VoiceProcessingMode voice_processing_mode)
: manager_(manager),
input_params_(input_params),
number_of_frames_provided_(0),
@@ -184,7 +233,8 @@ AUAudioInputStream::AUAudioInputStream(
audio_unit_render_has_worked_(false),
noise_reduction_suppressed_(false),
use_voice_processing_(voice_processing_mode ==
- VoiceProcessingMode::ENABLED),
+ AudioManagerBase::VoiceProcessingMode::kEnabled),
+ output_device_id_for_aec_(kAudioObjectUnknown),
last_sample_time_(0.0),
last_number_of_frames_(0),
total_lost_frames_(0),
@@ -195,20 +245,24 @@ AUAudioInputStream::AUAudioInputStream(
CHECK(!log_callback_.Equals(AudioManager::LogCallback()));
if (use_voice_processing_) {
DCHECK(input_params.channels() == 1 || input_params.channels() == 2);
+ const bool got_default_device =
+ AudioManagerMac::GetDefaultOutputDevice(&output_device_id_for_aec_);
+ DCHECK(got_default_device);
}
+ const SampleFormat kSampleFormat = kSampleFormatS16;
+
// Set up the desired (output) format specified by the client.
format_.mSampleRate = input_params.sample_rate();
format_.mFormatID = kAudioFormatLinearPCM;
format_.mFormatFlags =
kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsSignedInteger;
DCHECK(FormatIsInterleaved(format_.mFormatFlags));
- format_.mBitsPerChannel = input_params.bits_per_sample();
+ format_.mBitsPerChannel = SampleFormatToBitsPerChannel(kSampleFormat);
format_.mChannelsPerFrame = input_params.channels();
format_.mFramesPerPacket = 1; // uncompressed audio
- format_.mBytesPerPacket =
- (format_.mBitsPerChannel * input_params.channels()) / 8;
- format_.mBytesPerFrame = format_.mBytesPerPacket;
+ format_.mBytesPerPacket = format_.mBytesPerFrame =
+ input_params.GetBytesPerFrame(kSampleFormat);
format_.mReserved = 0;
DVLOG(1) << "ctor";
@@ -484,7 +538,7 @@ bool AUAudioInputStream::OpenVoiceProcessingAU() {
return false;
}
- // Next, set the audio device to be the Audio Unit's current device.
+ // Next, set the audio device to be the Audio Unit's input device.
result =
AudioUnitSetProperty(audio_unit_, kAudioOutputUnitProperty_CurrentDevice,
kAudioUnitScope_Global, AUElement::INPUT,
@@ -495,6 +549,17 @@ bool AUAudioInputStream::OpenVoiceProcessingAU() {
return false;
}
+ // Followed by the audio device to be the Audio Unit's output device.
+ result = AudioUnitSetProperty(
+ audio_unit_, kAudioOutputUnitProperty_CurrentDevice,
+ kAudioUnitScope_Global, AUElement::OUTPUT, &output_device_id_for_aec_,
+ sizeof(output_device_id_for_aec_));
+
+ if (result != noErr) {
+ HandleError(result);
+ return false;
+ }
+
// Register the input procedure for the AUHAL. This procedure will be called
// when the AUHAL has received new data from the input device.
AURenderCallbackStruct callback;
@@ -604,21 +669,7 @@ bool AUAudioInputStream::OpenVoiceProcessingAU() {
return false;
}
- if (AudioDeviceDuck != nullptr) {
- // Undo the ducking.
- // Obtain the AudioDeviceID of the default output AudioDevice.
- const AudioObjectPropertyAddress pa = {
- kAudioHardwarePropertyDefaultOutputDevice,
- kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster};
- AudioDeviceID output_device = 0;
- UInt32 size = sizeof(output_device);
- OSStatus result = AudioObjectGetPropertyData(kAudioObjectSystemObject, &pa,
- 0, 0, &size, &output_device);
- if (result == noErr) {
- // Ramp the volume back up over half a second.
- AudioDeviceDuck(output_device, 1.0, nullptr, 0.5);
- }
- }
+ UndoDucking(output_device_id_for_aec_);
return true;
}
@@ -892,6 +943,80 @@ bool AUAudioInputStream::IsMuted() {
return result == noErr && muted != 0;
}
+void AUAudioInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ if (!use_voice_processing_)
+ return;
+
+ AudioDeviceID audio_device_id =
+ AudioManagerMac::GetAudioDeviceIdByUId(false, output_device_id);
+ if (audio_device_id == output_device_id_for_aec_)
+ return;
+
+ if (audio_device_id == kAudioObjectUnknown) {
+ log_callback_.Run(
+ base::StringPrintf("AU in: Unable to resolve output device id '%s'",
+ output_device_id.c_str()));
+ return;
+ }
+
+ // If the selected device is an aggregate device, try to use the first output
+ // device of the aggregate device instead.
+ if (core_audio_mac::GetDeviceTransportType(audio_device_id) ==
+ kAudioDeviceTransportTypeAggregate) {
+ const AudioDeviceID output_subdevice_id =
+ FindFirstOutputSubdevice(audio_device_id);
+
+ if (output_subdevice_id == kAudioObjectUnknown) {
+ log_callback_.Run(base::StringPrintf(
+ "AU in: Unable to find an output subdevice in aggregate devie '%s'",
+ output_device_id.c_str()));
+ return;
+ }
+ audio_device_id = output_subdevice_id;
+ }
+
+ if (audio_device_id != output_device_id_for_aec_) {
+ log_callback_.Run(
+ base::StringPrintf("AU in: Output device for AEC changed to '%s' (%d)",
+ output_device_id.c_str(), audio_device_id));
+ SwitchVoiceProcessingOutputDevice(audio_device_id);
+ }
+}
+
+void AUAudioInputStream::SwitchVoiceProcessingOutputDevice(
+ AudioDeviceID output_device_id) {
+ DCHECK(use_voice_processing_);
+
+ output_device_id_for_aec_ = output_device_id;
+ if (!audio_unit_)
+ return;
+
+ OSStatus result = noErr;
+ if (IsRunning()) {
+ result = AudioOutputUnitStop(audio_unit_);
+ DCHECK_EQ(result, noErr);
+ }
+
+ CloseAudioUnit();
+ SetInputCallbackIsActive(false);
+ ReportAndResetStats();
+ io_buffer_frame_size_ = 0;
+ got_input_callback_ = false;
+
+ OpenVoiceProcessingAU();
+ result = AudioOutputUnitStart(audio_unit_);
+ if (result != noErr) {
+ OSSTATUS_DLOG(ERROR, result) << "Failed to start acquiring data";
+ Stop();
+ return;
+ }
+
+ log_callback_.Run(base::StringPrintf(
+ "AU in: Successfully reinitialized AEC for output device id=%d.",
+ output_device_id));
+}
+
// static
OSStatus AUAudioInputStream::DataIsAvailable(void* context,
AudioUnitRenderActionFlags* flags,
diff --git a/chromium/media/audio/mac/audio_low_latency_input_mac.h b/chromium/media/audio/mac/audio_low_latency_input_mac.h
index 8245f29e5b3..b0b6e62a7b2 100644
--- a/chromium/media/audio/mac/audio_low_latency_input_mac.h
+++ b/chromium/media/audio/mac/audio_low_latency_input_mac.h
@@ -50,26 +50,23 @@
#include "base/timer/timer.h"
#include "media/audio/agc_audio_stream.h"
#include "media/audio/audio_io.h"
-#include "media/audio/audio_manager.h"
+#include "media/audio/mac/audio_manager_mac.h"
#include "media/base/audio_block_fifo.h"
#include "media/base/audio_parameters.h"
namespace media {
-class AudioManagerMac;
-
class MEDIA_EXPORT AUAudioInputStream
: public AgcAudioStream<AudioInputStream> {
public:
- enum class VoiceProcessingMode { DISABLED = 0, ENABLED = 1 };
-
// The ctor takes all the usual parameters, plus |manager| which is the
// the audio manager who is creating this object.
- AUAudioInputStream(AudioManagerMac* manager,
- const AudioParameters& input_params,
- AudioDeviceID audio_device_id,
- const AudioManager::LogCallback& log_callback,
- VoiceProcessingMode voice_processing_mode);
+ AUAudioInputStream(
+ AudioManagerMac* manager,
+ const AudioParameters& input_params,
+ AudioDeviceID audio_device_id,
+ const AudioManager::LogCallback& log_callback,
+ AudioManagerBase::VoiceProcessingMode voice_processing_mode);
// The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioInputStream::Close().
~AUAudioInputStream() override;
@@ -83,6 +80,7 @@ class MEDIA_EXPORT AUAudioInputStream
void SetVolume(double volume) override;
double GetVolume() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
// Returns the current hardware sample rate for the default input device.
static int HardwareSampleRate();
@@ -150,6 +148,9 @@ class MEDIA_EXPORT AUAudioInputStream
// Uninitializes the audio unit if needed.
void CloseAudioUnit();
+ // Reinitializes the AudioUnit to use a new output device.
+ void SwitchVoiceProcessingOutputDevice(AudioDeviceID output_device_id);
+
// Adds extra UMA stats when it has been detected that startup failed.
void AddHistogramsForFailedStartup();
@@ -255,6 +256,9 @@ class MEDIA_EXPORT AUAudioInputStream
// and gain control on Sierra and later.
const bool use_voice_processing_;
+ // The ID of the output device to cancel echo from.
+ AudioDeviceID output_device_id_for_aec_;
+
// Stores the timestamp of the previous audio buffer provided by the OS.
// We use this in combination with |last_number_of_frames_| to detect when
// the OS has decided to skip providing frames (i.e. a glitch).
diff --git a/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc b/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
index 8e3348eb0b0..2deeb87c9f8 100644
--- a/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
+++ b/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
@@ -133,7 +133,7 @@ class MacAudioInputTest : public testing::Test {
int samples_per_packet = fs / 100;
AudioInputStream* ais = audio_manager_->MakeAudioInputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, fs, 16, samples_per_packet),
+ CHANNEL_LAYOUT_STEREO, fs, samples_per_packet),
AudioDeviceDescription::kDefaultDeviceId,
base::Bind(&MacAudioInputTest::OnLogMessage, base::Unretained(this)));
EXPECT_TRUE(ais);
@@ -147,7 +147,7 @@ class MacAudioInputTest : public testing::Test {
int samples_per_packet = fs / 100;
AudioInputStream* ais = audio_manager_->MakeAudioInputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- fs, 16, samples_per_packet),
+ fs, samples_per_packet),
AudioDeviceDescription::kDefaultDeviceId,
base::Bind(&MacAudioInputTest::OnLogMessage, base::Unretained(this)));
EXPECT_TRUE(ais);
diff --git a/chromium/media/audio/mac/audio_manager_mac.cc b/chromium/media/audio/mac/audio_manager_mac.cc
index c46b9ddd534..66a5622f2d3 100644
--- a/chromium/media/audio/mac/audio_manager_mac.cc
+++ b/chromium/media/audio/mac/audio_manager_mac.cc
@@ -160,8 +160,9 @@ static void GetAudioDeviceInfo(bool is_input,
}
}
-static AudioDeviceID GetAudioDeviceIdByUId(bool is_input,
- const std::string& device_id) {
+AudioDeviceID AudioManagerMac::GetAudioDeviceIdByUId(
+ bool is_input,
+ const std::string& device_id) {
DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
AudioObjectPropertyAddress property_address = {
kAudioHardwarePropertyDevices,
@@ -233,7 +234,7 @@ static bool GetDefaultDevice(AudioDeviceID* device, bool input) {
return true;
}
-static bool GetDefaultOutputDevice(AudioDeviceID* device) {
+bool AudioManagerMac::GetDefaultOutputDevice(AudioDeviceID* device) {
return GetDefaultDevice(device, false);
}
@@ -503,15 +504,15 @@ AudioManagerMac::AudioManagerMac(std::unique_ptr<AudioThread> audio_thread,
: AudioManagerBase(std::move(audio_thread), audio_log_factory),
current_sample_rate_(0),
current_output_device_(kAudioDeviceUnknown),
- in_shutdown_(false) {
+ in_shutdown_(false),
+ weak_ptr_factory_(this) {
SetMaxOutputStreamsAllowed(kMaxOutputStreams);
- // Task must be posted last to avoid races from handing out "this" to the
- // audio thread. Always PostTask even if we're on the right thread since
- // AudioManager creation is on the startup path and this may be slow.
+ // PostTask since AudioManager creation may be on the startup path and this
+ // may be slow.
GetTaskRunner()->PostTask(
FROM_HERE, base::Bind(&AudioManagerMac::InitializeOnAudioThread,
- base::Unretained(this)));
+ weak_ptr_factory_.GetWeakPtr()));
}
AudioManagerMac::~AudioManagerMac() = default;
@@ -525,20 +526,7 @@ void AudioManagerMac::ShutdownOnAudioThread() {
// Even if tasks to close the streams are enqueued, they would not run
// leading to CHECKs getting hit in the destructor about open streams. Close
// them explicitly here. crbug.com/608049.
- for (auto iter = basic_input_streams_.begin();
- iter != basic_input_streams_.end();) {
- // Note: Closing the stream will invalidate the iterator.
- // Increment the iterator before closing the stream.
- AudioInputStream* stream = *iter++;
- stream->Close();
- }
- for (auto iter = low_latency_input_streams_.begin();
- iter != low_latency_input_streams_.end();) {
- // Note: Closing the stream will invalidate the iterator.
- // Increment the iterator before closing the stream.
- AudioInputStream* stream = *iter++;
- stream->Close();
- }
+ CloseAllInputStreams();
CHECK(basic_input_streams_.empty());
CHECK(low_latency_input_streams_.empty());
@@ -614,7 +602,7 @@ AudioParameters AudioManagerMac::GetInputStreamParameters(
if (device == kAudioObjectUnknown) {
DLOG(ERROR) << "Invalid device " << device_id;
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, kFallbackSampleRate, 16,
+ CHANNEL_LAYOUT_STEREO, kFallbackSampleRate,
ChooseBufferSize(true, kFallbackSampleRate));
}
@@ -638,7 +626,7 @@ AudioParameters AudioManagerMac::GetInputStreamParameters(
// TODO(grunell): query the native channel layout for the specific device.
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- sample_rate, 16, buffer_size);
+ sample_rate, buffer_size);
if (DeviceSupportsAmbientNoiseReduction(device)) {
params.set_effects(AudioParameters::NOISE_SUPPRESSION);
@@ -808,11 +796,10 @@ AudioInputStream* AudioManagerMac::MakeLowLatencyInputStream(
return nullptr;
}
- using VoiceProcessingMode = AUAudioInputStream::VoiceProcessingMode;
VoiceProcessingMode voice_processing_mode =
(params.effects() & AudioParameters::ECHO_CANCELLER)
- ? VoiceProcessingMode::ENABLED
- : VoiceProcessingMode::DISABLED;
+ ? VoiceProcessingMode::kEnabled
+ : VoiceProcessingMode::kDisabled;
auto* stream = new AUAudioInputStream(this, params, audio_device_id,
log_callback, voice_processing_mode);
@@ -827,9 +814,11 @@ AudioParameters AudioManagerMac::GetPreferredOutputStreamParameters(
const AudioDeviceID device = GetAudioDeviceIdByUId(false, output_device_id);
if (device == kAudioObjectUnknown) {
DLOG(ERROR) << "Invalid output device " << output_device_id;
- return input_params.IsValid() ? input_params : AudioParameters(
- AudioParameters::AUDIO_PCM_LOW_LATENCY, CHANNEL_LAYOUT_STEREO,
- kFallbackSampleRate, 16, ChooseBufferSize(false, kFallbackSampleRate));
+ return input_params.IsValid()
+ ? input_params
+ : AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ CHANNEL_LAYOUT_STEREO, kFallbackSampleRate,
+ ChooseBufferSize(false, kFallbackSampleRate));
}
const bool has_valid_input_params = input_params.IsValid();
@@ -868,7 +857,7 @@ AudioParameters AudioManagerMac::GetPreferredOutputStreamParameters(
}
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- hardware_sample_rate, 16, buffer_size);
+ hardware_sample_rate, buffer_size);
params.set_channels_for_discrete(output_channels);
return params;
}
diff --git a/chromium/media/audio/mac/audio_manager_mac.h b/chromium/media/audio/mac/audio_manager_mac.h
index c642acefcbb..bb1977d5e94 100644
--- a/chromium/media/audio/mac/audio_manager_mac.h
+++ b/chromium/media/audio/mac/audio_manager_mac.h
@@ -16,6 +16,7 @@
#include "base/compiler_specific.h"
#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
#include "media/audio/audio_manager_base.h"
#include "media/audio/mac/audio_device_listener_mac.h"
@@ -77,6 +78,9 @@ class MEDIA_EXPORT AudioManagerMac : public AudioManagerBase {
static int HardwareSampleRateForDevice(AudioDeviceID device_id);
static int HardwareSampleRate();
+ static bool GetDefaultOutputDevice(AudioDeviceID* device);
+ static AudioDeviceID GetAudioDeviceIdByUId(bool is_input,
+ const std::string& device_id);
// OSX has issues with starting streams as the system goes into suspend and
// immediately after it wakes up from resume. See http://crbug.com/160920.
@@ -212,6 +216,8 @@ class MEDIA_EXPORT AudioManagerMac : public AudioManagerBase {
// Core Audio APIs are not executed during shutdown.
bool in_shutdown_;
+ base::WeakPtrFactory<AudioManagerMac> weak_ptr_factory_;
+
DISALLOW_COPY_AND_ASSIGN(AudioManagerMac);
};
diff --git a/chromium/media/audio/mac/core_audio_util_mac.cc b/chromium/media/audio/mac/core_audio_util_mac.cc
index 249ea673279..3c050b4bc69 100644
--- a/chromium/media/audio/mac/core_audio_util_mac.cc
+++ b/chromium/media/audio/mac/core_audio_util_mac.cc
@@ -10,7 +10,6 @@
#include "base/single_thread_task_runner.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
-#include "media/audio/audio_manager.h"
namespace media {
namespace core_audio_mac {
@@ -25,7 +24,6 @@ AudioObjectPropertyScope InputOutputScope(bool is_input) {
base::Optional<std::string> GetDeviceStringProperty(
AudioObjectID device_id,
AudioObjectPropertySelector property_selector) {
- DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
CFStringRef property_value = nullptr;
UInt32 size = sizeof(property_value);
AudioObjectPropertyAddress property_address = {
@@ -55,7 +53,6 @@ base::Optional<uint32_t> GetDeviceUint32Property(
AudioObjectID device_id,
AudioObjectPropertySelector property_selector,
AudioObjectPropertyScope property_scope) {
- DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
AudioObjectPropertyAddress property_address = {
property_selector, property_scope, kAudioObjectPropertyElementMaster};
UInt32 property_value;
@@ -72,7 +69,6 @@ base::Optional<uint32_t> GetDeviceUint32Property(
uint32_t GetDevicePropertySize(AudioObjectID device_id,
AudioObjectPropertySelector property_selector,
AudioObjectPropertyScope property_scope) {
- DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
AudioObjectPropertyAddress property_address = {
property_selector, property_scope, kAudioObjectPropertyElementMaster};
UInt32 size = 0;
@@ -91,7 +87,6 @@ uint32_t GetDevicePropertySize(AudioObjectID device_id,
std::vector<AudioObjectID> GetAudioDeviceIDs(
AudioObjectID audio_object_id,
AudioObjectPropertySelector property_selector) {
- DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
AudioObjectPropertyAddress property_address = {
property_selector, kAudioObjectPropertyScopeGlobal,
kAudioObjectPropertyElementMaster};
diff --git a/chromium/media/audio/mac/coreaudio_dispatch_override.cc b/chromium/media/audio/mac/coreaudio_dispatch_override.cc
index 30b4e2c340f..2ac812b0f3f 100644
--- a/chromium/media/audio/mac/coreaudio_dispatch_override.cc
+++ b/chromium/media/audio/mac/coreaudio_dispatch_override.cc
@@ -16,7 +16,7 @@
namespace {
struct dyld_interpose_tuple {
template <typename T>
- dyld_interpose_tuple(const T* replacement, const T* replacee)
+ dyld_interpose_tuple(T* replacement, T* replacee)
: replacement(reinterpret_cast<const void*>(replacement)),
replacee(reinterpret_cast<const void*>(replacee)) {}
const void* replacement;
diff --git a/chromium/media/audio/mock_audio_debug_recording_manager.h b/chromium/media/audio/mock_audio_debug_recording_manager.h
index 797f88be0bd..3e9073e24da 100644
--- a/chromium/media/audio/mock_audio_debug_recording_manager.h
+++ b/chromium/media/audio/mock_audio_debug_recording_manager.h
@@ -6,6 +6,7 @@
#define MEDIA_AUDIO_MOCK_AUDIO_DEBUG_RECORDING_MANAGER_H_
#include "base/macros.h"
+#include "base/single_thread_task_runner.h"
#include "media/audio/audio_debug_recording_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
diff --git a/chromium/media/audio/mock_audio_source_callback.h b/chromium/media/audio/mock_audio_source_callback.h
index 7283c6ca0b2..f7c95111b8f 100644
--- a/chromium/media/audio/mock_audio_source_callback.h
+++ b/chromium/media/audio/mock_audio_source_callback.h
@@ -17,7 +17,7 @@ namespace media {
class MockAudioSourceCallback : public AudioOutputStream::AudioSourceCallback {
public:
MockAudioSourceCallback();
- virtual ~MockAudioSourceCallback();
+ ~MockAudioSourceCallback() override;
MOCK_METHOD4(OnMoreData,
int(base::TimeDelta, base::TimeTicks, int, AudioBus*));
diff --git a/chromium/media/audio/null_audio_sink.cc b/chromium/media/audio/null_audio_sink.cc
index 58f120e298d..0f02fe96716 100644
--- a/chromium/media/audio/null_audio_sink.cc
+++ b/chromium/media/audio/null_audio_sink.cc
@@ -88,7 +88,6 @@ bool NullAudioSink::CurrentThreadIsRenderingThread() {
}
void NullAudioSink::SwitchOutputDevice(const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) {
callback.Run(OUTPUT_DEVICE_STATUS_ERROR_INTERNAL);
}
diff --git a/chromium/media/audio/null_audio_sink.h b/chromium/media/audio/null_audio_sink.h
index 9a2eb647d1d..8395d521fb8 100644
--- a/chromium/media/audio/null_audio_sink.h
+++ b/chromium/media/audio/null_audio_sink.h
@@ -22,7 +22,8 @@ class FakeAudioWorker;
class MEDIA_EXPORT NullAudioSink : public SwitchableAudioRendererSink {
public:
- NullAudioSink(const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
+ explicit NullAudioSink(
+ const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
// AudioRendererSink implementation.
void Initialize(const AudioParameters& params,
@@ -36,7 +37,6 @@ class MEDIA_EXPORT NullAudioSink : public SwitchableAudioRendererSink {
bool IsOptimizedForHardwareParameters() override;
bool CurrentThreadIsRenderingThread() override;
void SwitchOutputDevice(const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) override;
// Enables audio frame hashing. Must be called prior to Initialize().
diff --git a/chromium/media/audio/pulse/audio_manager_pulse.cc b/chromium/media/audio/pulse/audio_manager_pulse.cc
index 8cd8711be09..fc0ec5d6c4d 100644
--- a/chromium/media/audio/pulse/audio_manager_pulse.cc
+++ b/chromium/media/audio/pulse/audio_manager_pulse.cc
@@ -108,7 +108,7 @@ AudioParameters AudioManagerPulse::GetInputStreamParameters(
// TODO(xians): add support for querying native channel layout for pulse.
UpdateNativeAudioHardwareInfo();
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, native_input_sample_rate_, 16,
+ CHANNEL_LAYOUT_STEREO, native_input_sample_rate_,
buffer_size);
}
@@ -156,7 +156,6 @@ AudioParameters AudioManagerPulse::GetPreferredOutputStreamParameters(
VLOG_IF(0, !output_device_id.empty()) << "Not implemented!";
int buffer_size = kMinimumOutputBufferSize;
- int bits_per_sample = 16;
// Query native parameters where applicable; Pulse does not require these to
// be respected though, so prefer the input parameters for channel count.
@@ -165,8 +164,12 @@ AudioParameters AudioManagerPulse::GetPreferredOutputStreamParameters(
ChannelLayout channel_layout = GuessChannelLayout(native_channel_count_);
if (input_params.IsValid()) {
- bits_per_sample = input_params.bits_per_sample();
- channel_layout = input_params.channel_layout();
+ // Use the system's output channel count for the DISCRETE layout. This is to
+ // avoid a crash due to the lack of support on the multi-channel beyond 8 in
+ // the PulseAudio layer.
+ if (input_params.channel_layout() != CHANNEL_LAYOUT_DISCRETE)
+ channel_layout = input_params.channel_layout();
+
buffer_size =
std::min(kMaximumOutputBufferSize,
std::max(buffer_size, input_params.frames_per_buffer()));
@@ -177,7 +180,7 @@ AudioParameters AudioManagerPulse::GetPreferredOutputStreamParameters(
buffer_size = user_buffer_size;
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- sample_rate, bits_per_sample, buffer_size);
+ sample_rate, buffer_size);
}
AudioOutputStream* AudioManagerPulse::MakeOutputStream(
diff --git a/chromium/media/audio/pulse/pulse_input.cc b/chromium/media/audio/pulse/pulse_input.cc
index 3a0aa4d2f9d..a342eb76ab9 100644
--- a/chromium/media/audio/pulse/pulse_input.cc
+++ b/chromium/media/audio/pulse/pulse_input.cc
@@ -199,6 +199,11 @@ bool PulseAudioInputStream::IsMuted() {
return muted_;
}
+void PulseAudioInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ // Not supported. Do nothing.
+}
+
// static, used by pa_stream_set_read_callback.
void PulseAudioInputStream::ReadCallback(pa_stream* handle,
size_t length,
@@ -295,17 +300,20 @@ void PulseAudioInputStream::ReadData() {
if (!data || length == 0)
break;
- const int number_of_frames = length / params_.GetBytesPerFrame();
+ const int number_of_frames =
+ length / params_.GetBytesPerFrame(pulse::kInputSampleFormat);
if (number_of_frames > fifo_.GetUnfilledFrames()) {
// Dynamically increase capacity to the FIFO to handle larger buffer got
// from Pulse.
- const int increase_blocks_of_buffer = static_cast<int>(
- (number_of_frames - fifo_.GetUnfilledFrames()) /
- params_.frames_per_buffer()) + 1;
+ const int increase_blocks_of_buffer =
+ static_cast<int>((number_of_frames - fifo_.GetUnfilledFrames()) /
+ params_.frames_per_buffer()) +
+ 1;
fifo_.IncreaseCapacity(increase_blocks_of_buffer);
}
- fifo_.Push(data, number_of_frames, params_.bits_per_sample() / 8);
+ fifo_.Push(data, number_of_frames,
+ SampleFormatToBytesPerChannel(pulse::kInputSampleFormat));
// Checks if we still have data.
pa_stream_drop(handle_);
diff --git a/chromium/media/audio/pulse/pulse_input.h b/chromium/media/audio/pulse/pulse_input.h
index 19e3b0b171d..56b39069681 100644
--- a/chromium/media/audio/pulse/pulse_input.h
+++ b/chromium/media/audio/pulse/pulse_input.h
@@ -40,6 +40,7 @@ class PulseAudioInputStream : public AgcAudioStream<AudioInputStream> {
void SetVolume(double volume) override;
double GetVolume() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
private:
// PulseAudio Callbacks.
diff --git a/chromium/media/audio/pulse/pulse_output.cc b/chromium/media/audio/pulse/pulse_output.cc
index a6ff39cdf35..ca1181d3aa8 100644
--- a/chromium/media/audio/pulse/pulse_output.cc
+++ b/chromium/media/audio/pulse/pulse_output.cc
@@ -47,10 +47,6 @@ PulseAudioOutputStream::PulseAudioOutputStream(const AudioParameters& params,
: params_(AudioParameters(params.format(),
params.channel_layout(),
params.sample_rate(),
- // Ignore the given bits per sample. We
- // want 32 because we're outputting
- // floats.
- 32,
params.frames_per_buffer())),
device_id_(device_id),
manager_(manager),
@@ -58,7 +54,8 @@ PulseAudioOutputStream::PulseAudioOutputStream(const AudioParameters& params,
pa_mainloop_(NULL),
pa_stream_(NULL),
volume_(1.0f),
- source_callback_(NULL) {
+ source_callback_(NULL),
+ buffer_size_(params_.GetBytesPerBuffer(kSampleFormatF32)) {
CHECK(params_.IsValid());
audio_bus_ = AudioBus::Create(params_);
}
@@ -131,7 +128,7 @@ void PulseAudioOutputStream::FulfillWriteRequest(size_t requested_bytes) {
int bytes_remaining = requested_bytes;
while (bytes_remaining > 0) {
void* pa_buffer = nullptr;
- size_t pa_buffer_size = params_.GetBytesPerBuffer();
+ size_t pa_buffer_size = buffer_size_;
CHECK_GE(pa_stream_begin_write(pa_stream_, &pa_buffer, &pa_buffer_size), 0);
if (!source_callback_) {
@@ -155,7 +152,7 @@ void PulseAudioOutputStream::FulfillWriteRequest(size_t requested_bytes) {
audio_bus_->Scale(volume_);
- size_t frame_size = params_.GetBytesPerBuffer() / unwritten_frames_in_bus;
+ size_t frame_size = buffer_size_ / unwritten_frames_in_bus;
size_t frames_to_copy = pa_buffer_size / frame_size;
size_t frame_offset_in_bus = 0;
do {
diff --git a/chromium/media/audio/pulse/pulse_output.h b/chromium/media/audio/pulse/pulse_output.h
index 8e86b0461ec..3a7b3fe866d 100644
--- a/chromium/media/audio/pulse/pulse_output.h
+++ b/chromium/media/audio/pulse/pulse_output.h
@@ -93,6 +93,8 @@ class PulseAudioOutputStream : public AudioOutputStream {
// Container for retrieving data from AudioSourceCallback::OnMoreData().
std::unique_ptr<AudioBus> audio_bus_;
+ const size_t buffer_size_;
+
base::ThreadChecker thread_checker_;
DISALLOW_COPY_AND_ASSIGN(PulseAudioOutputStream);
diff --git a/chromium/media/audio/pulse/pulse_util.cc b/chromium/media/audio/pulse/pulse_util.cc
index 8d7d5f38215..372e73dccb0 100644
--- a/chromium/media/audio/pulse/pulse_util.cc
+++ b/chromium/media/audio/pulse/pulse_util.cc
@@ -11,7 +11,6 @@
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "media/audio/audio_device_description.h"
-#include "media/base/audio_parameters.h"
#include "media/base/audio_timestamp_helper.h"
#if defined(DLOPEN_PULSEAUDIO)
@@ -193,22 +192,6 @@ void ContextStateCallback(pa_context* context, void* mainloop) {
pa_threaded_mainloop_signal(pa_mainloop, 0);
}
-pa_sample_format_t BitsToPASampleFormat(int bits_per_sample) {
- switch (bits_per_sample) {
- case 8:
- return PA_SAMPLE_U8;
- case 16:
- return PA_SAMPLE_S16LE;
- case 24:
- return PA_SAMPLE_S24LE;
- case 32:
- return PA_SAMPLE_S32LE;
- default:
- NOTREACHED() << "Invalid bits per sample: " << bits_per_sample;
- return PA_SAMPLE_INVALID;
- }
-}
-
pa_channel_map ChannelLayoutToPAChannelMap(ChannelLayout channel_layout) {
pa_channel_map channel_map;
if (channel_layout == CHANNEL_LAYOUT_MONO) {
@@ -279,8 +262,12 @@ bool CreateInputStream(pa_threaded_mainloop* mainloop,
// Set sample specifications.
pa_sample_spec sample_specifications;
- sample_specifications.format = BitsToPASampleFormat(
- params.bits_per_sample());
+
+ // FIXME: This should be PA_SAMPLE_FLOAT32, but there is more work needed in
+ // PulseAudioInputStream to support this.
+ static_assert(kInputSampleFormat == kSampleFormatS16,
+ "Only 16-bit input supported.");
+ sample_specifications.format = PA_SAMPLE_S16LE;
sample_specifications.rate = params.sample_rate();
sample_specifications.channels = params.channels();
@@ -306,7 +293,7 @@ bool CreateInputStream(pa_threaded_mainloop* mainloop,
// values should be chosen can be found at
// freedesktop.org/software/pulseaudio/doxygen/structpa__buffer__attr.html.
pa_buffer_attr buffer_attributes;
- const unsigned int buffer_size = params.GetBytesPerBuffer();
+ const unsigned int buffer_size = params.GetBytesPerBuffer(kInputSampleFormat);
buffer_attributes.maxlength = static_cast<uint32_t>(-1);
buffer_attributes.tlength = buffer_size;
buffer_attributes.minreq = buffer_size;
@@ -424,11 +411,12 @@ bool CreateOutputStream(pa_threaded_mainloop** mainloop,
// Setting |minreq| to the exact buffer size leads to more callbacks than
// necessary, so we've clipped it to half the buffer size. Regardless of the
// requested amount, we'll always fill |params.GetBytesPerBuffer()| though.
+ size_t buffer_size = params.GetBytesPerBuffer(kSampleFormatF32);
pa_buffer_attr pa_buffer_attributes;
pa_buffer_attributes.maxlength = static_cast<uint32_t>(-1);
- pa_buffer_attributes.minreq = params.GetBytesPerBuffer() / 2;
+ pa_buffer_attributes.minreq = buffer_size / 2;
pa_buffer_attributes.prebuf = static_cast<uint32_t>(-1);
- pa_buffer_attributes.tlength = params.GetBytesPerBuffer() * 3;
+ pa_buffer_attributes.tlength = buffer_size * 3;
pa_buffer_attributes.fragsize = static_cast<uint32_t>(-1);
// Connect playback stream. Like pa_buffer_attr, the pa_stream_flags have a
diff --git a/chromium/media/audio/pulse/pulse_util.h b/chromium/media/audio/pulse/pulse_util.h
index 2edb432a8d6..678fc82d341 100644
--- a/chromium/media/audio/pulse/pulse_util.h
+++ b/chromium/media/audio/pulse/pulse_util.h
@@ -12,6 +12,7 @@
#include "base/macros.h"
#include "base/time/time.h"
#include "media/audio/audio_device_name.h"
+#include "media/base/audio_parameters.h"
#include "media/base/channel_layout.h"
namespace media {
@@ -45,8 +46,6 @@ void DestroyPulse(pa_threaded_mainloop* mainloop, pa_context* context);
void StreamSuccessCallback(pa_stream* s, int error, void* mainloop);
void ContextStateCallback(pa_context* context, void* mainloop);
-pa_sample_format_t BitsToPASampleFormat(int bits_per_sample);
-
pa_channel_map ChannelLayoutToPAChannelMap(ChannelLayout channel_layout);
void WaitForOperationCompletion(pa_threaded_mainloop* mainloop,
@@ -54,6 +53,8 @@ void WaitForOperationCompletion(pa_threaded_mainloop* mainloop,
base::TimeDelta GetHardwareLatency(pa_stream* stream);
+constexpr SampleFormat kInputSampleFormat = kSampleFormatS16;
+
// Create a recording stream for the threaded mainloop, return true if success,
// otherwise false. |mainloop| and |context| have to be from a valid Pulse
// threaded mainloop and the handle of the created stream will be returned by
diff --git a/chromium/media/audio/scoped_task_runner_observer.cc b/chromium/media/audio/scoped_task_runner_observer.cc
index 2f43655de10..f275174c57f 100644
--- a/chromium/media/audio/scoped_task_runner_observer.cc
+++ b/chromium/media/audio/scoped_task_runner_observer.cc
@@ -24,7 +24,7 @@ void ScopedTaskRunnerObserver::ObserveLoopDestruction(
base::WaitableEvent* done) {
// Note: |done| may be NULL.
if (task_runner_->BelongsToCurrentThread()) {
- base::MessageLoop* loop = base::MessageLoop::current();
+ base::MessageLoopCurrent loop = base::MessageLoopCurrent::Get();
if (enable) {
loop->AddDestructionObserver(this);
} else {
diff --git a/chromium/media/audio/scoped_task_runner_observer.h b/chromium/media/audio/scoped_task_runner_observer.h
index d7efe669203..b0f0b319216 100644
--- a/chromium/media/audio/scoped_task_runner_observer.h
+++ b/chromium/media/audio/scoped_task_runner_observer.h
@@ -7,7 +7,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_loop_current.h"
namespace base {
class SingleThreadTaskRunner;
@@ -23,7 +23,7 @@ namespace media {
// NOTE: The class that inherits from this class must implement the
// WillDestroyCurrentMessageLoop virtual method from DestructionObserver.
class ScopedTaskRunnerObserver
- : public base::MessageLoop::DestructionObserver {
+ : public base::MessageLoopCurrent::DestructionObserver {
public:
explicit ScopedTaskRunnerObserver(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
diff --git a/chromium/media/audio/simple_sources.cc b/chromium/media/audio/simple_sources.cc
index ca9bf98b422..1eebf239afd 100644
--- a/chromium/media/audio/simple_sources.cc
+++ b/chromium/media/audio/simple_sources.cc
@@ -193,8 +193,7 @@ void FileSource::LoadWavFile(const base::FilePath& path_to_wav_file) {
AudioParameters file_audio_slice(
AudioParameters::AUDIO_PCM_LOW_LATENCY,
GuessChannelLayout(wav_audio_handler_->num_channels()),
- wav_audio_handler_->sample_rate(), wav_audio_handler_->bits_per_sample(),
- params_.frames_per_buffer());
+ wav_audio_handler_->sample_rate(), params_.frames_per_buffer());
file_audio_converter_.reset(
new AudioConverter(file_audio_slice, params_, false));
@@ -244,14 +243,13 @@ double FileSource::ProvideInput(AudioBus* audio_bus_into_converter,
void FileSource::OnError() {}
BeepingSource::BeepingSource(const AudioParameters& params)
- : buffer_size_(params.GetBytesPerBuffer()),
+ : buffer_size_(params.GetBytesPerBuffer(kSampleFormatU8)),
buffer_(new uint8_t[buffer_size_]),
params_(params),
last_callback_time_(base::TimeTicks::Now()),
beep_duration_in_buffers_(kBeepDurationMilliseconds *
params.sample_rate() /
- params.frames_per_buffer() /
- 1000),
+ params.frames_per_buffer() / 1000),
beep_generated_in_buffers_(0),
beep_period_in_frames_(params.sample_rate() / kBeepFrequency) {}
@@ -283,10 +281,9 @@ int BeepingSource::OnMoreData(base::TimeDelta /* delay */,
// generate a beep sound.
if (should_beep || beep_generated_in_buffers_) {
// Compute the number of frames to output high value. Then compute the
- // number of bytes based on channels and bits per channel.
+ // number of bytes based on channels.
int high_frames = beep_period_in_frames_ / 2;
- int high_bytes = high_frames * params_.bits_per_sample() *
- params_.channels() / 8;
+ int high_bytes = high_frames * params_.channels();
// Separate high and low with the same number of bytes to generate a
// square wave.
@@ -304,8 +301,8 @@ int BeepingSource::OnMoreData(base::TimeDelta /* delay */,
}
last_callback_time_ = base::TimeTicks::Now();
- dest->FromInterleaved(buffer_.get(), dest->frames(),
- params_.bits_per_sample() / 8);
+ dest->FromInterleaved<UnsignedInt8SampleTypeTraits>(buffer_.get(),
+ dest->frames());
return dest->frames();
}
diff --git a/chromium/media/audio/simple_sources_unittest.cc b/chromium/media/audio/simple_sources_unittest.cc
index bd7ce42ed65..0bab509b119 100644
--- a/chromium/media/audio/simple_sources_unittest.cc
+++ b/chromium/media/audio/simple_sources_unittest.cc
@@ -23,12 +23,10 @@ namespace media {
// Validate that the SineWaveAudioSource writes the expected values.
TEST(SimpleSources, SineWaveAudioSource) {
static const uint32_t samples = 1024;
- static const uint32_t bytes_per_sample = 2;
static const int freq = 200;
- AudioParameters params(
- AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kTelephoneSampleRate, bytes_per_sample * 8, samples);
+ AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
+ AudioParameters::kTelephoneSampleRate, samples);
SineWaveAudioSource source(1, freq, params.sample_rate());
std::unique_ptr<AudioBus> audio_bus = AudioBus::Create(params);
@@ -125,7 +123,7 @@ TEST(SimpleSources, FileSourceTestDataWithoutLooping) {
// Create AudioParameters which match those in the WAV data.
AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 48000, 16, kNumFrames);
+ CHANNEL_LAYOUT_STEREO, 48000, kNumFrames);
std::unique_ptr<AudioBus> audio_bus = AudioBus::Create(2, kNumFrames);
audio_bus->Zero();
@@ -163,7 +161,7 @@ TEST(SimpleSources, FileSourceTestDataWithLooping) {
// Create AudioParameters which match those in the WAV data.
AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 48000, 16, kNumFrames);
+ CHANNEL_LAYOUT_STEREO, 48000, kNumFrames);
std::unique_ptr<AudioBus> audio_bus = AudioBus::Create(2, kNumFrames);
audio_bus->Zero();
@@ -182,7 +180,7 @@ TEST(SimpleSources, FileSourceTestDataWithLooping) {
TEST(SimpleSources, BadFilePathFails) {
AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 48000, 16, 10);
+ CHANNEL_LAYOUT_STEREO, 48000, 10);
std::unique_ptr<AudioBus> audio_bus = AudioBus::Create(2, 10);
audio_bus->Zero();
@@ -222,7 +220,7 @@ TEST(SimpleSources, FileSourceCorruptTestDataFails) {
// Create AudioParameters which match those in the WAV data.
AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 48000, 16, kNumFrames);
+ CHANNEL_LAYOUT_STEREO, 48000, kNumFrames);
std::unique_ptr<AudioBus> audio_bus = AudioBus::Create(2, kNumFrames);
audio_bus->Zero();
diff --git a/chromium/media/audio/sounds/audio_stream_handler.cc b/chromium/media/audio/sounds/audio_stream_handler.cc
index fd42e8669df..90ff6a089e8 100644
--- a/chromium/media/audio/sounds/audio_stream_handler.cc
+++ b/chromium/media/audio/sounds/audio_stream_handler.cc
@@ -62,8 +62,7 @@ class AudioStreamHandler::AudioStreamContainer
const AudioParameters params(
AudioParameters::AUDIO_PCM_LOW_LATENCY,
GuessChannelLayout(wav_audio_->num_channels()),
- wav_audio_->sample_rate(), wav_audio_->bits_per_sample(),
- kDefaultFrameCount);
+ wav_audio_->sample_rate(), kDefaultFrameCount);
stream_ =
audio_manager_->MakeAudioOutputStreamProxy(params, std::string());
if (!stream_ || !stream_->Open()) {
@@ -182,10 +181,9 @@ AudioStreamHandler::AudioStreamHandler(const base::StringPiece& wav_data) {
return;
}
- const AudioParameters params(
- AudioParameters::AUDIO_PCM_LOW_LATENCY,
- GuessChannelLayout(wav_audio->num_channels()), wav_audio->sample_rate(),
- wav_audio->bits_per_sample(), kDefaultFrameCount);
+ const AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ GuessChannelLayout(wav_audio->num_channels()),
+ wav_audio->sample_rate(), kDefaultFrameCount);
if (!params.IsValid()) {
LOG(ERROR) << "Audio params are invalid.";
return;
diff --git a/chromium/media/audio/virtual_audio_input_stream.cc b/chromium/media/audio/virtual_audio_input_stream.cc
index f885e9fd1f8..660fb6692a4 100644
--- a/chromium/media/audio/virtual_audio_input_stream.cc
+++ b/chromium/media/audio/virtual_audio_input_stream.cc
@@ -151,4 +151,9 @@ bool VirtualAudioInputStream::IsMuted() {
return false;
}
+void VirtualAudioInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ // Not supported. Do nothing.
+}
+
} // namespace media
diff --git a/chromium/media/audio/virtual_audio_input_stream.h b/chromium/media/audio/virtual_audio_input_stream.h
index b794c382595..e1973054b9c 100644
--- a/chromium/media/audio/virtual_audio_input_stream.h
+++ b/chromium/media/audio/virtual_audio_input_stream.h
@@ -58,6 +58,7 @@ class MEDIA_EXPORT VirtualAudioInputStream : public AudioInputStream {
bool SetAutomaticGainControl(bool enabled) override;
bool GetAutomaticGainControl() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
// Attaches an AudioConverter::InputCallback to be used as input. This
// VirtualAudioInputStream must outlive all attached streams, so any attached
diff --git a/chromium/media/audio/virtual_audio_input_stream_unittest.cc b/chromium/media/audio/virtual_audio_input_stream_unittest.cc
index e5bae4a3cc0..63d3fc6ee1d 100644
--- a/chromium/media/audio/virtual_audio_input_stream_unittest.cc
+++ b/chromium/media/audio/virtual_audio_input_stream_unittest.cc
@@ -29,8 +29,10 @@ namespace media {
namespace {
-const AudioParameters kParams(
- AudioParameters::AUDIO_PCM_LOW_LATENCY, CHANNEL_LAYOUT_STEREO, 8000, 8, 10);
+const AudioParameters kParams(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ CHANNEL_LAYOUT_STEREO,
+ 8000,
+ 10);
class MockInputCallback : public AudioInputStream::AudioInputCallback {
public:
@@ -42,7 +44,7 @@ class MockInputCallback : public AudioInputStream::AudioInputCallback {
InvokeWithoutArgs(&data_pushed_, &base::WaitableEvent::Signal));
}
- virtual ~MockInputCallback() = default;
+ ~MockInputCallback() override = default;
MOCK_METHOD3(OnData,
void(const AudioBus* source,
diff --git a/chromium/media/audio/virtual_audio_output_stream_unittest.cc b/chromium/media/audio/virtual_audio_output_stream_unittest.cc
index 8998d01f349..569e2387850 100644
--- a/chromium/media/audio/virtual_audio_output_stream_unittest.cc
+++ b/chromium/media/audio/virtual_audio_output_stream_unittest.cc
@@ -21,8 +21,10 @@ using ::testing::_;
namespace media {
namespace {
-const AudioParameters kParams(
- AudioParameters::AUDIO_PCM_LOW_LATENCY, CHANNEL_LAYOUT_MONO, 8000, 8, 128);
+const AudioParameters kParams(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ CHANNEL_LAYOUT_MONO,
+ 8000,
+ 128);
}
class MockVirtualAudioInputStream : public VirtualAudioInputStream {
@@ -33,7 +35,7 @@ class MockVirtualAudioInputStream : public VirtualAudioInputStream {
kParams,
worker_task_runner,
base::Bind(&base::DeletePointer<VirtualAudioInputStream>)) {}
- ~MockVirtualAudioInputStream() = default;
+ ~MockVirtualAudioInputStream() override = default;
MOCK_METHOD2(AddInputProvider,
void(AudioConverter::InputCallback* input,
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.cc b/chromium/media/audio/win/audio_low_latency_input_win.cc
index 7098c193bcc..10d49f911e3 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win.cc
@@ -4,7 +4,11 @@
#include "media/audio/win/audio_low_latency_input_win.h"
+#include <audiopolicy.h>
+#include <mediaobj.h>
#include <objbase.h>
+#include <uuids.h>
+#include <wmcodecdsp.h>
#include <algorithm>
#include <cmath>
@@ -18,7 +22,6 @@
#include "base/trace_event/trace_event.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/audio_features.h"
-#include "media/audio/win/audio_manager_win.h"
#include "media/audio/win/avrt_wrapper_win.h"
#include "media/audio/win/core_audio_util_win.h"
#include "media/base/audio_block_fifo.h"
@@ -67,27 +70,136 @@ bool IsSupportedFormatForConversion(const WAVEFORMATEX& format) {
return true;
}
+// Returns the index of the device in the device collection, or -1 for the
+// default device, as used by the voice processing DMO.
+base::Optional<WORD> GetAudioDeviceCollectionIndexFromId(
+ const std::string& device_id,
+ const EDataFlow data_flow) {
+ // The default device is specified with -1.
+ if (AudioDeviceDescription::IsDefaultDevice(device_id))
+ return -1;
+
+ WORD device_index = -1;
+ HRESULT hr = E_FAIL;
+ // The default communications does not have an index itself, so we need to
+ // find the index for the underlying device.
+ if (AudioDeviceDescription::IsCommunicationsDevice(device_id)) {
+ const std::string communications_id =
+ (data_flow == eCapture)
+ ? CoreAudioUtil::GetCommunicationsInputDeviceID()
+ : CoreAudioUtil::GetCommunicationsOutputDeviceID();
+ hr = CoreAudioUtil::GetDeviceCollectionIndex(communications_id, data_flow,
+ &device_index);
+ } else {
+ // Otherwise, just look for the device_id directly.
+ hr = CoreAudioUtil::GetDeviceCollectionIndex(device_id, data_flow,
+ &device_index);
+ }
+
+ if (FAILED(hr) || hr == S_FALSE)
+ return base::nullopt;
+
+ return device_index;
+}
+
+// Implementation of IMediaBuffer, as required for
+// IMediaObject::ProcessOutput(). After consuming data provided by
+// ProcessOutput(), call SetLength() to update the buffer availability.
+// Example implementation:
+// http://msdn.microsoft.com/en-us/library/dd376684(v=vs.85).aspx
+class MediaBufferImpl : public IMediaBuffer {
+ public:
+ explicit MediaBufferImpl(DWORD max_length)
+ : data_(new BYTE[max_length]), max_length_(max_length) {}
+
+ // IMediaBuffer implementation.
+ STDMETHOD(GetBufferAndLength)(BYTE** buffer, DWORD* length) {
+ if (!buffer || !length)
+ return E_POINTER;
+
+ *buffer = data_.get();
+ *length = length_;
+ return S_OK;
+ }
+
+ STDMETHOD(GetMaxLength)(DWORD* max_length) {
+ if (!max_length)
+ return E_POINTER;
+
+ *max_length = max_length_;
+ return S_OK;
+ }
+
+ STDMETHOD(SetLength)(DWORD length) {
+ if (length > max_length_)
+ return E_INVALIDARG;
+
+ length_ = length;
+ return S_OK;
+ }
+
+ // IUnknown implementation.
+ STDMETHOD_(ULONG, AddRef)() { return InterlockedIncrement(&ref_count_); }
+
+ STDMETHOD(QueryInterface)(REFIID riid, void** object) {
+ if (!object)
+ return E_POINTER;
+ if (riid != IID_IMediaBuffer && riid != IID_IUnknown)
+ return E_NOINTERFACE;
+
+ *object = static_cast<IMediaBuffer*>(this);
+ AddRef();
+ return S_OK;
+ }
+
+ STDMETHOD_(ULONG, Release)() {
+ LONG ref_count = InterlockedDecrement(&ref_count_);
+ if (ref_count == 0)
+ delete this;
+
+ return ref_count;
+ }
+
+ private:
+ virtual ~MediaBufferImpl() {}
+
+ std::unique_ptr<BYTE[]> data_;
+ DWORD length_ = 0;
+ const DWORD max_length_;
+ LONG ref_count_ = 0;
+};
+
} // namespace
WASAPIAudioInputStream::WASAPIAudioInputStream(
AudioManagerWin* manager,
const AudioParameters& params,
const std::string& device_id,
- const AudioManager::LogCallback& log_callback)
- : manager_(manager), device_id_(device_id), log_callback_(log_callback) {
+ const AudioManager::LogCallback& log_callback,
+ AudioManagerBase::VoiceProcessingMode voice_processing_mode)
+ : manager_(manager),
+ device_id_(device_id),
+ output_device_id_for_aec_(AudioDeviceDescription::kDefaultDeviceId),
+ log_callback_(log_callback),
+ use_voice_processing_(voice_processing_mode ==
+ AudioManagerBase::VoiceProcessingMode::kEnabled) {
DCHECK(manager_);
DCHECK(!device_id_.empty());
DCHECK(!log_callback_.is_null());
+ DVLOG_IF(1, use_voice_processing_) << "Using Windows voice capture DSP DMO.";
+
// Load the Avrt DLL if not already loaded. Required to support MMCSS.
bool avrt_init = avrt::Initialize();
DCHECK(avrt_init) << "Failed to load the Avrt.dll";
+ const SampleFormat kSampleFormat = kSampleFormatS16;
+
// Set up the desired output format specified by the client.
output_format_.wFormatTag = WAVE_FORMAT_PCM;
output_format_.nChannels = params.channels();
output_format_.nSamplesPerSec = params.sample_rate();
- output_format_.wBitsPerSample = params.bits_per_sample();
+ output_format_.wBitsPerSample = SampleFormatToBitsPerChannel(kSampleFormat);
output_format_.nBlockAlign =
(output_format_.wBitsPerSample / 8) * output_format_.nChannels;
output_format_.nAvgBytesPerSec =
@@ -99,13 +211,13 @@ WASAPIAudioInputStream::WASAPIAudioInputStream(
input_format_ = output_format_;
// Size in bytes of each audio frame.
- frame_size_ = input_format_.nBlockAlign;
+ frame_size_bytes_ = input_format_.nBlockAlign;
// Store size of audio packets which we expect to get from the audio
// endpoint device in each capture event.
- packet_size_frames_ = params.GetBytesPerBuffer() / input_format_.nBlockAlign;
- packet_size_bytes_ = params.GetBytesPerBuffer();
- DVLOG(1) << "Number of bytes per audio frame : " << frame_size_;
+ packet_size_bytes_ = params.GetBytesPerBuffer(kSampleFormat);
+ packet_size_frames_ = packet_size_bytes_ / input_format_.nBlockAlign;
+ DVLOG(1) << "Number of bytes per audio frame : " << frame_size_bytes_;
DVLOG(1) << "Number of audio frames per packet: " << packet_size_frames_;
// All events are auto-reset events and non-signaled initially.
@@ -143,6 +255,16 @@ bool WASAPIAudioInputStream::Open() {
return false;
}
+ // If voice processing is enabled, initialize the DMO that is used for it. The
+ // remainder of the function initializes an audio capture client (the normal
+ // case). Either the DMO or the capture client is used.
+ // TODO(grunell): Refactor out the audio capture client initialization to its
+ // own function.
+ if (use_voice_processing_) {
+ opened_ = InitializeDmo();
+ return opened_;
+ }
+
// Obtain an IAudioClient interface which enables us to create and initialize
// an audio stream between an audio application and the audio engine.
hr = endpoint_device_->Activate(__uuidof(IAudioClient), CLSCTX_INPROC_SERVER,
@@ -190,17 +312,27 @@ void WASAPIAudioInputStream::Start(AudioInputCallback* callback) {
if (started_)
return;
- if (device_id_ == AudioDeviceDescription::kLoopbackWithMuteDeviceId &&
- system_audio_volume_) {
- BOOL muted = false;
- system_audio_volume_->GetMute(&muted);
-
- // If the system audio is muted at the time of capturing, then no need to
- // mute it again, and later we do not unmute system audio when stopping
- // capturing.
- if (!muted) {
- system_audio_volume_->SetMute(true, NULL);
- mute_done_ = true;
+ // TODO(grunell): Refactor the |use_voice_processing_| conditions in this
+ // function to clean up the code.
+ if (use_voice_processing_) {
+ // Pre-fill render buffer with silence.
+ if (!CoreAudioUtil::FillRenderEndpointBufferWithSilence(
+ audio_client_for_render_.Get(), audio_render_client_.Get())) {
+ DLOG(WARNING) << "Failed to pre-fill render buffer with silence.";
+ }
+ } else {
+ if (device_id_ == AudioDeviceDescription::kLoopbackWithMuteDeviceId &&
+ system_audio_volume_) {
+ BOOL muted = false;
+ system_audio_volume_->GetMute(&muted);
+
+ // If the system audio is muted at the time of capturing, then no need to
+ // mute it again, and later we do not unmute system audio when stopping
+ // capturing.
+ if (!muted) {
+ system_audio_volume_->SetMute(true, NULL);
+ mute_done_ = true;
+ }
}
}
@@ -219,21 +351,31 @@ void WASAPIAudioInputStream::Start(AudioInputCallback* callback) {
base::SimpleThread::Options(base::ThreadPriority::REALTIME_AUDIO)));
capture_thread_->Start();
- // Start streaming data between the endpoint buffer and the audio engine.
- HRESULT hr = audio_client_->Start();
- if (FAILED(hr)) {
- DLOG(ERROR) << "Failed to start input streaming.";
- log_callback_.Run(base::StringPrintf(
- "WASAPIAIS::Start: Failed to start audio client, hresult = %#lx", hr));
- }
-
- if (SUCCEEDED(hr) && audio_render_client_for_loopback_.Get()) {
- hr = audio_render_client_for_loopback_->Start();
- if (FAILED(hr))
+ HRESULT hr = E_FAIL;
+ if (use_voice_processing_) {
+ hr = audio_client_for_render_->Start();
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to start output streaming: " << std::hex << hr
+ << ", proceeding without rendering.";
+ }
+ } else {
+ // Start streaming data between the endpoint buffer and the audio engine.
+ hr = audio_client_->Start();
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to start input streaming.";
log_callback_.Run(base::StringPrintf(
- "WASAPIAIS::Start: Failed to start render client for loopback, "
- "hresult = %#lx",
+ "WASAPIAIS::Start: Failed to start audio client, hresult = %#lx",
hr));
+ }
+
+ if (SUCCEEDED(hr) && audio_render_client_for_loopback_.Get()) {
+ hr = audio_render_client_for_loopback_->Start();
+ if (FAILED(hr))
+ log_callback_.Run(base::StringPrintf(
+ "WASAPIAIS::Start: Failed to start render client for loopback, "
+ "hresult = %#lx",
+ hr));
+ }
}
started_ = SUCCEEDED(hr);
@@ -264,10 +406,21 @@ void WASAPIAudioInputStream::Stop() {
SetEvent(stop_capture_event_.Get());
}
- // Stop the input audio streaming.
- HRESULT hr = audio_client_->Stop();
- if (FAILED(hr)) {
- LOG(ERROR) << "Failed to stop input streaming.";
+ // TODO(grunell): Refactor the |use_voice_processing_| conditions in this
+ // function to clean up the code.
+ if (use_voice_processing_) {
+ // Stop the render audio streaming. The input streaming needs no explicit
+ // stopping.
+ HRESULT hr = audio_client_for_render_->Stop();
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to stop output streaming.";
+ }
+ } else {
+ // Stop the input audio streaming.
+ HRESULT hr = audio_client_->Stop();
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to stop input streaming.";
+ }
}
// Wait until the thread completes and perform cleanup.
@@ -277,6 +430,12 @@ void WASAPIAudioInputStream::Stop() {
capture_thread_.reset();
}
+ if (use_voice_processing_) {
+ HRESULT hr = voice_capture_dmo_->FreeStreamingResources();
+ if (FAILED(hr))
+ DLOG(ERROR) << "Failed to free dmo resources.";
+ }
+
started_ = false;
sink_ = NULL;
}
@@ -363,6 +522,56 @@ bool WASAPIAudioInputStream::IsMuted() {
return is_muted != FALSE;
}
+void WASAPIAudioInputStream::SetOutputDeviceForAec(
+ const std::string& output_device_id) {
+ if (!use_voice_processing_)
+ return;
+
+ if (output_device_id == output_device_id_for_aec_)
+ return;
+
+ output_device_id_for_aec_ = output_device_id;
+
+ // Set devices.
+ Microsoft::WRL::ComPtr<IPropertyStore> ps;
+ HRESULT hr = voice_capture_dmo_->QueryInterface(IID_IPropertyStore, &ps);
+ if (FAILED(hr) || !ps) {
+ log_callback_.Run(base::StringPrintf(
+ "WASAPIAIS:SetOutputDeviceForAec: Getting DMO property store failed."));
+ return;
+ }
+
+ if (!SetDmoDevices(ps.Get())) {
+ log_callback_.Run(
+ "WASAPIAIS:SetOutputDeviceForAec: Setting device indices failed.");
+ return;
+ }
+
+ // Recreate the dummy render client on the new output.
+ hr = audio_client_for_render_->Stop();
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to stop output streaming.";
+ }
+
+ CreateDummyRenderClientsForDmo();
+
+ if (!CoreAudioUtil::FillRenderEndpointBufferWithSilence(
+ audio_client_for_render_.Get(), audio_render_client_.Get())) {
+ DLOG(WARNING) << "Failed to pre-fill render buffer with silence.";
+ }
+
+ hr = audio_client_for_render_->Start();
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to start output streaming: " << std::hex << hr
+ << ", proceeding without rendering.";
+ }
+
+ log_callback_.Run(base::StringPrintf(
+ "WASAPIAIS:SetOutputDeviceForAec: Successfully updated AEC output "
+ "device to %s",
+ output_device_id.c_str()));
+}
+
void WASAPIAudioInputStream::Run() {
ScopedCOMInitializer com_init(ScopedCOMInitializer::kMTA);
@@ -392,8 +601,8 @@ void WASAPIAudioInputStream::Run() {
// be able to buffer up data in cases where a conversion requires two audio
// buffers (and we need to be able to write to the third one).
size_t capture_buffer_size =
- std::max(2 * endpoint_buffer_size_frames_ * frame_size_,
- 2 * packet_size_frames_ * frame_size_);
+ std::max(2 * endpoint_buffer_size_frames_ * frame_size_bytes_,
+ 2 * packet_size_frames_ * frame_size_bytes_);
int buffers_required = capture_buffer_size / packet_size_bytes_;
if (converter_ && imperfect_buffer_size_conversion_)
++buffers_required;
@@ -404,53 +613,75 @@ void WASAPIAudioInputStream::Run() {
DVLOG(1) << "AudioBlockFifo buffer count: " << buffers_required;
- bool recording = true;
- bool error = false;
+ bool success =
+ use_voice_processing_ ? RunWithDmo() : RunWithAudioCaptureClient();
+
+ if (!success) {
+ // TODO(henrika): perhaps it worth improving the cleanup here by e.g.
+ // stopping the audio client, joining the thread etc.?
+ NOTREACHED() << "WASAPI capturing failed with error code "
+ << GetLastError();
+ }
+
+ // Disable MMCSS.
+ if (mm_task && !avrt::AvRevertMmThreadCharacteristics(mm_task)) {
+ PLOG(WARNING) << "Failed to disable MMCSS";
+ }
+
+ fifo_.reset();
+}
+
+bool WASAPIAudioInputStream::RunWithAudioCaptureClient() {
HANDLE wait_array[2] = {stop_capture_event_.Get(),
audio_samples_ready_event_.Get()};
- while (recording && !error) {
+ while (true) {
// Wait for a close-down event or a new capture event.
DWORD wait_result = WaitForMultipleObjects(2, wait_array, FALSE, INFINITE);
switch (wait_result) {
case WAIT_OBJECT_0 + 0:
// |stop_capture_event_| has been set.
- recording = false;
- break;
+ return true;
case WAIT_OBJECT_0 + 1:
// |audio_samples_ready_event_| has been set.
PullCaptureDataAndPushToSink();
break;
case WAIT_FAILED:
default:
- error = true;
- break;
+ return false;
}
}
- if (recording && error) {
- // TODO(henrika): perhaps it worth improving the cleanup here by e.g.
- // stopping the audio client, joining the thread etc.?
- NOTREACHED() << "WASAPI capturing failed with error code "
- << GetLastError();
- }
+ return false;
+}
- // Disable MMCSS.
- if (mm_task && !avrt::AvRevertMmThreadCharacteristics(mm_task)) {
- PLOG(WARNING) << "Failed to disable MMCSS";
+bool WASAPIAudioInputStream::RunWithDmo() {
+ while (true) {
+ // Poll every 5 ms, or wake up on capture stop signal.
+ DWORD wait_result = WaitForSingleObject(stop_capture_event_.Get(), 5);
+ switch (wait_result) {
+ case WAIT_OBJECT_0:
+ // |stop_capture_event_| has been set.
+ return true;
+ case WAIT_TIMEOUT:
+ PullDmoCaptureDataAndPushToSink();
+ if (!CoreAudioUtil::FillRenderEndpointBufferWithSilence(
+ audio_client_for_render_.Get(), audio_render_client_.Get())) {
+ DLOG(WARNING) << "Failed to fill render buffer with silence.";
+ }
+ break;
+ case WAIT_FAILED:
+ default:
+ return false;
+ }
}
- fifo_.reset();
+ return false;
}
void WASAPIAudioInputStream::PullCaptureDataAndPushToSink() {
- TRACE_EVENT1("audio", "WASAPIAudioInputStream::Run_0", "sample rate",
- input_format_.nSamplesPerSec);
-
- Microsoft::WRL::ComPtr<IAudioClock> audio_clock;
- audio_client_->GetService(IID_PPV_ARGS(&audio_clock));
- if (!audio_clock)
- LOG(WARNING) << "IAudioClock unavailable, capture times may be inaccurate.";
+ TRACE_EVENT1("audio", "WASAPIAudioInputStream::PullCaptureDataAndPushToSink",
+ "sample rate", input_format_.nSamplesPerSec);
// Pull data from the capture endpoint buffer until it's empty or an error
// occurs.
@@ -492,12 +723,14 @@ void WASAPIAudioInputStream::PullCaptureDataAndPushToSink() {
// TODO(dalecurtis, olka, grunell): Is this ever false? If it is, should we
// handle |flags & AUDCLNT_BUFFERFLAGS_TIMESTAMP_ERROR|?
- if (audio_clock) {
+ if (audio_clock_) {
// The reported timestamp from GetBuffer is not as reliable as the clock
// from the client. We've seen timestamps reported for USB audio devices,
// be off by several days. Furthermore we've seen them jump back in time
// every 2 seconds or so.
- audio_clock->GetPosition(&device_position, &capture_time_100ns);
+ // TODO(grunell): Using the audio clock as capture time for the currently
+ // processed buffer seems incorrect. http://crbug.com/825744.
+ audio_clock_->GetPosition(&device_position, &capture_time_100ns);
}
base::TimeTicks capture_time;
@@ -562,6 +795,106 @@ void WASAPIAudioInputStream::PullCaptureDataAndPushToSink() {
} // while (true)
}
+void WASAPIAudioInputStream::PullDmoCaptureDataAndPushToSink() {
+ TRACE_EVENT1("audio",
+ "WASAPIAudioInputStream::PullDmoCaptureDataAndPushToSink",
+ "sample rate", input_format_.nSamplesPerSec);
+
+ // Pull data from the capture endpoint buffer until it's empty or an error
+ // occurs.
+ while (true) {
+ DWORD status = 0;
+ DMO_OUTPUT_DATA_BUFFER data_buffer = {0};
+ data_buffer.pBuffer = media_buffer_.Get();
+
+ // Get processed capture data from the DMO.
+ HRESULT hr =
+ voice_capture_dmo_->ProcessOutput(0, // dwFlags
+ 1, // cOutputBufferCount
+ &data_buffer,
+ &status); // Must be ignored.
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "DMO ProcessOutput failed, hr = 0x" << std::hex << hr;
+ break;
+ }
+
+ BYTE* data;
+ ULONG data_length = 0;
+ // Get a pointer to the data buffer. This should be valid until the next
+ // call to ProcessOutput.
+ hr = media_buffer_->GetBufferAndLength(&data, &data_length);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Could not get buffer, hr = 0x" << std::hex << hr;
+ break;
+ }
+
+ if (data_length > 0) {
+ const int samples_produced = data_length / frame_size_bytes_;
+
+ base::TimeTicks capture_time;
+ if (data_buffer.dwStatus & DMO_OUTPUT_DATA_BUFFERF_TIME &&
+ data_buffer.rtTimestamp > 0) {
+ // See conversion notes on |capture_time_100ns| in
+ // PullCaptureDataAndPushToSink().
+ capture_time +=
+ base::TimeDelta::FromMicroseconds(data_buffer.rtTimestamp / 10.0);
+ } else {
+ // We may not get the timestamp from ProcessOutput(), fall back on
+ // current timestamp.
+ capture_time = base::TimeTicks::Now();
+ }
+
+ // Adjust |capture_time| for the FIFO before pushing.
+ capture_time -= AudioTimestampHelper::FramesToTime(
+ fifo_->GetAvailableFrames(), input_format_.nSamplesPerSec);
+
+ fifo_->Push(data, samples_produced, input_format_.wBitsPerSample / 8);
+
+ // Reset length to indicate buffer availability.
+ hr = media_buffer_->SetLength(0);
+ if (FAILED(hr))
+ DLOG(ERROR) << "Could not reset length, hr = 0x" << std::hex << hr;
+
+ // Get a cached AGC volume level which is updated once every second on the
+ // audio manager thread. Note that, |volume| is also updated each time
+ // SetVolume() is called through IPC by the render-side AGC.
+ double volume = 0.0;
+ GetAgcVolume(&volume);
+
+ while (fifo_->available_blocks()) {
+ if (converter_) {
+ if (imperfect_buffer_size_conversion_ &&
+ fifo_->available_blocks() == 1) {
+ // Special case. We need to buffer up more audio before we can
+ // convert or else we'll suffer an underrun.
+ // TODO(grunell): Verify this is really true.
+ break;
+ }
+ converter_->Convert(convert_bus_.get());
+ sink_->OnData(convert_bus_.get(), capture_time, volume);
+
+ // Move the capture time forward for each vended block.
+ capture_time += AudioTimestampHelper::FramesToTime(
+ convert_bus_->frames(), output_format_.nSamplesPerSec);
+ } else {
+ sink_->OnData(fifo_->Consume(), capture_time, volume);
+
+ // Move the capture time forward for each vended block.
+ capture_time += AudioTimestampHelper::FramesToTime(
+ packet_size_frames_, input_format_.nSamplesPerSec);
+ }
+ }
+ } // if (data_length > 0)
+
+ if (!(data_buffer.dwStatus & DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE)) {
+ // The DMO cannot currently produce more data. This is the normal case;
+ // otherwise it means the DMO had more than 10 ms of data available and
+ // ProcessOutput should be called again.
+ break;
+ }
+ } // while (true)
+}
+
void WASAPIAudioInputStream::HandleError(HRESULT err) {
NOTREACHED() << "Error code: " << err;
if (sink_)
@@ -729,45 +1062,7 @@ bool WASAPIAudioInputStream::DesiredFormatIsSupported(HRESULT* hr) {
<< "\nblock align: " << input_format_.nBlockAlign
<< "\navg bytes per sec: " << input_format_.nAvgBytesPerSec;
- // Ideally, we want a 1:1 ratio between the buffers we get and the buffers
- // we give to OnData so that each buffer we receive from the OS can be
- // directly converted to a buffer that matches with what was asked for.
- const double buffer_ratio = output_format_.nSamplesPerSec /
- static_cast<double>(packet_size_frames_);
- double new_frames_per_buffer =
- input_format_.nSamplesPerSec / buffer_ratio;
-
- const auto input_layout = GuessChannelLayout(input_format_.nChannels);
- DCHECK_NE(CHANNEL_LAYOUT_UNSUPPORTED, input_layout);
- const auto output_layout = GuessChannelLayout(output_format_.nChannels);
- DCHECK_NE(CHANNEL_LAYOUT_UNSUPPORTED, output_layout);
-
- const AudioParameters input(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- input_layout, input_format_.nSamplesPerSec,
- input_format_.wBitsPerSample,
- static_cast<int>(new_frames_per_buffer));
-
- const AudioParameters output(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- output_layout, output_format_.nSamplesPerSec,
- output_format_.wBitsPerSample,
- packet_size_frames_);
-
- converter_.reset(new AudioConverter(input, output, false));
- converter_->AddInput(this);
- converter_->PrimeWithSilence();
- convert_bus_ = AudioBus::Create(output);
-
- // Update our packet size assumptions based on the new format.
- const auto new_bytes_per_buffer =
- static_cast<int>(new_frames_per_buffer) * input_format_.nBlockAlign;
- packet_size_frames_ = new_bytes_per_buffer / input_format_.nBlockAlign;
- packet_size_bytes_ = new_bytes_per_buffer;
- frame_size_ = input_format_.nBlockAlign;
-
- imperfect_buffer_size_conversion_ =
- std::modf(new_frames_per_buffer, &new_frames_per_buffer) != 0.0;
- DVLOG_IF(1, imperfect_buffer_size_conversion_)
- << "Audio capture data conversion: Need to inject fifo";
+ SetupConverterAndStoreFormatInfo();
// Indicate that we're good to go with a close match.
hresult = S_OK;
@@ -782,6 +1077,45 @@ bool WASAPIAudioInputStream::DesiredFormatIsSupported(HRESULT* hr) {
return (hresult == S_OK);
}
+void WASAPIAudioInputStream::SetupConverterAndStoreFormatInfo() {
+ // Ideally, we want a 1:1 ratio between the buffers we get and the buffers
+ // we give to OnData so that each buffer we receive from the OS can be
+ // directly converted to a buffer that matches with what was asked for.
+ const double buffer_ratio =
+ output_format_.nSamplesPerSec / static_cast<double>(packet_size_frames_);
+ double new_frames_per_buffer = input_format_.nSamplesPerSec / buffer_ratio;
+
+ const auto input_layout = GuessChannelLayout(input_format_.nChannels);
+ DCHECK_NE(CHANNEL_LAYOUT_UNSUPPORTED, input_layout);
+ const auto output_layout = GuessChannelLayout(output_format_.nChannels);
+ DCHECK_NE(CHANNEL_LAYOUT_UNSUPPORTED, output_layout);
+
+ const AudioParameters input(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ input_layout, input_format_.nSamplesPerSec,
+ static_cast<int>(new_frames_per_buffer));
+
+ const AudioParameters output(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ output_layout, output_format_.nSamplesPerSec,
+ packet_size_frames_);
+
+ converter_.reset(new AudioConverter(input, output, false));
+ converter_->AddInput(this);
+ converter_->PrimeWithSilence();
+ convert_bus_ = AudioBus::Create(output);
+
+ // Update our packet size assumptions based on the new format.
+ const auto new_bytes_per_buffer =
+ static_cast<int>(new_frames_per_buffer) * input_format_.nBlockAlign;
+ packet_size_frames_ = new_bytes_per_buffer / input_format_.nBlockAlign;
+ packet_size_bytes_ = new_bytes_per_buffer;
+ frame_size_bytes_ = input_format_.nBlockAlign;
+
+ imperfect_buffer_size_conversion_ =
+ std::modf(new_frames_per_buffer, &new_frames_per_buffer) != 0.0;
+ DVLOG_IF(1, imperfect_buffer_size_conversion_)
+ << "Audio capture data conversion: Need to inject fifo";
+}
+
HRESULT WASAPIAudioInputStream::InitializeAudioEngine() {
DCHECK_EQ(OPEN_RESULT_OK, open_result_);
DWORD flags;
@@ -932,6 +1266,10 @@ HRESULT WASAPIAudioInputStream::InitializeAudioEngine() {
if (FAILED(hr))
open_result_ = OPEN_RESULT_NO_AUDIO_VOLUME;
+ audio_client_->GetService(IID_PPV_ARGS(&audio_clock_));
+ if (!audio_clock_)
+ LOG(WARNING) << "IAudioClock unavailable, capture times may be inaccurate.";
+
return hr;
}
@@ -978,6 +1316,223 @@ void WASAPIAudioInputStream::MaybeReportFormatRelatedInitError(
FormatRelatedInitError::kCount);
}
+bool WASAPIAudioInputStream::InitializeDmo() {
+ HRESULT hr = ::CoCreateInstance(CLSID_CWMAudioAEC, NULL, CLSCTX_INPROC_SERVER,
+ IID_IMediaObject, &voice_capture_dmo_);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Creating DMO failed.";
+ return false;
+ }
+
+ if (!SetDmoProperties())
+ return false;
+
+ if (!SetDmoFormat())
+ return false;
+
+ hr = voice_capture_dmo_->AllocateStreamingResources();
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Allocating DMO resources failed.";
+ return false;
+ }
+
+ SetupConverterAndStoreFormatInfo();
+
+ media_buffer_ =
+ new MediaBufferImpl(endpoint_buffer_size_frames_ * frame_size_bytes_);
+
+ if (!CreateDummyRenderClientsForDmo())
+ return false;
+
+ // Get volume interface.
+ Microsoft::WRL::ComPtr<IAudioSessionManager> audio_session_manager;
+ hr = endpoint_device_->Activate(__uuidof(IAudioSessionManager),
+ CLSCTX_INPROC_SERVER, NULL,
+ &audio_session_manager);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Obtaining audio session manager failed.";
+ return false;
+ }
+ hr = audio_session_manager->GetSimpleAudioVolume(
+ NULL, // AudioSessionGuid. NULL for default session.
+ FALSE, // CrossProcessSession.
+ &simple_audio_volume_);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Obtaining audio volume interface failed.";
+ return false;
+ }
+
+ return true;
+}
+
+bool WASAPIAudioInputStream::SetDmoProperties() {
+ Microsoft::WRL::ComPtr<IPropertyStore> ps;
+ HRESULT hr = voice_capture_dmo_->QueryInterface(IID_IPropertyStore, &ps);
+ if (FAILED(hr) || !ps) {
+ DLOG(ERROR) << "Getting DMO property store failed.";
+ return false;
+ }
+
+ // Set devices.
+ if (!SetDmoDevices(ps.Get())) {
+ DLOG(ERROR) << "Setting device indices failed.";
+ return false;
+ }
+
+ // Set DMO mode to AEC only.
+ if (FAILED(CoreAudioUtil::SetVtI4Property(
+ ps.Get(), MFPKEY_WMAAECMA_SYSTEM_MODE, SINGLE_CHANNEL_AEC))) {
+ DLOG(ERROR) << "Setting DMO system mode failed.";
+ return false;
+ }
+
+ // Enable the feature mode. This lets us override the default processing
+ // settings below.
+ if (FAILED(CoreAudioUtil::SetBoolProperty(
+ ps.Get(), MFPKEY_WMAAECMA_FEATURE_MODE, VARIANT_TRUE))) {
+ DLOG(ERROR) << "Setting DMO feature mode failed.";
+ return false;
+ }
+
+ // Disable analog AGC (default enabled).
+ if (FAILED(CoreAudioUtil::SetBoolProperty(
+ ps.Get(), MFPKEY_WMAAECMA_MIC_GAIN_BOUNDER, VARIANT_FALSE))) {
+ DLOG(ERROR) << "Setting DMO mic gain bounder failed.";
+ return false;
+ }
+
+ // Disable noise suppression (default enabled).
+ if (FAILED(CoreAudioUtil::SetVtI4Property(ps.Get(), MFPKEY_WMAAECMA_FEATR_NS,
+ 0))) {
+ DLOG(ERROR) << "Disabling DMO NS failed.";
+ return false;
+ }
+
+ return true;
+}
+
+bool WASAPIAudioInputStream::SetDmoFormat() {
+ DMO_MEDIA_TYPE mt; // Media type.
+ mt.majortype = MEDIATYPE_Audio;
+ mt.subtype = MEDIASUBTYPE_PCM;
+ mt.lSampleSize = 0;
+ mt.bFixedSizeSamples = TRUE;
+ mt.bTemporalCompression = FALSE;
+ mt.formattype = FORMAT_WaveFormatEx;
+
+ HRESULT hr = MoInitMediaType(&mt, sizeof(WAVEFORMATEX));
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Init media type for DMO failed.";
+ return false;
+ }
+
+ WAVEFORMATEX* dmo_output_format =
+ reinterpret_cast<WAVEFORMATEX*>(mt.pbFormat);
+ dmo_output_format->wFormatTag = WAVE_FORMAT_PCM;
+ dmo_output_format->nChannels = 1;
+ dmo_output_format->nSamplesPerSec = 16000;
+ dmo_output_format->nAvgBytesPerSec = 32000;
+ dmo_output_format->nBlockAlign = 2;
+ dmo_output_format->wBitsPerSample = 16;
+ dmo_output_format->cbSize = 0;
+
+ DCHECK(IsSupportedFormatForConversion(*dmo_output_format));
+
+ // Store the format used.
+ input_format_.wFormatTag = dmo_output_format->wFormatTag;
+ input_format_.nChannels = dmo_output_format->nChannels;
+ input_format_.nSamplesPerSec = dmo_output_format->nSamplesPerSec;
+ input_format_.wBitsPerSample = dmo_output_format->wBitsPerSample;
+ input_format_.nBlockAlign = dmo_output_format->nBlockAlign;
+ input_format_.nAvgBytesPerSec = dmo_output_format->nAvgBytesPerSec;
+ input_format_.cbSize = dmo_output_format->cbSize;
+
+ hr = voice_capture_dmo_->SetOutputType(0, &mt, 0);
+ MoFreeMediaType(&mt);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Setting DMO output type failed.";
+ return false;
+ }
+
+ // We use 10 ms buffer size for the DMO.
+ endpoint_buffer_size_frames_ = input_format_.nSamplesPerSec / 100;
+
+ return true;
+}
+
+bool WASAPIAudioInputStream::SetDmoDevices(IPropertyStore* ps) {
+ // Look up the input device's index.
+ const base::Optional<WORD> input_device_index =
+ GetAudioDeviceCollectionIndexFromId(device_id_, eCapture);
+
+ if (!input_device_index) {
+ log_callback_.Run(
+ base::StringPrintf("WASAPIAIS:SetDmoDevices: Could not "
+ "resolve input device index for %s",
+ device_id_.c_str()));
+ return false;
+ }
+
+ // Look up the output device's index.
+ const base::Optional<WORD> output_device_index =
+ GetAudioDeviceCollectionIndexFromId(output_device_id_for_aec_, eRender);
+ if (!output_device_index) {
+ log_callback_.Run(
+ base::StringPrintf("WASAPIAIS:SetDmoDevices: Could not "
+ "resolve output device index for %s",
+ output_device_id_for_aec_.c_str()));
+ return false;
+ }
+
+ // The DEVICE_INDEXES property packs the input and output indices into the
+ // upper and lower halves of a LONG.
+ LONG device_index_value =
+ (static_cast<ULONG>(*output_device_index) << 16) +
+ (static_cast<ULONG>(*input_device_index) & 0x0000ffff);
+ return !FAILED(CoreAudioUtil::SetVtI4Property(
+ ps, MFPKEY_WMAAECMA_DEVICE_INDEXES, device_index_value));
+}
+
+bool WASAPIAudioInputStream::CreateDummyRenderClientsForDmo() {
+ Microsoft::WRL::ComPtr<IAudioClient> audio_client(CoreAudioUtil::CreateClient(
+ output_device_id_for_aec_, eRender, eConsole));
+ if (!audio_client.Get()) {
+ DLOG(ERROR) << "Failed to create audio client for dummy rendering for DMO.";
+ return false;
+ }
+
+ WAVEFORMATPCMEX mix_format;
+ HRESULT hr =
+ CoreAudioUtil::GetSharedModeMixFormat(audio_client.Get(), &mix_format);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to get mix format.";
+ return false;
+ }
+
+ hr = audio_client->Initialize(AUDCLNT_SHAREMODE_SHARED,
+ 0, // Stream flags
+ 0, // Buffer duration
+ 0, // Device period
+ reinterpret_cast<WAVEFORMATEX*>(&mix_format),
+ NULL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to initalize audio client for rendering.";
+ return false;
+ }
+
+ Microsoft::WRL::ComPtr<IAudioRenderClient> audio_render_client =
+ CoreAudioUtil::CreateRenderClient(audio_client.Get());
+ if (!audio_render_client.Get()) {
+ DLOG(ERROR) << "Failed to create audio render client.";
+ return false;
+ }
+
+ audio_client_for_render_ = audio_client;
+ audio_render_client_ = audio_render_client;
+
+ return true;
+}
+
double WASAPIAudioInputStream::ProvideInput(AudioBus* audio_bus,
uint32_t frames_delayed) {
fifo_->Consume()->CopyTo(audio_bus);
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.h b/chromium/media/audio/win/audio_low_latency_input_win.h
index 99b0c7a5962..262c8813fa3 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.h
+++ b/chromium/media/audio/win/audio_low_latency_input_win.h
@@ -58,6 +58,7 @@
#include <Audioclient.h>
#include <MMDeviceAPI.h>
+#include <dmo.h>
#include <endpointvolume.h>
#include <stddef.h>
#include <stdint.h>
@@ -75,7 +76,7 @@
#include "base/win/scoped_com_initializer.h"
#include "base/win/scoped_handle.h"
#include "media/audio/agc_audio_stream.h"
-#include "media/audio/audio_manager.h"
+#include "media/audio/win/audio_manager_win.h"
#include "media/base/audio_converter.h"
#include "media/base/audio_parameters.h"
#include "media/base/media_export.h"
@@ -84,7 +85,6 @@ namespace media {
class AudioBlockFifo;
class AudioBus;
-class AudioManagerWin;
// AudioInputStream implementation using Windows Core Audio APIs.
class MEDIA_EXPORT WASAPIAudioInputStream
@@ -94,10 +94,12 @@ class MEDIA_EXPORT WASAPIAudioInputStream
public:
// The ctor takes all the usual parameters, plus |manager| which is the
// the audio manager who is creating this object.
- WASAPIAudioInputStream(AudioManagerWin* manager,
- const AudioParameters& params,
- const std::string& device_id,
- const AudioManager::LogCallback& log_callback);
+ WASAPIAudioInputStream(
+ AudioManagerWin* manager,
+ const AudioParameters& params,
+ const std::string& device_id,
+ const AudioManager::LogCallback& log_callback,
+ AudioManagerBase::VoiceProcessingMode voice_processing_mode);
// The dtor is typically called by the AudioManager only and it is usually
// triggered by calling AudioInputStream::Close().
@@ -112,20 +114,32 @@ class MEDIA_EXPORT WASAPIAudioInputStream
void SetVolume(double volume) override;
double GetVolume() override;
bool IsMuted() override;
+ void SetOutputDeviceForAec(const std::string& output_device_id) override;
bool started() const { return started_; }
private:
- // DelegateSimpleThread::Delegate implementation.
+ // DelegateSimpleThread::Delegate implementation. Calls either
+ // RunWithAudioCaptureClient() or RunWithDmo().
void Run() override;
- // Pulls capture data from the endpoint device and pushes it to the sink.
+ // Waits for an event that the audio capture client has data ready.
+ bool RunWithAudioCaptureClient();
+
+ // Polls the DMO (voice processing component) for data every 5 ms.
+ bool RunWithDmo();
+
+ // Pulls capture data from the audio capture client and pushes it to the sink.
void PullCaptureDataAndPushToSink();
+ // Pulls capture data from the DMO and pushes it to the sink.
+ void PullDmoCaptureDataAndPushToSink();
+
// Issues the OnError() callback to the |sink_|.
void HandleError(HRESULT err);
- // The Open() method is divided into these sub methods.
+ // The Open() method is divided into these sub methods when not using the
+ // voice processing DMO.
HRESULT SetCaptureDevice();
HRESULT GetAudioEngineStreamFormat();
// Returns whether the desired format is supported or not and writes the
@@ -133,14 +147,23 @@ class MEDIA_EXPORT WASAPIAudioInputStream
// function returns false with |*hr| == S_FALSE, the OS supports a closest
// match but we don't support conversion to it.
bool DesiredFormatIsSupported(HRESULT* hr);
+ void SetupConverterAndStoreFormatInfo();
HRESULT InitializeAudioEngine();
void ReportOpenResult(HRESULT hr) const;
-
// Reports stats for format related audio client initilization
// (IAudioClient::Initialize) errors, that is if |hr| is an error related to
// the format.
void MaybeReportFormatRelatedInitError(HRESULT hr) const;
+ // The Open() method is divided into these sub methods when using the voice
+ // processing DMO. In addition, SetupConverterAndStoreFormatInfo() above is
+ // also called.
+ bool InitializeDmo();
+ bool SetDmoProperties();
+ bool SetDmoFormat();
+ bool SetDmoDevices(IPropertyStore* ps);
+ bool CreateDummyRenderClientsForDmo();
+
// AudioConverter::InputCallback implementation.
double ProvideInput(AudioBus* audio_bus, uint32_t frames_delayed) override;
@@ -188,30 +211,35 @@ class MEDIA_EXPORT WASAPIAudioInputStream
std::unique_ptr<base::DelegateSimpleThread> capture_thread_;
// Contains the desired output audio format which is set up at construction,
- // that is the audio format this class should output data to the sink in.
+ // that is the audio format this class should output data to the sink in, that
+ // is the format after the converter.
WAVEFORMATEX output_format_;
// Contains the audio format we get data from the audio engine in. Set to
// |output_format_| at construction and might be changed to a close match
- // if the audio engine doesn't support the originally set format.
+ // if the audio engine doesn't support the originally set format, or to the
+ // format the voice capture DMO outputs if it's used. Note that this is also
+ // the format after the fifo, i.e. the input format to the converter if any.
WAVEFORMATEX input_format_;
bool opened_ = false;
bool started_ = false;
StreamOpenResult open_result_ = OPEN_RESULT_OK;
- // Size in bytes of each audio frame (4 bytes for 16-bit stereo PCM)
- size_t frame_size_ = 0;
+ // Size in bytes of each audio frame before the converter (4 bytes for 16-bit
+ // stereo PCM). Note that this is the same before and after the fifo.
+ size_t frame_size_bytes_ = 0;
- // Size in audio frames of each audio packet where an audio packet
- // is defined as the block of data which the user received in each
- // OnData() callback.
+ // Size in audio frames of each audio packet (buffer) after the fifo but
+ // before the converter.
size_t packet_size_frames_ = 0;
- // Size in bytes of each audio packet.
+ // Size in bytes of each audio packet (buffer) after the fifo but before the
+ // converter.
size_t packet_size_bytes_ = 0;
- // Length of the audio endpoint buffer.
+ // Length of the audio endpoint buffer, or the buffer size used for the DMO.
+ // That is, the buffer size before the fifo.
uint32_t endpoint_buffer_size_frames_ = 0;
// Contains the unique name of the selected endpoint device.
@@ -219,6 +247,10 @@ class MEDIA_EXPORT WASAPIAudioInputStream
// device role and is not a valid ID as such.
std::string device_id_;
+ // Contains the unique name of the output device from which to cancel echo, in
+ // case voice processing is enabled, i.e. |use_voice_processing_| is true.
+ std::string output_device_id_for_aec_;
+
// Pointer to the object that will receive the recorded audio samples.
AudioInputCallback* sink_ = nullptr;
@@ -245,6 +277,11 @@ class MEDIA_EXPORT WASAPIAudioInputStream
// from a capture endpoint buffer.
Microsoft::WRL::ComPtr<IAudioCaptureClient> audio_capture_client_;
+ // The IAudioClock interface is used to get the current timestamp, as the
+ // timestamp from IAudioCaptureClient::GetBuffer can be unreliable with some
+ // devices.
+ Microsoft::WRL::ComPtr<IAudioClock> audio_clock_;
+
// The ISimpleAudioVolume interface enables a client to control the
// master volume level of an audio session.
// The volume-level is a value in the range 0.0 to 1.0.
@@ -289,6 +326,22 @@ class MEDIA_EXPORT WASAPIAudioInputStream
UINT64 total_lost_frames_ = 0;
UINT64 largest_glitch_frames_ = 0;
+ // Indicates if the voice processing DMO should be used.
+ bool use_voice_processing_ = false;
+
+ // The voice processing DMO and its data buffer.
+ Microsoft::WRL::ComPtr<IMediaObject> voice_capture_dmo_;
+ Microsoft::WRL::ComPtr<IMediaBuffer> media_buffer_;
+
+ // Dummy rendering when using the DMO. The DMO requires audio rendering to the
+ // device it's set up to use, otherwise it won't produce any capture audio
+ // data. Normally, when the DMO is used there's a render stream, but it's not
+ // guaranteed so we need to support the lack of it. We do this by always
+ // opening a render client and rendering silence to it when the DMO is
+ // running.
+ Microsoft::WRL::ComPtr<IAudioClient> audio_client_for_render_;
+ Microsoft::WRL::ComPtr<IAudioRenderClient> audio_render_client_;
+
SEQUENCE_CHECKER(sequence_checker_);
DISALLOW_COPY_AND_ASSIGN(WASAPIAudioInputStream);
diff --git a/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc b/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
index 3a25097d103..27b0f65fbf8 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
@@ -43,6 +43,8 @@ namespace media {
namespace {
+constexpr SampleFormat kSampleFormat = kSampleFormatS16;
+
void LogCallbackDummy(const std::string& /* message */) {}
} // namespace
@@ -101,17 +103,14 @@ class WriteToFileAudioSink : public AudioInputStream::AudioInputCallback {
// 2 bytes per sample, 2 channels, 10ms @ 48kHz, 10 seconds <=> 1920000 bytes.
static const size_t kMaxBufferSize = 2 * 2 * 480 * 100 * 10;
- explicit WriteToFileAudioSink(const char* file_name, int bits_per_sample)
- : bits_per_sample_(bits_per_sample),
- buffer_(0, kMaxBufferSize),
- bytes_to_write_(0) {
+ explicit WriteToFileAudioSink(const char* file_name)
+ : buffer_(0, kMaxBufferSize), bytes_to_write_(0) {
base::FilePath file_path;
- EXPECT_TRUE(PathService::Get(base::DIR_EXE, &file_path));
+ EXPECT_TRUE(base::PathService::Get(base::DIR_EXE, &file_path));
file_path = file_path.AppendASCII(file_name);
binary_file_ = base::OpenFile(file_path, "wb");
DLOG_IF(ERROR, !binary_file_) << "Failed to open binary PCM data file.";
VLOG(0) << ">> Output file: " << file_path.value() << " has been created.";
- VLOG(0) << ">> bits_per_sample_:" << bits_per_sample_;
}
~WriteToFileAudioSink() override {
@@ -136,11 +135,11 @@ class WriteToFileAudioSink : public AudioInputStream::AudioInputCallback {
void OnData(const AudioBus* src,
base::TimeTicks capture_time,
double volume) override {
- EXPECT_EQ(bits_per_sample_, 16);
const int num_samples = src->frames() * src->channels();
auto interleaved = std::make_unique<int16_t[]>(num_samples);
const int bytes_per_sample = sizeof(interleaved[0]);
- src->ToInterleaved(src->frames(), bytes_per_sample, interleaved.get());
+ src->ToInterleaved<SignedInt16SampleTypeTraits>(src->frames(),
+ interleaved.get());
// Store data data in a temporary buffer to avoid making blocking
// fwrite() calls in the audio callback. The complete buffer will be
@@ -154,7 +153,6 @@ class WriteToFileAudioSink : public AudioInputStream::AudioInputCallback {
void OnError() override {}
private:
- int bits_per_sample_;
media::SeekableBuffer buffer_;
FILE* binary_file_;
size_t bytes_to_write_;
@@ -172,11 +170,16 @@ static bool HasCoreAudioAndInputDevices(AudioManager* audio_man) {
// also allows the user to modify the default settings.
class AudioInputStreamWrapper {
public:
- explicit AudioInputStreamWrapper(AudioManager* audio_manager)
+ explicit AudioInputStreamWrapper(AudioManager* audio_manager,
+ bool use_voice_processing)
: audio_man_(audio_manager) {
EXPECT_TRUE(SUCCEEDED(CoreAudioUtil::GetPreferredAudioParameters(
AudioDeviceDescription::kDefaultDeviceId, false, &default_params_)));
EXPECT_EQ(format(), AudioParameters::AUDIO_PCM_LOW_LATENCY);
+ if (use_voice_processing) {
+ default_params_.set_effects(default_params_.effects() |
+ AudioParameters::ECHO_CANCELLER);
+ }
frames_per_buffer_ = default_params_.frames_per_buffer();
}
@@ -203,7 +206,6 @@ class AudioInputStreamWrapper {
int channels() const {
return ChannelLayoutToChannelCount(default_params_.channel_layout());
}
- int bits_per_sample() const { return default_params_.bits_per_sample(); }
int sample_rate() const { return default_params_.sample_rate(); }
int frames_per_buffer() const { return frames_per_buffer_; }
@@ -225,8 +227,9 @@ class AudioInputStreamWrapper {
// Convenience method which creates a default AudioInputStream object.
static AudioInputStream* CreateDefaultAudioInputStream(
- AudioManager* audio_manager) {
- AudioInputStreamWrapper aisw(audio_manager);
+ AudioManager* audio_manager,
+ bool use_voice_processing) {
+ AudioInputStreamWrapper aisw(audio_manager, use_voice_processing);
AudioInputStream* ais = aisw.Create();
return ais;
}
@@ -261,7 +264,9 @@ class ScopedAudioInputStream {
DISALLOW_COPY_AND_ASSIGN(ScopedAudioInputStream);
};
-class WinAudioInputTest : public ::testing::Test {
+// The test class. The boolean parameter specifies if voice processing should be
+// used.
+class WinAudioInputTest : public ::testing::TestWithParam<bool> {
public:
WinAudioInputTest() {
audio_manager_ =
@@ -296,27 +301,27 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamHardwareSampleRate) {
}
// Test Create(), Close() calling sequence.
-TEST_F(WinAudioInputTest, WASAPIAudioInputStreamCreateAndClose) {
+TEST_P(WinAudioInputTest, WASAPIAudioInputStreamCreateAndClose) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
ScopedAudioInputStream ais(
- CreateDefaultAudioInputStream(audio_manager_.get()));
+ CreateDefaultAudioInputStream(audio_manager_.get(), GetParam()));
ais.Close();
}
// Test Open(), Close() calling sequence.
-TEST_F(WinAudioInputTest, WASAPIAudioInputStreamOpenAndClose) {
+TEST_P(WinAudioInputTest, WASAPIAudioInputStreamOpenAndClose) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
ScopedAudioInputStream ais(
- CreateDefaultAudioInputStream(audio_manager_.get()));
+ CreateDefaultAudioInputStream(audio_manager_.get(), GetParam()));
EXPECT_TRUE(ais->Open());
ais.Close();
}
// Test Open(), Start(), Close() calling sequence.
-TEST_F(WinAudioInputTest, WASAPIAudioInputStreamOpenStartAndClose) {
+TEST_P(WinAudioInputTest, WASAPIAudioInputStreamOpenStartAndClose) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
ScopedAudioInputStream ais(
- CreateDefaultAudioInputStream(audio_manager_.get()));
+ CreateDefaultAudioInputStream(audio_manager_.get(), GetParam()));
EXPECT_TRUE(ais->Open());
MockAudioInputCallback sink;
ais->Start(&sink);
@@ -324,10 +329,10 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamOpenStartAndClose) {
}
// Test Open(), Start(), Stop(), Close() calling sequence.
-TEST_F(WinAudioInputTest, WASAPIAudioInputStreamOpenStartStopAndClose) {
+TEST_P(WinAudioInputTest, WASAPIAudioInputStreamOpenStartStopAndClose) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
ScopedAudioInputStream ais(
- CreateDefaultAudioInputStream(audio_manager_.get()));
+ CreateDefaultAudioInputStream(audio_manager_.get(), GetParam()));
EXPECT_TRUE(ais->Open());
MockAudioInputCallback sink;
ais->Start(&sink);
@@ -336,10 +341,10 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamOpenStartStopAndClose) {
}
// Test some additional calling sequences.
-TEST_F(WinAudioInputTest, WASAPIAudioInputStreamMiscCallingSequences) {
+TEST_P(WinAudioInputTest, WASAPIAudioInputStreamMiscCallingSequences) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
ScopedAudioInputStream ais(
- CreateDefaultAudioInputStream(audio_manager_.get()));
+ CreateDefaultAudioInputStream(audio_manager_.get(), GetParam()));
// Open(), Open() should fail the second time.
EXPECT_TRUE(ais->Open());
@@ -361,7 +366,7 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamMiscCallingSequences) {
ais.Close();
}
-TEST_F(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
+TEST_P(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
int count = 0;
@@ -370,15 +375,15 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
// Create default WASAPI input stream which records in stereo using
// the shared mixing rate. The default buffer size is 10ms.
- AudioInputStreamWrapper aisw(audio_manager_.get());
+ AudioInputStreamWrapper aisw(audio_manager_.get(), GetParam());
ScopedAudioInputStream ais(aisw.Create());
EXPECT_TRUE(ais->Open());
MockAudioInputCallback sink;
// Derive the expected size in bytes of each recorded packet.
- uint32_t bytes_per_packet =
- aisw.channels() * aisw.frames_per_buffer() * (aisw.bits_per_sample() / 8);
+ uint32_t bytes_per_packet = aisw.channels() * aisw.frames_per_buffer() *
+ SampleFormatToBytesPerChannel(kSampleFormat);
{
// We use 10ms packets and will run the test until ten packets are received.
@@ -405,8 +410,8 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
count = 0;
ais.Reset(aisw.Create(2 * frames_per_buffer_10ms));
EXPECT_TRUE(ais->Open());
- bytes_per_packet =
- aisw.channels() * aisw.frames_per_buffer() * (aisw.bits_per_sample() / 8);
+ bytes_per_packet = aisw.channels() * aisw.frames_per_buffer() *
+ SampleFormatToBytesPerChannel(kSampleFormat);
{
base::RunLoop run_loop;
@@ -426,8 +431,8 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
count = 0;
ais.Reset(aisw.Create(frames_per_buffer_10ms / 2));
EXPECT_TRUE(ais->Open());
- bytes_per_packet =
- aisw.channels() * aisw.frames_per_buffer() * (aisw.bits_per_sample() / 8);
+ bytes_per_packet = aisw.channels() * aisw.frames_per_buffer() *
+ SampleFormatToBytesPerChannel(kSampleFormat);
{
base::RunLoop run_loop;
@@ -444,7 +449,7 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
}
// Test that we can capture a stream in loopback.
-TEST_F(WinAudioInputTest, WASAPIAudioInputStreamLoopback) {
+TEST_P(WinAudioInputTest, WASAPIAudioInputStreamLoopback) {
AudioDeviceInfoAccessorForTests device_info_accessor(audio_manager_.get());
ABORT_AUDIO_TEST_IF_NOT(device_info_accessor.HasAudioOutputDevices() &&
CoreAudioUtil::IsSupported());
@@ -478,7 +483,7 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamLoopback) {
// To include disabled tests in test execution, just invoke the test program
// with --gtest_also_run_disabled_tests or set the GTEST_ALSO_RUN_DISABLED_TESTS
// environment variable to a value greater than 0.
-TEST_F(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamRecordToFile) {
+TEST_P(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamRecordToFile) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
// Name of the output PCM file containing captured data. The output file
@@ -486,12 +491,12 @@ TEST_F(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamRecordToFile) {
// Example of full name: \src\build\Debug\out_stereo_10sec.pcm.
const char* file_name = "out_10sec.pcm";
- AudioInputStreamWrapper aisw(audio_manager_.get());
+ AudioInputStreamWrapper aisw(audio_manager_.get(), GetParam());
ScopedAudioInputStream ais(aisw.Create());
ASSERT_TRUE(ais->Open());
VLOG(0) << ">> Sample rate: " << aisw.sample_rate() << " [Hz]";
- WriteToFileAudioSink file_sink(file_name, aisw.bits_per_sample());
+ WriteToFileAudioSink file_sink(file_name);
VLOG(0) << ">> Speak into the default microphone while recording.";
ais->Start(&file_sink);
base::PlatformThread::Sleep(TestTimeouts::action_timeout());
@@ -500,7 +505,7 @@ TEST_F(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamRecordToFile) {
ais.Close();
}
-TEST_F(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamResampleToFile) {
+TEST_P(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamResampleToFile) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
// This is basically the same test as WASAPIAudioInputStreamRecordToFile
@@ -534,8 +539,9 @@ TEST_F(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamResampleToFile) {
// 44.1kHz.
// Otherwise (e.g. 44.1kHz, 22.05kHz etc) we convert to 48kHz.
const int hw_sample_rate = params.sample_rate();
- params.Reset(params.format(), test.layout, test.rate,
- params.bits_per_sample(), test.frames);
+ params.Reset(params.format(), test.layout, test.rate, test.frames);
+ if (GetParam())
+ params.set_effects(params.effects() | AudioParameters::ECHO_CANCELLER);
std::string file_name(base::StringPrintf(
"resampled_10sec_%i_to_%i_%s.pcm", hw_sample_rate, params.sample_rate(),
@@ -548,7 +554,7 @@ TEST_F(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamResampleToFile) {
VLOG(0) << ">> Resampled rate will be: " << aisw.sample_rate() << " [Hz]";
VLOG(0) << ">> New layout will be: "
<< ChannelLayoutToString(params.channel_layout());
- WriteToFileAudioSink file_sink(file_name.c_str(), aisw.bits_per_sample());
+ WriteToFileAudioSink file_sink(file_name.c_str());
VLOG(0) << ">> Speak into the default microphone while recording.";
ais->Start(&file_sink);
base::PlatformThread::Sleep(TestTimeouts::action_timeout());
@@ -559,4 +565,8 @@ TEST_F(WinAudioInputTest, DISABLED_WASAPIAudioInputStreamResampleToFile) {
}
}
+INSTANTIATE_TEST_CASE_P(/* Intentionally left empty */,
+ WinAudioInputTest,
+ ::testing::Bool());
+
} // namespace media
diff --git a/chromium/media/audio/win/audio_low_latency_output_win.cc b/chromium/media/audio/win/audio_low_latency_output_win.cc
index accbacb4aa0..eae42366058 100644
--- a/chromium/media/audio/win/audio_low_latency_output_win.cc
+++ b/chromium/media/audio/win/audio_low_latency_output_win.cc
@@ -71,14 +71,7 @@ WASAPIAudioOutputStream::WASAPIAudioOutputStream(AudioManagerWin* manager,
bool avrt_init = avrt::Initialize();
DCHECK(avrt_init) << "Failed to load the avrt.dll";
- // New set that appropriate for float output.
- AudioParameters float_params(
- params.format(), params.channel_layout(), params.sample_rate(),
- // Ignore the given bits per sample because we're outputting
- // floats.
- sizeof(float) * CHAR_BIT, params.frames_per_buffer());
-
- audio_bus_ = AudioBus::Create(float_params);
+ audio_bus_ = AudioBus::Create(params);
// Set up the desired render format specified by the client. We use the
// WAVE_FORMAT_EXTENSIBLE structure to ensure that multiple channel ordering
@@ -87,27 +80,27 @@ WASAPIAudioOutputStream::WASAPIAudioOutputStream(AudioManagerWin* manager,
// Begin with the WAVEFORMATEX structure that specifies the basic format.
WAVEFORMATEX* format = &format_.Format;
format->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
- format->nChannels = float_params.channels();
- format->nSamplesPerSec = float_params.sample_rate();
- format->wBitsPerSample = float_params.bits_per_sample();
+ format->nChannels = params.channels();
+ format->nSamplesPerSec = params.sample_rate();
+ format->wBitsPerSample = sizeof(float) * 8;
format->nBlockAlign = (format->wBitsPerSample / 8) * format->nChannels;
format->nAvgBytesPerSec = format->nSamplesPerSec * format->nBlockAlign;
format->cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
// Add the parts which are unique to WAVE_FORMAT_EXTENSIBLE.
- format_.Samples.wValidBitsPerSample = float_params.bits_per_sample();
+ format_.Samples.wValidBitsPerSample = format->wBitsPerSample;
format_.dwChannelMask = CoreAudioUtil::GetChannelConfig(device_id, eRender);
format_.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
// Store size (in different units) of audio packets which we expect to
// get from the audio endpoint device in each render event.
- packet_size_frames_ = float_params.frames_per_buffer();
- packet_size_bytes_ = float_params.GetBytesPerBuffer();
+ packet_size_frames_ = params.frames_per_buffer();
+ packet_size_bytes_ = params.GetBytesPerBuffer(kSampleFormatF32);
DVLOG(1) << "Number of bytes per audio frame : " << format->nBlockAlign;
DVLOG(1) << "Number of audio frames per packet: " << packet_size_frames_;
DVLOG(1) << "Number of bytes per packet : " << packet_size_bytes_;
DVLOG(1) << "Number of milliseconds per packet: "
- << float_params.GetBufferDuration().InMillisecondsF();
+ << params.GetBufferDuration().InMillisecondsF();
// All events are auto-reset events and non-signaled initially.
diff --git a/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc b/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
index a57ec878def..60648d2c46f 100644
--- a/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
@@ -67,7 +67,8 @@ MATCHER_P(HasValidDelay, value, "") {
// Used to terminate a loop from a different thread than the loop belongs to.
// |task_runner| should be a SingleThreadTaskRunner.
ACTION_P(QuitLoop, task_runner) {
- task_runner->PostTask(FROM_HERE, base::MessageLoop::QuitWhenIdleClosure());
+ task_runner->PostTask(FROM_HERE,
+ base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
}
// This audio source implementation should be used for manual tests only since
@@ -93,7 +94,7 @@ class ReadFromFileAudioSource : public AudioOutputStream::AudioSourceCallback {
// Get complete file path to output file in directory containing
// media_unittests.exe.
base::FilePath file_name;
- EXPECT_TRUE(PathService::Get(base::DIR_EXE, &file_name));
+ EXPECT_TRUE(base::PathService::Get(base::DIR_EXE, &file_name));
file_name = file_name.AppendASCII(kDeltaTimeMsFileName);
EXPECT_TRUE(!text_file_);
@@ -171,8 +172,7 @@ class AudioOutputStreamWrapper {
public:
explicit AudioOutputStreamWrapper(AudioManager* audio_manager)
: audio_man_(audio_manager),
- format_(AudioParameters::AUDIO_PCM_LOW_LATENCY),
- bits_per_sample_(kBitsPerSample) {
+ format_(AudioParameters::AUDIO_PCM_LOW_LATENCY) {
AudioParameters preferred_params;
EXPECT_TRUE(SUCCEEDED(CoreAudioUtil::GetPreferredAudioParameters(
AudioDeviceDescription::kDefaultDeviceId, true, &preferred_params)));
@@ -205,7 +205,6 @@ class AudioOutputStreamWrapper {
AudioParameters::Format format() const { return format_; }
int channels() const { return ChannelLayoutToChannelCount(channel_layout_); }
- int bits_per_sample() const { return bits_per_sample_; }
int sample_rate() const { return sample_rate_; }
int samples_per_packet() const { return samples_per_packet_; }
@@ -213,7 +212,7 @@ class AudioOutputStreamWrapper {
AudioOutputStream* CreateOutputStream() {
AudioOutputStream* aos = audio_man_->MakeAudioOutputStream(
AudioParameters(format_, channel_layout_, sample_rate_,
- bits_per_sample_, samples_per_packet_),
+ samples_per_packet_),
std::string(), AudioManager::LogCallback());
EXPECT_TRUE(aos);
return aos;
@@ -222,7 +221,6 @@ class AudioOutputStreamWrapper {
AudioManager* audio_man_;
AudioParameters::Format format_;
ChannelLayout channel_layout_;
- int bits_per_sample_;
int sample_rate_;
int samples_per_packet_;
};
@@ -385,7 +383,7 @@ TEST_F(WASAPIAudioOutputStreamTest, ValidPacketSize) {
aos->Start(&source);
message_loop_.task_runner()->PostDelayedTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure(),
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated(),
TestTimeouts::action_timeout());
base::RunLoop().Run();
aos->Stop();
@@ -422,7 +420,6 @@ TEST_F(WASAPIAudioOutputStreamTest, DISABLED_ReadFromStereoFile) {
DVLOG(0) << "File name : " << file_name.c_str();
DVLOG(0) << "Sample rate : " << aosw.sample_rate();
- DVLOG(0) << "Bits per sample: " << aosw.bits_per_sample();
DVLOG(0) << "#channels : " << aosw.channels();
DVLOG(0) << "File size : " << file_source.file_size();
DVLOG(0) << "#file segments : " << kNumFileSegments;
@@ -576,7 +573,7 @@ TEST_F(WASAPIAudioOutputStreamTest,
aos->Start(&source);
message_loop_.task_runner()->PostDelayedTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure(),
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated(),
TestTimeouts::action_timeout());
base::RunLoop().Run();
aos->Stop();
@@ -610,7 +607,7 @@ TEST_F(WASAPIAudioOutputStreamTest,
aos->Start(&source);
message_loop_.task_runner()->PostDelayedTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure(),
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated(),
TestTimeouts::action_timeout());
base::RunLoop().Run();
aos->Stop();
diff --git a/chromium/media/audio/win/audio_manager_win.cc b/chromium/media/audio/win/audio_manager_win.cc
index 3e8015dfb33..d61781b625a 100644
--- a/chromium/media/audio/win/audio_manager_win.cc
+++ b/chromium/media/audio/win/audio_manager_win.cc
@@ -19,11 +19,11 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/command_line.h"
-#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/string_number_conversions.h"
#include "base/win/windows_version.h"
#include "media/audio/audio_device_description.h"
+#include "media/audio/audio_features.h"
#include "media/audio/audio_io.h"
#include "media/audio/win/audio_device_listener_win.h"
#include "media/audio/win/audio_low_latency_input_win.h"
@@ -92,12 +92,20 @@ AudioManagerWin::AudioManagerWin(std::unique_ptr<AudioThread> audio_thread,
SetMaxOutputStreamsAllowed(kMaxOutputStreams);
- // WARNING: This is executed on the UI loop, do not add any code here which
- // loads libraries or attempts to call out into the OS. Instead add such code
- // to the InitializeOnAudioThread() method below.
+ // WARNING: This may be executed on the UI loop, do not add any code here
+ // which loads libraries or attempts to call out into the OS. Instead add
+ // such code to the InitializeOnAudioThread() method below.
+
+ // In case we are already on the audio thread (i.e. when running out of
+ // process audio), don't post.
+ if (GetTaskRunner()->BelongsToCurrentThread()) {
+ this->InitializeOnAudioThread();
+ return;
+ }
// Task must be posted last to avoid races from handing out "this" to the
- // audio thread.
+ // audio thread. Unretained is safe since we join the audio thread before
+ // destructing |this|.
GetTaskRunner()->PostTask(
FROM_HERE, base::Bind(&AudioManagerWin::InitializeOnAudioThread,
base::Unretained(this)));
@@ -172,13 +180,16 @@ AudioParameters AudioManagerWin::GetInputStreamParameters(
// code path somehow for a configuration - e.g. tab capture).
parameters =
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 48000, 16, kFallbackBufferSize);
+ CHANNEL_LAYOUT_STEREO, 48000, kFallbackBufferSize);
}
int user_buffer_size = GetUserBufferSize();
if (user_buffer_size)
parameters.set_frames_per_buffer(user_buffer_size);
+ parameters.set_effects(parameters.effects() |
+ AudioParameters::EXPERIMENTAL_ECHO_CANCELLER);
+
return parameters;
}
@@ -249,7 +260,14 @@ AudioInputStream* AudioManagerWin::MakeLowLatencyInputStream(
const LogCallback& log_callback) {
// Used for both AUDIO_PCM_LOW_LATENCY and AUDIO_PCM_LINEAR.
DVLOG(1) << "MakeLowLatencyInputStream: " << device_id;
- return new WASAPIAudioInputStream(this, params, device_id, log_callback);
+
+ VoiceProcessingMode voice_processing_mode =
+ params.effects() & AudioParameters::ECHO_CANCELLER
+ ? VoiceProcessingMode::kEnabled
+ : VoiceProcessingMode::kDisabled;
+
+ return new WASAPIAudioInputStream(this, params, device_id, log_callback,
+ voice_processing_mode);
}
std::string AudioManagerWin::GetDefaultInputDeviceID() {
@@ -275,7 +293,6 @@ AudioParameters AudioManagerWin::GetPreferredOutputStreamParameters(
ChannelLayout channel_layout = CHANNEL_LAYOUT_STEREO;
int sample_rate = 48000;
int buffer_size = kFallbackBufferSize;
- int bits_per_sample = 16;
int effects = AudioParameters::NO_EFFECTS;
// TODO(henrika): Remove kEnableExclusiveAudio and related code. It doesn't
@@ -307,7 +324,6 @@ AudioParameters AudioManagerWin::GetPreferredOutputStreamParameters(
return AudioParameters();
}
- bits_per_sample = params.bits_per_sample();
buffer_size = params.frames_per_buffer();
channel_layout = params.channel_layout();
sample_rate = params.sample_rate();
@@ -350,7 +366,7 @@ AudioParameters AudioManagerWin::GetPreferredOutputStreamParameters(
buffer_size = user_buffer_size;
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout,
- sample_rate, bits_per_sample, buffer_size);
+ sample_rate, buffer_size);
params.set_effects(effects);
return params;
}
diff --git a/chromium/media/audio/win/audio_output_win_unittest.cc b/chromium/media/audio/win/audio_output_win_unittest.cc
index 30f808d72b4..73ea9caf58b 100644
--- a/chromium/media/audio/win/audio_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_output_win_unittest.cc
@@ -183,7 +183,7 @@ TEST_F(WinAudioTest, PCMWaveStreamGetAndClose) {
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 8000, 16, 256),
+ 8000, 256),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
oas->Close();
@@ -194,40 +194,34 @@ TEST_F(WinAudioTest, SanityOnMakeParams) {
ABORT_AUDIO_TEST_IF_NOT(audio_manager_device_info_->HasAudioOutputDevices());
AudioParameters::Format fmt = AudioParameters::AUDIO_PCM_LINEAR;
- EXPECT_TRUE(
- NULL ==
- audio_manager_->MakeAudioOutputStream(
- AudioParameters(fmt, CHANNEL_LAYOUT_UNSUPPORTED, 8000, 16, 256),
- std::string(), AudioManager::LogCallback()));
- EXPECT_TRUE(
- NULL ==
- audio_manager_->MakeAudioOutputStream(
- AudioParameters(fmt, CHANNEL_LAYOUT_MONO, 1024 * 1024, 16, 256),
- std::string(), AudioManager::LogCallback()));
EXPECT_TRUE(NULL ==
audio_manager_->MakeAudioOutputStream(
- AudioParameters(fmt, CHANNEL_LAYOUT_STEREO, 8000, 80, 256),
+ AudioParameters(fmt, CHANNEL_LAYOUT_UNSUPPORTED, 8000, 256),
std::string(), AudioManager::LogCallback()));
- EXPECT_TRUE(
- NULL ==
- audio_manager_->MakeAudioOutputStream(
- AudioParameters(fmt, CHANNEL_LAYOUT_UNSUPPORTED, 8000, 16, 256),
- std::string(), AudioManager::LogCallback()));
EXPECT_TRUE(NULL ==
audio_manager_->MakeAudioOutputStream(
- AudioParameters(fmt, CHANNEL_LAYOUT_STEREO, -8000, 16, 256),
+ AudioParameters(fmt, CHANNEL_LAYOUT_MONO, 1024 * 1024, 256),
std::string(), AudioManager::LogCallback()));
+ EXPECT_TRUE(NULL == audio_manager_->MakeAudioOutputStream(
+ AudioParameters(fmt, CHANNEL_LAYOUT_STEREO, 0, 256),
+ std::string(), AudioManager::LogCallback()));
EXPECT_TRUE(NULL ==
audio_manager_->MakeAudioOutputStream(
- AudioParameters(fmt, CHANNEL_LAYOUT_MONO, 8000, 16, -100),
+ AudioParameters(fmt, CHANNEL_LAYOUT_UNSUPPORTED, 8000, 256),
std::string(), AudioManager::LogCallback()));
EXPECT_TRUE(NULL ==
audio_manager_->MakeAudioOutputStream(
- AudioParameters(fmt, CHANNEL_LAYOUT_MONO, 8000, 16, 0),
+ AudioParameters(fmt, CHANNEL_LAYOUT_STEREO, -8000, 256),
std::string(), AudioManager::LogCallback()));
+ EXPECT_TRUE(NULL == audio_manager_->MakeAudioOutputStream(
+ AudioParameters(fmt, CHANNEL_LAYOUT_MONO, 8000, -100),
+ std::string(), AudioManager::LogCallback()));
+ EXPECT_TRUE(NULL == audio_manager_->MakeAudioOutputStream(
+ AudioParameters(fmt, CHANNEL_LAYOUT_MONO, 8000, 0),
+ std::string(), AudioManager::LogCallback()));
EXPECT_TRUE(NULL ==
audio_manager_->MakeAudioOutputStream(
- AudioParameters(fmt, CHANNEL_LAYOUT_MONO, 8000, 16,
+ AudioParameters(fmt, CHANNEL_LAYOUT_MONO, 8000,
media::limits::kMaxSamplesPerPacket + 1),
std::string(), AudioManager::LogCallback()));
}
@@ -238,7 +232,7 @@ TEST_F(WinAudioTest, PCMWaveStreamOpenAndClose) {
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 8000, 16, 256),
+ 8000, 256),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
EXPECT_TRUE(oas->Open());
@@ -251,7 +245,7 @@ TEST_F(WinAudioTest, PCMWaveStreamOpenLimit) {
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 8000, 16, 1024 * 1024 * 1024),
+ 8000, 1024 * 1024 * 1024),
std::string(), AudioManager::LogCallback());
EXPECT_TRUE(NULL == oas);
if (oas)
@@ -266,7 +260,7 @@ TEST_F(WinAudioTest, PCMWaveSlowSource) {
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 16000, 16, 256),
+ 16000, 256),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
TestSourceLaggy test_laggy(90);
@@ -291,7 +285,7 @@ TEST_F(WinAudioTest, PCMWaveStreamPlaySlowLoop) {
uint32_t samples_100_ms = AudioParameters::kAudioCDSampleRate / 10;
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kAudioCDSampleRate, 16, samples_100_ms),
+ AudioParameters::kAudioCDSampleRate, samples_100_ms),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
@@ -321,7 +315,7 @@ TEST_F(WinAudioTest, PCMWaveStreamPlay200HzTone44Kss) {
uint32_t samples_100_ms = AudioParameters::kAudioCDSampleRate / 10;
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kAudioCDSampleRate, 16, samples_100_ms),
+ AudioParameters::kAudioCDSampleRate, samples_100_ms),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
@@ -345,8 +339,7 @@ TEST_F(WinAudioTest, PCMWaveStreamPlay200HzTone22Kss) {
uint32_t samples_100_ms = AudioParameters::kAudioCDSampleRate / 20;
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kAudioCDSampleRate / 2, 16,
- samples_100_ms),
+ AudioParameters::kAudioCDSampleRate / 2, samples_100_ms),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
@@ -382,7 +375,7 @@ TEST_F(WinAudioTest, PushSourceFile16KHz) {
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- kSampleRate, 16, kSamples100ms),
+ kSampleRate, kSamples100ms),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
@@ -415,7 +408,7 @@ TEST_F(WinAudioTest, PCMWaveStreamPlayTwice200HzTone44Kss) {
uint32_t samples_100_ms = AudioParameters::kAudioCDSampleRate / 10;
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kAudioCDSampleRate, 16, samples_100_ms),
+ AudioParameters::kAudioCDSampleRate, samples_100_ms),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
@@ -453,7 +446,7 @@ TEST_F(WinAudioTest, PCMWaveStreamPlay200HzToneLowLatency) {
uint32_t samples_10_ms = sample_rate / 100;
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, sample_rate, 16, samples_10_ms),
+ CHANNEL_LAYOUT_MONO, sample_rate, samples_10_ms),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
@@ -483,7 +476,7 @@ TEST_F(WinAudioTest, PCMWaveStreamPendingBytes) {
uint32_t samples_100_ms = AudioParameters::kAudioCDSampleRate / 10;
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- AudioParameters::kAudioCDSampleRate, 16, samples_100_ms),
+ AudioParameters::kAudioCDSampleRate, samples_100_ms),
std::string(), AudioManager::LogCallback());
ASSERT_TRUE(NULL != oas);
@@ -629,8 +622,8 @@ TEST_F(WinAudioTest, SyncSocketBasic) {
static const int sample_rate = AudioParameters::kAudioCDSampleRate;
static const uint32_t kSamples20ms = sample_rate / 50;
- AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_MONO, sample_rate, 16, kSamples20ms);
+ AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
+ sample_rate, kSamples20ms);
AudioOutputStream* oas = audio_manager_->MakeAudioOutputStream(
params, std::string(), AudioManager::LogCallback());
diff --git a/chromium/media/audio/win/core_audio_util_win.cc b/chromium/media/audio/win/core_audio_util_win.cc
index 5541849f5d9..4e4a534166a 100644
--- a/chromium/media/audio/win/core_audio_util_win.cc
+++ b/chromium/media/audio/win/core_audio_util_win.cc
@@ -405,11 +405,6 @@ HRESULT GetPreferredAudioParametersInternal(IAudioClient* client,
// Preferred sample rate.
int sample_rate = mix_format.Format.nSamplesPerSec;
- // TODO(henrika): possibly use format.Format.wBitsPerSample here instead.
- // We use a hard-coded value of 16 bits per sample today even if most audio
- // engines does the actual mixing in 32 bits per sample.
- int bits_per_sample = 16;
-
// We are using the native device period to derive the smallest possible
// buffer size in shared mode. Note that the actual endpoint buffer will be
// larger than this size but it will be possible to fill it up in two calls.
@@ -420,8 +415,7 @@ HRESULT GetPreferredAudioParametersInternal(IAudioClient* client,
0.5);
AudioParameters audio_params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- channel_layout, sample_rate, bits_per_sample,
- frames_per_buffer);
+ channel_layout, sample_rate, frames_per_buffer);
*params = audio_params;
DVLOG(1) << params->AsHumanReadableString();
@@ -793,8 +787,7 @@ HRESULT CoreAudioUtil::GetPreferredAudioParameters(const std::string& device_id,
// need to do the same thing?
if (params->channels() != 1) {
params->Reset(params->format(), CHANNEL_LAYOUT_STEREO,
- params->sample_rate(), params->bits_per_sample(),
- params->frames_per_buffer());
+ params->sample_rate(), params->frames_per_buffer());
}
return hr;
@@ -983,4 +976,71 @@ bool CoreAudioUtil::GetDxDiagDetails(std::string* driver_name,
return true;
}
+HRESULT CoreAudioUtil::GetDeviceCollectionIndex(const std::string& device_id,
+ EDataFlow data_flow,
+ WORD* index) {
+ ComPtr<IMMDeviceEnumerator> enumerator = CreateDeviceEnumerator();
+ if (!enumerator.Get()) {
+ DLOG(ERROR) << "Failed to create device enumerator.";
+ return E_FAIL;
+ }
+
+ ComPtr<IMMDeviceCollection> device_collection;
+ HRESULT hr = enumerator->EnumAudioEndpoints(data_flow, DEVICE_STATE_ACTIVE,
+ &device_collection);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to get device collection.";
+ return hr;
+ }
+
+ UINT number_of_devices = 0;
+ hr = device_collection->GetCount(&number_of_devices);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to get device collection count.";
+ return hr;
+ }
+
+ ComPtr<IMMDevice> device;
+ for (WORD i = 0; i < number_of_devices; ++i) {
+ hr = device_collection->Item(i, &device);
+ if (FAILED(hr)) {
+ DLOG(WARNING) << "Failed to get device.";
+ continue;
+ }
+ ScopedCoMem<WCHAR> current_device_id;
+ hr = device->GetId(&current_device_id);
+ if (FAILED(hr)) {
+ DLOG(WARNING) << "Failed to get device id.";
+ continue;
+ }
+ if (base::UTF16ToUTF8(current_device_id.get()) == device_id) {
+ *index = i;
+ return S_OK;
+ }
+ }
+
+ DVLOG(1) << "No matching device found.";
+ return S_FALSE;
+}
+
+HRESULT CoreAudioUtil::SetBoolProperty(IPropertyStore* property_store,
+ REFPROPERTYKEY key,
+ VARIANT_BOOL value) {
+ base::win::ScopedPropVariant pv;
+ PROPVARIANT* pv_ptr = pv.Receive();
+ pv_ptr->vt = VT_BOOL;
+ pv_ptr->boolVal = value;
+ return property_store->SetValue(key, pv.get());
+}
+
+HRESULT CoreAudioUtil::SetVtI4Property(IPropertyStore* property_store,
+ REFPROPERTYKEY key,
+ LONG value) {
+ base::win::ScopedPropVariant pv;
+ PROPVARIANT* pv_ptr = pv.Receive();
+ pv_ptr->vt = VT_I4;
+ pv_ptr->lVal = value;
+ return property_store->SetValue(key, pv.get());
+}
+
} // namespace media
diff --git a/chromium/media/audio/win/core_audio_util_win.h b/chromium/media/audio/win/core_audio_util_win.h
index 584405ba420..3b7226f5a01 100644
--- a/chromium/media/audio/win/core_audio_util_win.h
+++ b/chromium/media/audio/win/core_audio_util_win.h
@@ -212,6 +212,23 @@ class MEDIA_EXPORT CoreAudioUtil {
static bool GetDxDiagDetails(std::string* driver_name,
std::string* driver_version);
+ // Gets the device collection index for the device specified by |device_id|.
+ // If the device is found in the device collection, the index is written to
+ // |*index| and S_OK is returned. If the device is not found, S_FALSE is
+ // returned and |*index| is left unchanged. In case of an error, the error
+ // result is returned and |*index| is left unchanged.
+ static HRESULT GetDeviceCollectionIndex(const std::string& device_id,
+ EDataFlow data_flow,
+ WORD* index);
+
+ // Sets the property identified by |key| to |value| in |*property_store|.
+ static HRESULT SetBoolProperty(IPropertyStore* property_store,
+ REFPROPERTYKEY key,
+ VARIANT_BOOL value);
+ static HRESULT SetVtI4Property(IPropertyStore* property_store,
+ REFPROPERTYKEY key,
+ LONG value);
+
private:
CoreAudioUtil() {}
~CoreAudioUtil() {}
diff --git a/chromium/media/audio/win/waveout_output_win.cc b/chromium/media/audio/win/waveout_output_win.cc
index 811930668d8..70d830f6213 100644
--- a/chromium/media/audio/win/waveout_output_win.cc
+++ b/chromium/media/audio/win/waveout_output_win.cc
@@ -72,6 +72,8 @@ inline WAVEHDR* PCMWaveOutAudioOutputStream::GetBuffer(int n) const {
return reinterpret_cast<WAVEHDR*>(&buffers_[n * BufferSize()]);
}
+constexpr SampleFormat kSampleFormat = kSampleFormatS16;
+
PCMWaveOutAudioOutputStream::PCMWaveOutAudioOutputStream(
AudioManagerWin* manager,
const AudioParameters& params,
@@ -81,7 +83,7 @@ PCMWaveOutAudioOutputStream::PCMWaveOutAudioOutputStream(
manager_(manager),
callback_(NULL),
num_buffers_(num_buffers),
- buffer_size_(params.GetBytesPerBuffer()),
+ buffer_size_(params.GetBytesPerBuffer(kSampleFormat)),
volume_(1),
channels_(params.channels()),
pending_bytes_(0),
@@ -92,7 +94,7 @@ PCMWaveOutAudioOutputStream::PCMWaveOutAudioOutputStream(
format_.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
format_.Format.nChannels = params.channels();
format_.Format.nSamplesPerSec = params.sample_rate();
- format_.Format.wBitsPerSample = params.bits_per_sample();
+ format_.Format.wBitsPerSample = SampleFormatToBitsPerChannel(kSampleFormat);
format_.Format.cbSize = sizeof(format_) - sizeof(WAVEFORMATEX);
// The next are computed from above.
format_.Format.nBlockAlign = (format_.Format.nChannels *
@@ -105,7 +107,7 @@ PCMWaveOutAudioOutputStream::PCMWaveOutAudioOutputStream(
format_.dwChannelMask = kChannelsToMask[params.channels()];
}
format_.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
- format_.Samples.wValidBitsPerSample = params.bits_per_sample();
+ format_.Samples.wValidBitsPerSample = format_.Format.wBitsPerSample;
}
PCMWaveOutAudioOutputStream::~PCMWaveOutAudioOutputStream() {
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index 7f0849a8a74..f0fa3368e92 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -81,6 +81,7 @@ source_set("base") {
"byte_queue.cc",
"byte_queue.h",
"callback_holder.h",
+ "callback_registry.h",
"cdm_callback_promise.cc",
"cdm_callback_promise.h",
"cdm_config.h",
@@ -140,6 +141,8 @@ source_set("base") {
"encryption_scheme.h",
"fake_audio_worker.cc",
"fake_audio_worker.h",
+ "fallback_video_decoder.cc",
+ "fallback_video_decoder.h",
"feedback_signal_accumulator.h",
"hdr_metadata.cc",
"hdr_metadata.h",
@@ -214,8 +217,6 @@ source_set("base") {
"renderer_factory_selector.cc",
"renderer_factory_selector.h",
"routing_token_callback.h",
- "sample_format.cc",
- "sample_format.h",
"sample_rates.cc",
"sample_rates.h",
"seekable_buffer.cc",
@@ -230,6 +231,7 @@ source_set("base") {
"stream_parser.h",
"stream_parser_buffer.cc",
"stream_parser_buffer.h",
+ "subsample_entry.cc",
"subsample_entry.h",
"surface_manager.h",
"text_cue.cc",
@@ -245,8 +247,12 @@ source_set("base") {
"time_delta_interpolator.h",
"time_source.h",
"timestamp_constants.h",
+ "unaligned_shared_memory.cc",
+ "unaligned_shared_memory.h",
"user_input_monitor.cc",
"user_input_monitor.h",
+ "video_bitrate_allocation.cc",
+ "video_bitrate_allocation.h",
"video_codecs.cc",
"video_codecs.h",
"video_color_space.cc",
@@ -287,7 +293,7 @@ source_set("base") {
"//base/allocator:buildflags",
"//gpu/command_buffer/common",
"//skia",
- "//third_party/libaom:av1_features",
+ "//third_party/libaom:av1_buildflags",
"//third_party/libyuv",
"//third_party/widevine/cdm:headers",
"//ui/display:display",
@@ -456,6 +462,7 @@ source_set("unit_tests") {
"bind_to_current_loop_unittest.cc",
"bit_reader_unittest.cc",
"callback_holder_unittest.cc",
+ "callback_registry_unittest.cc",
"channel_mixer_unittest.cc",
"channel_mixing_matrix_unittest.cc",
"container_names_unittest.cc",
@@ -463,9 +470,11 @@ source_set("unit_tests") {
"decode_capabilities_unittest.cc",
"decoder_buffer_queue_unittest.cc",
"decoder_buffer_unittest.cc",
+ "decrypt_config_unittest.cc",
"djb2_unittest.cc",
"fake_audio_worker_unittest.cc",
"fake_demuxer_stream_unittest.cc",
+ "fallback_video_decoder_unittest.cc",
"feedback_signal_accumulator_unittest.cc",
"gmock_callback_support_unittest.cc",
"key_systems_unittest.cc",
@@ -482,10 +491,14 @@ source_set("unit_tests") {
"silent_sink_suspender_unittest.cc",
"sinc_resampler_unittest.cc",
"stream_parser_unittest.cc",
+ "subsample_entry_unittest.cc",
"text_ranges_unittest.cc",
"text_renderer_unittest.cc",
"time_delta_interpolator_unittest.cc",
+ "unaligned_shared_memory_unittest.cc",
+ "user_input_monitor_unittest.cc",
"vector_math_unittest.cc",
+ "video_bitrate_allocation_unittest.cc",
"video_codecs_unittest.cc",
"video_color_space_unittest.cc",
"video_decoder_config_unittest.cc",
diff --git a/chromium/media/base/android/BUILD.gn b/chromium/media/base/android/BUILD.gn
index 9e12acb2964..a7f75cc2994 100644
--- a/chromium/media/base/android/BUILD.gn
+++ b/chromium/media/base/android/BUILD.gn
@@ -44,6 +44,8 @@ if (is_android) {
"media_drm_bridge_client.h",
"media_drm_bridge_delegate.cc",
"media_drm_bridge_delegate.h",
+ "media_drm_bridge_factory.cc",
+ "media_drm_bridge_factory.h",
"media_drm_storage.cc",
"media_drm_storage.h",
"media_drm_storage_bridge.cc",
diff --git a/chromium/media/base/android/android_cdm_factory.cc b/chromium/media/base/android/android_cdm_factory.cc
index 12c17cfef65..2da000bbf9c 100644
--- a/chromium/media/base/android/android_cdm_factory.cc
+++ b/chromium/media/base/android/android_cdm_factory.cc
@@ -5,44 +5,41 @@
#include "media/base/android/android_cdm_factory.h"
#include "base/feature_list.h"
+#include "base/logging.h"
+#include "base/metrics/histogram_macros.h"
#include "base/strings/string_number_conversions.h"
#include "media/base/android/media_drm_bridge.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/cdm_config.h"
#include "media/base/content_decryption_module.h"
#include "media/base/key_system_names.h"
-#include "media/base/key_systems.h"
#include "media/base/media_switches.h"
#include "media/cdm/aes_decryptor.h"
-#include "third_party/widevine/cdm/widevine_cdm_common.h"
#include "url/origin.h"
namespace media {
+
namespace {
-void OnCdmCreated(const std::string& key_system,
- MediaDrmBridge::SecurityLevel security_level,
- const CdmCreatedCB& bound_cdm_created_cb,
- scoped_refptr<MediaDrmBridge> cdm) {
- if (!cdm) {
- std::string error_message = "MediaDrmBridge cannot be created for " +
- key_system + " with security level " +
- base::IntToString(security_level);
- LOG(ERROR) << error_message;
- bound_cdm_created_cb.Run(nullptr, error_message);
- return;
- }
- // Success!
- bound_cdm_created_cb.Run(cdm, "");
+void ReportMediaDrmBridgeKeySystemSupport(bool supported) {
+ UMA_HISTOGRAM_BOOLEAN("Media.EME.MediaDrmBridge.KeySystemSupport", supported);
}
+
} // namespace
AndroidCdmFactory::AndroidCdmFactory(const CreateFetcherCB& create_fetcher_cb,
const CreateStorageCB& create_storage_cb)
: create_fetcher_cb_(create_fetcher_cb),
- create_storage_cb_(create_storage_cb) {}
+ create_storage_cb_(create_storage_cb),
+ weak_factory_(this) {}
-AndroidCdmFactory::~AndroidCdmFactory() {}
+AndroidCdmFactory::~AndroidCdmFactory() {
+ weak_factory_.InvalidateWeakPtrs();
+ for (auto& pending_creation : pending_creations_) {
+ auto& cdm_created_cb = pending_creation.second.second;
+ cdm_created_cb.Run(nullptr, "CDM creation aborted");
+ }
+}
void AndroidCdmFactory::Create(
const std::string& key_system,
@@ -53,6 +50,8 @@ void AndroidCdmFactory::Create(
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb,
const CdmCreatedCB& cdm_created_cb) {
+ DVLOG(1) << __func__;
+
// Bound |cdm_created_cb| so we always fire it asynchronously.
CdmCreatedCB bound_cdm_created_cb = BindToCurrentLoop(cdm_created_cb);
@@ -75,35 +74,41 @@ void AndroidCdmFactory::Create(
std::string error_message;
if (!MediaDrmBridge::IsKeySystemSupported(key_system)) {
- error_message = "Key system not supported unexpectedly: " + key_system;
- NOTREACHED() << error_message;
- bound_cdm_created_cb.Run(nullptr, error_message);
+ ReportMediaDrmBridgeKeySystemSupport(false);
+ bound_cdm_created_cb.Run(
+ nullptr, "Key system not supported unexpectedly: " + key_system);
return;
}
- MediaDrmBridge::SecurityLevel security_level =
- MediaDrmBridge::SECURITY_LEVEL_DEFAULT;
- if (key_system == kWidevineKeySystem) {
- security_level = cdm_config.use_hw_secure_codecs
- ? MediaDrmBridge::SECURITY_LEVEL_1
- : MediaDrmBridge::SECURITY_LEVEL_3;
- } else if (!cdm_config.use_hw_secure_codecs) {
- // Assume other key systems require hardware-secure codecs and thus do not
- // support full compositing.
- error_message =
- key_system +
- " may require use_video_overlay_for_embedded_encrypted_video";
- NOTREACHED() << error_message;
- bound_cdm_created_cb.Run(nullptr, error_message);
- return;
- }
+ ReportMediaDrmBridgeKeySystemSupport(true);
+
+ auto factory = std::make_unique<MediaDrmBridgeFactory>(create_fetcher_cb_,
+ create_storage_cb_);
+ auto* raw_factory = factory.get();
+
+ creation_id_++;
+ pending_creations_.emplace(
+ creation_id_, PendingCreation(std::move(factory), bound_cdm_created_cb));
+
+ raw_factory->Create(
+ key_system, security_origin, cdm_config, session_message_cb,
+ session_closed_cb, session_keys_change_cb, session_expiration_update_cb,
+ base::BindRepeating(&AndroidCdmFactory::OnCdmCreated,
+ weak_factory_.GetWeakPtr(), creation_id_));
+}
+
+void AndroidCdmFactory::OnCdmCreated(
+ uint32_t creation_id,
+ const scoped_refptr<ContentDecryptionModule>& cdm,
+ const std::string& error_message) {
+ DVLOG(1) << __func__ << ": creation_id = " << creation_id;
+
+ DCHECK(pending_creations_.count(creation_id));
+ auto cdm_created_cb = pending_creations_[creation_id].second;
+ pending_creations_.erase(creation_id);
- MediaDrmBridge::Create(key_system, security_origin, security_level,
- create_fetcher_cb_, create_storage_cb_,
- session_message_cb, session_closed_cb,
- session_keys_change_cb, session_expiration_update_cb,
- base::BindOnce(&OnCdmCreated, key_system,
- security_level, bound_cdm_created_cb));
+ LOG_IF(ERROR, !cdm) << error_message;
+ cdm_created_cb.Run(cdm, error_message);
}
} // namespace media
diff --git a/chromium/media/base/android/android_cdm_factory.h b/chromium/media/base/android/android_cdm_factory.h
index d28300fc12c..854b6d9bba1 100644
--- a/chromium/media/base/android/android_cdm_factory.h
+++ b/chromium/media/base/android/android_cdm_factory.h
@@ -5,8 +5,14 @@
#ifndef MEDIA_BASE_ANDROID_ANDROID_CDM_FACTORY_H_
#define MEDIA_BASE_ANDROID_ANDROID_CDM_FACTORY_H_
+#include <stdint.h>
+
+#include <utility>
+
+#include "base/containers/flat_map.h"
#include "base/macros.h"
-#include "media/base/android/media_drm_storage.h"
+#include "base/memory/weak_ptr.h"
+#include "media/base/android/media_drm_bridge_factory.h"
#include "media/base/cdm_factory.h"
#include "media/base/media_export.h"
#include "media/base/provision_fetcher.h"
@@ -32,9 +38,23 @@ class MEDIA_EXPORT AndroidCdmFactory : public CdmFactory {
const CdmCreatedCB& cdm_created_cb) final;
private:
+ // Callback for MediaDrmBridgeFactory::Create().
+ void OnCdmCreated(uint32_t creation_id,
+ const scoped_refptr<ContentDecryptionModule>& cdm,
+ const std::string& error_message);
+
CreateFetcherCB create_fetcher_cb_;
CreateStorageCB create_storage_cb_;
+ uint32_t creation_id_ = 0;
+
+ // Map between creation ID and PendingCreations.
+ using PendingCreation =
+ std::pair<std::unique_ptr<MediaDrmBridgeFactory>, CdmCreatedCB>;
+ base::flat_map<uint32_t, PendingCreation> pending_creations_;
+
+ base::WeakPtrFactory<AndroidCdmFactory> weak_factory_;
+
DISALLOW_COPY_AND_ASSIGN(AndroidCdmFactory);
};
diff --git a/chromium/media/base/android/media_codec_loop_unittest.cc b/chromium/media/base/android/media_codec_loop_unittest.cc
index fdf9ca27ea2..171788f9ef0 100644
--- a/chromium/media/base/android/media_codec_loop_unittest.cc
+++ b/chromium/media/base/android/media_codec_loop_unittest.cc
@@ -6,7 +6,7 @@
#include "base/android/build_info.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
+#include "base/single_thread_task_runner.h"
#include "base/test/test_mock_time_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/android/media_codec_bridge.h"
diff --git a/chromium/media/base/android/media_drm_bridge.cc b/chromium/media/base/android/media_drm_bridge.cc
index 3751965bec3..5026bda9bd4 100644
--- a/chromium/media/base/android/media_drm_bridge.cc
+++ b/chromium/media/base/android/media_drm_bridge.cc
@@ -19,6 +19,7 @@
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
+#include "base/metrics/histogram_macros.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
@@ -212,16 +213,21 @@ bool IsKeySystemSupportedWithTypeImpl(const std::string& key_system,
}
UUID scheme_uuid = GetKeySystemManager()->GetUUID(key_system);
- if (scheme_uuid.empty())
+ if (scheme_uuid.empty()) {
+ DVLOG(1) << "Cannot get UUID for key system " << key_system;
return false;
+ }
JNIEnv* env = AttachCurrentThread();
ScopedJavaLocalRef<jbyteArray> j_scheme_uuid =
base::android::ToJavaByteArray(env, &scheme_uuid[0], scheme_uuid.size());
ScopedJavaLocalRef<jstring> j_container_mime_type =
ConvertUTF8ToJavaString(env, container_mime_type);
- return Java_MediaDrmBridge_isCryptoSchemeSupported(env, j_scheme_uuid,
- j_container_mime_type);
+ bool supported = Java_MediaDrmBridge_isCryptoSchemeSupported(
+ env, j_scheme_uuid, j_container_mime_type);
+ DVLOG_IF(1, !supported) << "Crypto scheme not supported for " << key_system
+ << " with " << container_mime_type;
+ return supported;
}
MediaDrmBridge::SecurityLevel GetSecurityLevelFromString(
@@ -273,24 +279,6 @@ bool IsPersistentLicenseTypeSupportedByMediaDrm() {
base::android::SDK_VERSION_MARSHMALLOW;
}
-// Callback for MediaDrmStorageBridge::Initialize.
-// |create_media_drm_bridge_cb|, factory method to create MediaDrmBridge.
-// |created_cb|, callback to return the MediaDrmBridge to caller of
-// MediaDrmBridge::Create.
-void OnStorageInitialized(CreateMediaDrmBridgeCB create_media_drm_bridge_cb,
- MediaDrmBridge::CreatedCB created_cb,
- MediaDrmStorageBridge* storage) {
- DCHECK(storage);
-
- // MediaDrmStorageBridge should always return a valid origin ID after
- // initialize. Otherwise the pipe is broken and we should not create
- // MediaDrmBridge here.
- std::move(created_cb)
- .Run(storage->origin_id().empty() ? nullptr
- : std::move(create_media_drm_bridge_cb)
- .Run(storage->origin_id()));
-}
-
} // namespace
// MediaDrm is not generally usable without MediaCodec. Thus, both the MediaDrm
@@ -337,24 +325,30 @@ std::vector<std::string> MediaDrmBridge::GetPlatformKeySystemNames() {
}
// static
+std::vector<uint8_t> MediaDrmBridge::GetUUID(const std::string& key_system) {
+ return GetKeySystemManager()->GetUUID(key_system);
+}
+
+// static
scoped_refptr<MediaDrmBridge> MediaDrmBridge::CreateInternal(
const std::vector<uint8_t>& scheme_uuid,
+ const std::string& origin_id,
SecurityLevel security_level,
+ bool requires_media_crypto,
std::unique_ptr<MediaDrmStorageBridge> storage,
const CreateFetcherCB& create_fetcher_cb,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
- const SessionExpirationUpdateCB& session_expiration_update_cb,
- const std::string& origin_id) {
+ const SessionExpirationUpdateCB& session_expiration_update_cb) {
// All paths requires the MediaDrmApis.
DCHECK(AreMediaDrmApisAvailable());
DCHECK(!scheme_uuid.empty());
scoped_refptr<MediaDrmBridge> media_drm_bridge(new MediaDrmBridge(
- scheme_uuid, origin_id, security_level, std::move(storage),
- create_fetcher_cb, session_message_cb, session_closed_cb,
- session_keys_change_cb, session_expiration_update_cb));
+ scheme_uuid, origin_id, security_level, requires_media_crypto,
+ std::move(storage), create_fetcher_cb, session_message_cb,
+ session_closed_cb, session_keys_change_cb, session_expiration_update_cb));
if (media_drm_bridge->j_media_drm_.is_null())
return nullptr;
@@ -363,51 +357,6 @@ scoped_refptr<MediaDrmBridge> MediaDrmBridge::CreateInternal(
}
// static
-void MediaDrmBridge::Create(
- const std::string& key_system,
- const url::Origin& security_origin,
- SecurityLevel security_level,
- const CreateFetcherCB& create_fetcher_cb,
- const CreateStorageCB& create_storage_cb,
- const SessionMessageCB& session_message_cb,
- const SessionClosedCB& session_closed_cb,
- const SessionKeysChangeCB& session_keys_change_cb,
- const SessionExpirationUpdateCB& session_expiration_update_cb,
- CreatedCB created_cb) {
- DVLOG(1) << __func__;
-
- if (!IsAvailable()) {
- std::move(created_cb).Run(nullptr);
- return;
- }
-
- UUID scheme_uuid = GetKeySystemManager()->GetUUID(key_system);
- if (scheme_uuid.empty()) {
- std::move(created_cb).Run(nullptr);
- return;
- }
-
- // MediaDrmStorage may be lazy created in MediaDrmStorageBridge.
- auto storage = std::make_unique<MediaDrmStorageBridge>();
- MediaDrmStorageBridge* raw_storage = storage.get();
-
- CreateMediaDrmBridgeCB create_media_drm_bridge_cb = base::BindOnce(
- &MediaDrmBridge::CreateInternal, scheme_uuid, security_level,
- std::move(storage), create_fetcher_cb, session_message_cb,
- session_closed_cb, session_keys_change_cb, session_expiration_update_cb);
-
- if (IsPersistentLicenseTypeSupported(key_system) &&
- !security_origin.unique() && !create_storage_cb.is_null()) {
- raw_storage->Initialize(
- create_storage_cb, base::BindOnce(&OnStorageInitialized,
- std::move(create_media_drm_bridge_cb),
- std::move(created_cb), raw_storage));
- } else {
- std::move(created_cb).Run(std::move(create_media_drm_bridge_cb).Run(""));
- }
-}
-
-// static
scoped_refptr<MediaDrmBridge> MediaDrmBridge::CreateWithoutSessionSupport(
const std::string& key_system,
const std::string& origin_id,
@@ -416,19 +365,21 @@ scoped_refptr<MediaDrmBridge> MediaDrmBridge::CreateWithoutSessionSupport(
DVLOG(1) << __func__;
// Sessions won't be used so decoding capability is not required.
- if (!AreMediaDrmApisAvailable()) {
+ if (!AreMediaDrmApisAvailable())
return nullptr;
- }
UUID scheme_uuid = GetKeySystemManager()->GetUUID(key_system);
- if (scheme_uuid.empty()) {
+ if (scheme_uuid.empty())
return nullptr;
- }
+
+ // When created without session support, MediaCrypto is not needed.
+ const bool requires_media_crypto = false;
return CreateInternal(
- scheme_uuid, security_level, std::make_unique<MediaDrmStorageBridge>(),
- create_fetcher_cb, SessionMessageCB(), SessionClosedCB(),
- SessionKeysChangeCB(), SessionExpirationUpdateCB(), origin_id);
+ scheme_uuid, origin_id, security_level, requires_media_crypto,
+ std::make_unique<MediaDrmStorageBridge>(), create_fetcher_cb,
+ SessionMessageCB(), SessionClosedCB(), SessionKeysChangeCB(),
+ SessionExpirationUpdateCB());
}
void MediaDrmBridge::SetServerCertificate(
@@ -859,6 +810,7 @@ MediaDrmBridge::MediaDrmBridge(
const std::vector<uint8_t>& scheme_uuid,
const std::string& origin_id,
SecurityLevel security_level,
+ bool requires_media_crypto,
std::unique_ptr<MediaDrmStorageBridge> storage,
const CreateFetcherCB& create_fetcher_cb,
const SessionMessageCB& session_message_cb,
@@ -906,7 +858,7 @@ MediaDrmBridge::MediaDrmBridge(
// Note: OnMediaCryptoReady() could be called in this call.
j_media_drm_.Reset(Java_MediaDrmBridge_create(
env, j_scheme_uuid, j_security_origin, j_security_level,
- reinterpret_cast<intptr_t>(this),
+ requires_media_crypto, reinterpret_cast<intptr_t>(this),
reinterpret_cast<intptr_t>(storage_.get())));
}
@@ -948,6 +900,9 @@ void MediaDrmBridge::NotifyMediaCryptoReady(JavaObjectPtr j_media_crypto) {
j_media_crypto_ = std::move(j_media_crypto);
+ UMA_HISTOGRAM_BOOLEAN("Media.EME.MediaCryptoAvailable",
+ !j_media_crypto_->is_null());
+
if (media_crypto_ready_cb_.is_null())
return;
diff --git a/chromium/media/base/android/media_drm_bridge.h b/chromium/media/base/android/media_drm_bridge.h
index 0aac7c5049d..20de8f7e738 100644
--- a/chromium/media/base/android/media_drm_bridge.h
+++ b/chromium/media/base/android/media_drm_bridge.h
@@ -60,7 +60,6 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
using ResetCredentialsCB = base::Callback<void(bool)>;
using MediaCryptoReadyCB = MediaCryptoContext::MediaCryptoReadyCB;
- using CreatedCB = base::OnceCallback<void(scoped_refptr<MediaDrmBridge>)>;
// Checks whether MediaDRM is available and usable, including for decoding.
// All other static methods check IsAvailable() or equivalent internally.
@@ -82,20 +81,8 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
// are not handled by Chrome explicitly.
static std::vector<std::string> GetPlatformKeySystemNames();
- // Returns a MediaDrmBridge instance if |key_system| and |security_level| are
- // supported, and nullptr otherwise. The default security level will be used
- // if |security_level| is SECURITY_LEVEL_DEFAULT.
- static void Create(
- const std::string& key_system,
- const url::Origin& security_origin,
- SecurityLevel security_level,
- const CreateFetcherCB& create_fetcher_cb,
- const CreateStorageCB& create_storage_cb,
- const SessionMessageCB& session_message_cb,
- const SessionClosedCB& session_closed_cb,
- const SessionKeysChangeCB& session_keys_change_cb,
- const SessionExpirationUpdateCB& session_expiration_update_cb,
- CreatedCB created_cb);
+ // Returns the scheme UUID for |key_system|.
+ static std::vector<uint8_t> GetUUID(const std::string& key_system);
// Same as Create() except that no session callbacks are provided. This is
// used when we need to use MediaDrmBridge without creating any sessions.
@@ -138,8 +125,8 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
// current origin and leave the offline licenses in invalid state (offline
// licenses can't be used anymore).
//
- // MediaDrmBridge must be created with a valid origin ID. This function won't
- // touch persistent storage.
+ // MediaDrmBridge must be created with a valid origin ID without session
+ // support. This function won't touch persistent storage.
void Unprovision();
// PlayerTracker implementation. Can be called on any thread.
@@ -156,7 +143,9 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
// video playback.
bool IsSecureCodecRequired();
- // Reset the device credentials.
+ // Reset the device credentials. MediaDrmBridge must be created without
+ // session support.
+ // TODO(xhwang): Unify Unprovision() and ResetDeviceCredentials().
void ResetDeviceCredentials(const ResetCredentialsCB& callback);
// Helper functions to resolve promises.
@@ -249,30 +238,44 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
bool success);
private:
+ friend class MediaDrmBridgeFactory;
// For DeleteSoon() in DeleteOnCorrectThread().
friend class base::DeleteHelper<MediaDrmBridge>;
static scoped_refptr<MediaDrmBridge> CreateInternal(
const std::vector<uint8_t>& scheme_uuid,
+ const std::string& origin_id,
SecurityLevel security_level,
+ bool requires_media_crypto,
std::unique_ptr<MediaDrmStorageBridge> storage,
const CreateFetcherCB& create_fetcher_cb,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
- const SessionExpirationUpdateCB& session_expiration_update_cb,
- const std::string& origin_id);
+ const SessionExpirationUpdateCB& session_expiration_update_cb);
// Constructs a MediaDrmBridge for |scheme_uuid| and |security_level|. The
// default security level will be used if |security_level| is
- // SECURITY_LEVEL_DEFAULT. Sessions should not be created if session callbacks
- // are null.
+ // SECURITY_LEVEL_DEFAULT.
//
// |origin_id| is a random string that can identify an origin. It may be empty
// when reseting device credential.
+ //
+ // If |requires_media_crypto| is true, MediaCrypto is expected to be created
+ // and notified via MediaCryptoReadyCB set in SetMediaCryptoReadyCB(). This
+ // may trigger the provisioning process. Before MediaCrypto is notified, no
+ // other methods should be called.
+ // TODO(xhwang): It's odd to rely on MediaCryptoReadyCB. Maybe we should add a
+ // dedicated Initialize() method.
+ //
+ // If |requires_media_crypto| is false, MediaCrypto will not be created. This
+ // object cannot be used for playback, but can be used to unprovision the
+ // device/origin via Unprovision() and ResetDeviceCredentials(). Sessions
+ // should not be created in this mode.
MediaDrmBridge(const std::vector<uint8_t>& scheme_uuid,
const std::string& origin_id,
SecurityLevel security_level,
+ bool requires_media_crypto,
std::unique_ptr<MediaDrmStorageBridge> storage,
const CreateFetcherCB& create_fetcher_cb,
const SessionMessageCB& session_message_cb,
diff --git a/chromium/media/base/android/media_drm_bridge_factory.cc b/chromium/media/base/android/media_drm_bridge_factory.cc
new file mode 100644
index 00000000000..90ac867f153
--- /dev/null
+++ b/chromium/media/base/android/media_drm_bridge_factory.cc
@@ -0,0 +1,136 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/media_drm_bridge_factory.h"
+
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "media/base/cdm_config.h"
+#include "media/base/content_decryption_module.h"
+#include "third_party/widevine/cdm/widevine_cdm_common.h"
+#include "url/origin.h"
+
+namespace media {
+
+MediaDrmBridgeFactory::MediaDrmBridgeFactory(
+ const CreateFetcherCB& create_fetcher_cb,
+ const CreateStorageCB& create_storage_cb)
+ : create_fetcher_cb_(create_fetcher_cb),
+ create_storage_cb_(create_storage_cb),
+ weak_factory_(this) {
+ DCHECK(create_fetcher_cb_);
+ DCHECK(create_storage_cb_);
+}
+
+MediaDrmBridgeFactory::~MediaDrmBridgeFactory() {
+ if (cdm_created_cb_)
+ std::move(cdm_created_cb_).Run(nullptr, "CDM creation aborted");
+}
+
+void MediaDrmBridgeFactory::Create(
+ const std::string& key_system,
+ const url::Origin& security_origin,
+ const CdmConfig& cdm_config,
+ const SessionMessageCB& session_message_cb,
+ const SessionClosedCB& session_closed_cb,
+ const SessionKeysChangeCB& session_keys_change_cb,
+ const SessionExpirationUpdateCB& session_expiration_update_cb,
+ const CdmCreatedCB& cdm_created_cb) {
+ DCHECK(MediaDrmBridge::IsKeySystemSupported(key_system));
+ DCHECK(MediaDrmBridge::IsAvailable());
+ DCHECK(!security_origin.unique());
+ DCHECK(scheme_uuid_.empty()) << "This factory can only be used once.";
+
+ scheme_uuid_ = MediaDrmBridge::GetUUID(key_system);
+ DCHECK(!scheme_uuid_.empty());
+
+ // Set security level.
+ if (key_system == kWidevineKeySystem) {
+ security_level_ = cdm_config.use_hw_secure_codecs
+ ? MediaDrmBridge::SECURITY_LEVEL_1
+ : MediaDrmBridge::SECURITY_LEVEL_3;
+ } else if (!cdm_config.use_hw_secure_codecs) {
+ // Assume other key systems require hardware-secure codecs and thus do not
+ // support full compositing.
+ auto error_message =
+ key_system +
+ " may require use_video_overlay_for_embedded_encrypted_video";
+ NOTREACHED() << error_message;
+ cdm_created_cb.Run(nullptr, error_message);
+ return;
+ }
+
+ session_message_cb_ = session_message_cb;
+ session_closed_cb_ = session_closed_cb;
+ session_keys_change_cb_ = session_keys_change_cb;
+ session_expiration_update_cb_ = session_expiration_update_cb;
+ cdm_created_cb_ = cdm_created_cb;
+
+ // MediaDrmStorage may be lazy created in MediaDrmStorageBridge.
+ storage_ = std::make_unique<MediaDrmStorageBridge>();
+
+ // TODO(xhwang): We should always try per-origin provisioning as long as it's
+ // supported regardless of whether persistent license is enabled or not.
+ if (!MediaDrmBridge::IsPersistentLicenseTypeSupported(key_system)) {
+ CreateMediaDrmBridge("");
+ return;
+ }
+
+ storage_->Initialize(
+ create_storage_cb_,
+ base::BindOnce(&MediaDrmBridgeFactory::OnStorageInitialized,
+ weak_factory_.GetWeakPtr()));
+}
+
+void MediaDrmBridgeFactory::OnStorageInitialized() {
+ DCHECK(storage_);
+
+ // MediaDrmStorageBridge should always return a valid origin ID after
+ // initialize. Otherwise the pipe is broken and we should not create
+ // MediaDrmBridge here.
+ auto origin_id = storage_->origin_id();
+ DVLOG(2) << __func__ << ": origin_id = " << origin_id;
+ if (origin_id.empty()) {
+ std::move(cdm_created_cb_).Run(nullptr, "Cannot fetch origin ID");
+ return;
+ }
+
+ CreateMediaDrmBridge(origin_id);
+}
+
+void MediaDrmBridgeFactory::CreateMediaDrmBridge(const std::string& origin_id) {
+ DCHECK(!media_drm_bridge_);
+
+ // Requires MediaCrypto so that it can be used by MediaCodec-based decoders.
+ const bool requires_media_crypto = true;
+
+ media_drm_bridge_ = MediaDrmBridge::CreateInternal(
+ scheme_uuid_, origin_id, security_level_, requires_media_crypto,
+ std::move(storage_), create_fetcher_cb_, session_message_cb_,
+ session_closed_cb_, session_keys_change_cb_,
+ session_expiration_update_cb_);
+
+ if (!media_drm_bridge_) {
+ std::move(cdm_created_cb_).Run(nullptr, "MediaDrmBridge creation failed");
+ return;
+ }
+
+ media_drm_bridge_->SetMediaCryptoReadyCB(base::BindRepeating(
+ &MediaDrmBridgeFactory::OnMediaCryptoReady, weak_factory_.GetWeakPtr()));
+}
+
+void MediaDrmBridgeFactory::OnMediaCryptoReady(
+ JavaObjectPtr media_crypto,
+ bool requires_secure_video_codec) {
+ DCHECK(media_crypto);
+ if (media_crypto->is_null()) {
+ media_drm_bridge_ = nullptr;
+ std::move(cdm_created_cb_).Run(nullptr, "MediaCrypto not available");
+ return;
+ }
+
+ std::move(cdm_created_cb_).Run(media_drm_bridge_, "");
+}
+
+} // namespace media
diff --git a/chromium/media/base/android/media_drm_bridge_factory.h b/chromium/media/base/android/media_drm_bridge_factory.h
new file mode 100644
index 00000000000..ad68881fa5c
--- /dev/null
+++ b/chromium/media/base/android/media_drm_bridge_factory.h
@@ -0,0 +1,82 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ANDROID_MEDIA_DRM_BRIDGE_FACTORY_H_
+#define MEDIA_BASE_ANDROID_MEDIA_DRM_BRIDGE_FACTORY_H_
+
+#include <stdint.h>
+
+#include <memory>
+#include <vector>
+
+#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "media/base/android/media_drm_bridge.h"
+#include "media/base/android/media_drm_storage_bridge.h"
+#include "media/base/cdm_factory.h"
+#include "media/base/media_export.h"
+#include "media/base/provision_fetcher.h"
+
+namespace media {
+
+struct CdmConfig;
+
+// A factory for creating MediaDrmBridge. Only one MediaDrmBridge can be created
+// at any time.
+class MEDIA_EXPORT MediaDrmBridgeFactory : public CdmFactory {
+ public:
+ MediaDrmBridgeFactory(const CreateFetcherCB& create_fetcher_cb,
+ const CreateStorageCB& create_storage_cb);
+ ~MediaDrmBridgeFactory() final;
+
+ // CdmFactory implementation.
+ void Create(const std::string& key_system,
+ const url::Origin& security_origin,
+ const CdmConfig& cdm_config,
+ const SessionMessageCB& session_message_cb,
+ const SessionClosedCB& session_closed_cb,
+ const SessionKeysChangeCB& session_keys_change_cb,
+ const SessionExpirationUpdateCB& session_expiration_update_cb,
+ const CdmCreatedCB& cdm_created_cb) final;
+
+ private:
+ // Callback for Initialize() on |storage_|.
+ void OnStorageInitialized();
+
+ // Creates |media_drm_bridge_|, and call SetMediaCryptoReadyCB() to wait for
+ // MediaCrypto to be ready.
+ void CreateMediaDrmBridge(const std::string& origin_id);
+
+ // Callback for SetMediaCryptoReadyCB() on |media_drm_bridge_|.
+ void OnMediaCryptoReady(JavaObjectPtr media_crypto,
+ bool requires_secure_video_codec);
+
+ CreateFetcherCB create_fetcher_cb_;
+ CreateStorageCB create_storage_cb_;
+
+ std::vector<uint8_t> scheme_uuid_;
+
+ MediaDrmBridge::SecurityLevel security_level_ =
+ MediaDrmBridge::SECURITY_LEVEL_DEFAULT;
+
+ SessionMessageCB session_message_cb_;
+ SessionClosedCB session_closed_cb_;
+ SessionKeysChangeCB session_keys_change_cb_;
+ SessionExpirationUpdateCB session_expiration_update_cb_;
+
+ // TODO(xhwang): Make CdmCreatedCB an OnceCallback.
+ using CdmCreatedOnceCB = base::OnceCallback<CdmCreatedCB::RunType>;
+ CdmCreatedOnceCB cdm_created_cb_;
+
+ std::unique_ptr<MediaDrmStorageBridge> storage_;
+ scoped_refptr<MediaDrmBridge> media_drm_bridge_;
+
+ base::WeakPtrFactory<MediaDrmBridgeFactory> weak_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(MediaDrmBridgeFactory);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_ANDROID_MEDIA_DRM_BRIDGE_FACTORY_H_
diff --git a/chromium/media/base/android/mock_media_crypto_context.cc b/chromium/media/base/android/mock_media_crypto_context.cc
index 38600a1271f..cb0fbd42adf 100644
--- a/chromium/media/base/android/mock_media_crypto_context.cc
+++ b/chromium/media/base/android/mock_media_crypto_context.cc
@@ -17,7 +17,11 @@ using ::testing::_;
namespace media {
-MockMediaCryptoContext::MockMediaCryptoContext() {
+MockMediaCryptoContext::MockMediaCryptoContext(bool has_media_crypto_context)
+ : has_media_crypto_context_(has_media_crypto_context) {
+ if (!has_media_crypto_context_)
+ return;
+
// Provide some sane defaults.
ON_CALL(*this, RegisterPlayer(_, _))
.WillByDefault(DoAll(SaveArg<0>(&new_key_cb), SaveArg<1>(&cdm_unset_cb),
@@ -33,7 +37,7 @@ MockMediaCryptoContext::MockMediaCryptoContext() {
MockMediaCryptoContext::~MockMediaCryptoContext() {}
MediaCryptoContext* MockMediaCryptoContext::GetMediaCryptoContext() {
- return this;
+ return has_media_crypto_context_ ? this : nullptr;
}
} // namespace media
diff --git a/chromium/media/base/android/mock_media_crypto_context.h b/chromium/media/base/android/mock_media_crypto_context.h
index 857349522c1..ceb42465988 100644
--- a/chromium/media/base/android/mock_media_crypto_context.h
+++ b/chromium/media/base/android/mock_media_crypto_context.h
@@ -19,7 +19,7 @@ class MEDIA_EXPORT MockMediaCryptoContext
: public CdmContext,
public testing::NiceMock<MediaCryptoContext> {
public:
- MockMediaCryptoContext();
+ explicit MockMediaCryptoContext(bool has_media_crypto_context);
~MockMediaCryptoContext() override;
// CdmContext implementation.
@@ -40,6 +40,7 @@ class MEDIA_EXPORT MockMediaCryptoContext
MediaCryptoReadyCB media_crypto_ready_cb;
private:
+ bool has_media_crypto_context_;
DISALLOW_COPY_AND_ASSIGN(MockMediaCryptoContext);
};
diff --git a/chromium/media/base/audio_buffer.cc b/chromium/media/base/audio_buffer.cc
index aa587f7ecba..cf94d82a14f 100644
--- a/chromium/media/base/audio_buffer.cc
+++ b/chromium/media/base/audio_buffer.cc
@@ -395,6 +395,7 @@ void AudioBuffer::TrimRange(int start, int end) {
case kUnknownSampleFormat:
case kSampleFormatAc3:
case kSampleFormatEac3:
+ case kSampleFormatMpegHAudio:
NOTREACHED() << "Invalid sample format!";
}
} else {
diff --git a/chromium/media/base/audio_buffer_converter.cc b/chromium/media/base/audio_buffer_converter.cc
index d5cee38ba3b..4026a2c95dc 100644
--- a/chromium/media/base/audio_buffer_converter.cc
+++ b/chromium/media/base/audio_buffer_converter.cc
@@ -143,7 +143,6 @@ void AudioBufferConverter::ResetConverter(
input_params_.format(),
buffer->channel_layout(),
buffer->sample_rate(),
- input_params_.bits_per_sample(),
// If resampling is needed and the FIFO disabled, the AudioConverter will
// always request SincResampler::kDefaultRequestSize frames. Otherwise it
// will use the output frame size.
diff --git a/chromium/media/base/audio_buffer_converter_unittest.cc b/chromium/media/base/audio_buffer_converter_unittest.cc
index 506ea28e05a..06d88ec058a 100644
--- a/chromium/media/base/audio_buffer_converter_unittest.cc
+++ b/chromium/media/base/audio_buffer_converter_unittest.cc
@@ -40,7 +40,6 @@ class AudioBufferConverterTest : public ::testing::Test {
output_params_(AudioParameters::AUDIO_PCM_LOW_LATENCY,
kOutChannelLayout,
kOutSampleRate,
- 16,
kOutFrameSize) {
audio_buffer_converter_.reset(new AudioBufferConverter(output_params_));
}
@@ -208,7 +207,7 @@ TEST_F(AudioBufferConverterTest, ResetThenConvert) {
TEST_F(AudioBufferConverterTest, DiscreteChannelLayout) {
output_params_ =
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_DISCRETE, kOutSampleRate, 16, 512);
+ CHANNEL_LAYOUT_DISCRETE, kOutSampleRate, 512);
output_params_.set_channels_for_discrete(2);
audio_buffer_converter_.reset(new AudioBufferConverter(output_params_));
AddInput(MakeTestBuffer(kOutSampleRate, CHANNEL_LAYOUT_STEREO, 2, 512));
@@ -219,7 +218,6 @@ TEST_F(AudioBufferConverterTest, LargeBuffersResampling) {
output_params_ = AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
kOutChannelLayout,
kOutSampleRate,
- 16,
2048);
audio_buffer_converter_.reset(new AudioBufferConverter(output_params_));
diff --git a/chromium/media/base/audio_bus.cc b/chromium/media/base/audio_bus.cc
index b490718a66a..c21c6c93642 100644
--- a/chromium/media/base/audio_bus.cc
+++ b/chromium/media/base/audio_bus.cc
@@ -14,7 +14,6 @@
#include "base/memory/ptr_util.h"
#include "base/numerics/safe_conversions.h"
#include "media/base/audio_parameters.h"
-#include "media/base/audio_sample_types.h"
#include "media/base/limits.h"
#include "media/base/vector_math.h"
diff --git a/chromium/media/base/audio_bus.h b/chromium/media/base/audio_bus.h
index b78eabce302..5e55b5c7ed3 100644
--- a/chromium/media/base/audio_bus.h
+++ b/chromium/media/base/audio_bus.h
@@ -12,6 +12,7 @@
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
+#include "media/base/audio_sample_types.h"
#include "media/base/media_shmem_export.h"
namespace media {
diff --git a/chromium/media/base/audio_bus_unittest.cc b/chromium/media/base/audio_bus_unittest.cc
index a7e12f3493b..39f2f425f9e 100644
--- a/chromium/media/base/audio_bus_unittest.cc
+++ b/chromium/media/base/audio_bus_unittest.cc
@@ -139,7 +139,7 @@ TEST_F(AudioBusTest, Create) {
TEST_F(AudioBusTest, CreateUsingAudioParameters) {
std::unique_ptr<AudioBus> bus = AudioBus::Create(
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout,
- kSampleRate, 32, kFrameCount));
+ kSampleRate, kFrameCount));
VerifyChannelAndFrameCount(bus.get());
VerifyReadWriteAndAlignment(bus.get());
}
@@ -176,9 +176,8 @@ TEST_F(AudioBusTest, WrapVector) {
// Verify an AudioBus created via wrapping a memory block works as advertised.
TEST_F(AudioBusTest, WrapMemory) {
- AudioParameters params(
- AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout, kSampleRate, 32,
- kFrameCount);
+ AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout,
+ kSampleRate, kFrameCount);
int data_size = AudioBus::CalculateMemorySize(params);
std::unique_ptr<float, base::AlignedFreeDeleter> data(static_cast<float*>(
base::AlignedAlloc(data_size, AudioBus::kChannelAlignment)));
@@ -205,9 +204,8 @@ TEST_F(AudioBusTest, WrapMemory) {
TEST_F(AudioBusTest, CopyTo) {
// Create one bus with AudioParameters and the other through direct values to
// test for parity between the Create() functions.
- AudioParameters params(
- AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout, kSampleRate, 32,
- kFrameCount);
+ AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout,
+ kSampleRate, kFrameCount);
std::unique_ptr<AudioBus> bus1 = AudioBus::Create(kChannels, kFrameCount);
std::unique_ptr<AudioBus> bus2 = AudioBus::Create(params);
diff --git a/chromium/media/base/audio_capturer_source.h b/chromium/media/base/audio_capturer_source.h
index 6a2c0c0f2e0..9fe8142b390 100644
--- a/chromium/media/base/audio_capturer_source.h
+++ b/chromium/media/base/audio_capturer_source.h
@@ -49,15 +49,10 @@ class AudioCapturerSource
virtual ~CaptureCallback() {}
};
- // Sets information about the audio stream format and the device
- // to be used. It must be called before any of the other methods.
- // The |session_id| is used by the browser to identify which input device to
- // be used. For clients who do not care about device permission and device
- // selection, pass |session_id| using
- // AudioInputDeviceManager::kFakeOpenSessionId.
+ // Sets information about the audio stream format and the device to be used.
+ // It must be called exactly once before any of the other methods.
virtual void Initialize(const AudioParameters& params,
- CaptureCallback* callback,
- int session_id) = 0;
+ CaptureCallback* callback) = 0;
// Starts the audio recording.
virtual void Start() = 0;
@@ -72,6 +67,11 @@ class AudioCapturerSource
// Enables or disables the WebRtc AGC control.
virtual void SetAutomaticGainControl(bool enable) = 0;
+ // Sets the output device the source should cancel echo from, if
+ // supported. Must be called on the main thread. Device ids are gotten from
+ // device enumerations.
+ virtual void SetOutputDeviceForAec(const std::string& output_device_id) = 0;
+
protected:
friend class base::RefCountedThreadSafe<AudioCapturerSource>;
virtual ~AudioCapturerSource() {}
diff --git a/chromium/media/base/audio_codecs.cc b/chromium/media/base/audio_codecs.cc
index ea5bafa0229..10e0286b600 100644
--- a/chromium/media/base/audio_codecs.cc
+++ b/chromium/media/base/audio_codecs.cc
@@ -44,6 +44,8 @@ std::string GetCodecName(AudioCodec codec) {
return "alac";
case kCodecAC3:
return "ac3";
+ case kCodecMpegHAudio:
+ return "mpeg-h-audio";
}
NOTREACHED();
return "";
@@ -62,6 +64,8 @@ AudioCodec StringToAudioCodec(const std::string& codec_id) {
return kCodecALAC;
if (codec_id == "flac")
return kCodecFLAC;
+ if (base::StartsWith(codec_id, "mhm1.", base::CompareCase::SENSITIVE))
+ return kCodecMpegHAudio;
if (codec_id == "opus")
return kCodecOpus;
if (codec_id == "vorbis")
diff --git a/chromium/media/base/audio_codecs.h b/chromium/media/base/audio_codecs.h
index 1219d7acf24..756a3bb3293 100644
--- a/chromium/media/base/audio_codecs.h
+++ b/chromium/media/base/audio_codecs.h
@@ -32,13 +32,14 @@ enum AudioCodec {
kCodecPCM_ALAW = 14,
kCodecALAC = 15,
kCodecAC3 = 16,
+ kCodecMpegHAudio = 17,
// DO NOT ADD RANDOM AUDIO CODECS!
//
// The only acceptable time to add a new codec is if there is production code
// that uses said codec in the same CL.
// Must always be equal to the largest entry ever logged.
- kAudioCodecMax = kCodecAC3,
+ kAudioCodecMax = kCodecMpegHAudio,
};
std::string MEDIA_EXPORT GetCodecName(AudioCodec codec);
diff --git a/chromium/media/base/audio_converter_perftest.cc b/chromium/media/base/audio_converter_perftest.cc
index 34ec050332b..7ed529a1105 100644
--- a/chromium/media/base/audio_converter_perftest.cc
+++ b/chromium/media/base/audio_converter_perftest.cc
@@ -53,10 +53,10 @@ void RunConvertBenchmark(const AudioParameters& in_params,
TEST(AudioConverterPerfTest, ConvertBenchmark) {
// Create input and output parameters to convert between the two most common
// sets of parameters (as indicated via UMA data).
- AudioParameters input_params(
- AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO, 48000, 16, 2048);
- AudioParameters output_params(
- AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO, 44100, 16, 440);
+ AudioParameters input_params(AudioParameters::AUDIO_PCM_LINEAR,
+ CHANNEL_LAYOUT_MONO, 48000, 2048);
+ AudioParameters output_params(AudioParameters::AUDIO_PCM_LINEAR,
+ CHANNEL_LAYOUT_STEREO, 44100, 440);
RunConvertBenchmark(input_params, output_params, false, "convert");
}
@@ -67,10 +67,9 @@ TEST(AudioConverterPerfTest, ConvertBenchmarkFIFO) {
AudioParameters input_params(AudioParameters::AUDIO_PCM_LINEAR,
CHANNEL_LAYOUT_STEREO,
44100,
- 16,
2048);
- AudioParameters output_params(
- AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO, 44100, 16, 440);
+ AudioParameters output_params(AudioParameters::AUDIO_PCM_LINEAR,
+ CHANNEL_LAYOUT_STEREO, 44100, 440);
RunConvertBenchmark(input_params, output_params, true, "convert_fifo_only");
RunConvertBenchmark(input_params, output_params, false,
diff --git a/chromium/media/base/audio_converter_unittest.cc b/chromium/media/base/audio_converter_unittest.cc
index dabc5eae8a9..d05d8d062bf 100644
--- a/chromium/media/base/audio_converter_unittest.cc
+++ b/chromium/media/base/audio_converter_unittest.cc
@@ -23,7 +23,6 @@ static const int kConvertInputs = 8;
static const int kConvertCycles = 3;
// Parameters used for testing.
-static const int kBitsPerChannel = 32;
static const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
static const int kHighLatencyBufferSize = 2048;
static const int kLowLatencyBufferSize = 256;
@@ -39,12 +38,12 @@ class AudioConverterTest
public:
AudioConverterTest() : epsilon_(std::get<3>(GetParam())) {
// Create input and output parameters based on test parameters.
- input_parameters_ = AudioParameters(
- AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout,
- std::get<0>(GetParam()), kBitsPerChannel, kHighLatencyBufferSize);
+ input_parameters_ =
+ AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout,
+ std::get<0>(GetParam()), kHighLatencyBufferSize);
output_parameters_ = AudioParameters(
AudioParameters::AUDIO_PCM_LOW_LATENCY, std::get<2>(GetParam()),
- std::get<1>(GetParam()), 16, kLowLatencyBufferSize);
+ std::get<1>(GetParam()), kLowLatencyBufferSize);
converter_.reset(new AudioConverter(
input_parameters_, output_parameters_, false));
@@ -201,11 +200,11 @@ TEST(AudioConverterTest, AudioDelayAndDiscreteChannelCount) {
// multiple calls to fill the buffer.
AudioParameters input_parameters(AudioParameters::AUDIO_PCM_LINEAR,
CHANNEL_LAYOUT_DISCRETE, kSampleRate,
- kBitsPerChannel, kLowLatencyBufferSize);
+ kLowLatencyBufferSize);
input_parameters.set_channels_for_discrete(10);
AudioParameters output_parameters(AudioParameters::AUDIO_PCM_LINEAR,
CHANNEL_LAYOUT_DISCRETE, kSampleRate * 2,
- kBitsPerChannel, kHighLatencyBufferSize);
+ kHighLatencyBufferSize);
output_parameters.set_channels_for_discrete(5);
AudioConverter converter(input_parameters, output_parameters, false);
diff --git a/chromium/media/base/audio_decoder_config.cc b/chromium/media/base/audio_decoder_config.cc
index c81697b7ce7..7a3690ed399 100644
--- a/chromium/media/base/audio_decoder_config.cc
+++ b/chromium/media/base/audio_decoder_config.cc
@@ -10,15 +10,7 @@
namespace media {
-AudioDecoderConfig::AudioDecoderConfig()
- : codec_(kUnknownAudioCodec),
- sample_format_(kUnknownSampleFormat),
- bytes_per_channel_(0),
- channel_layout_(CHANNEL_LAYOUT_UNSUPPORTED),
- samples_per_second_(0),
- bytes_per_frame_(0),
- codec_delay_(0),
- should_discard_decoder_delay_(true) {}
+AudioDecoderConfig::AudioDecoderConfig() {}
AudioDecoderConfig::AudioDecoderConfig(
AudioCodec codec,
@@ -97,8 +89,9 @@ std::string AudioDecoderConfig::AsHumanReadableString() const {
<< " bytes_per_frame: " << bytes_per_frame()
<< " seek_preroll: " << seek_preroll().InMilliseconds() << "ms"
<< " codec_delay: " << codec_delay() << " has extra data? "
- << (extra_data().empty() ? "false" : "true") << " encrypted? "
- << (is_encrypted() ? "true" : "false") << " discard decoder delay? "
+ << (extra_data().empty() ? "false" : "true")
+ << " encryption scheme: " << encryption_scheme()
+ << " discard decoder delay? "
<< (should_discard_decoder_delay() ? "true" : "false");
return s.str();
}
diff --git a/chromium/media/base/audio_decoder_config.h b/chromium/media/base/audio_decoder_config.h
index a4328af4adf..b251bf7f025 100644
--- a/chromium/media/base/audio_decoder_config.h
+++ b/chromium/media/base/audio_decoder_config.h
@@ -102,17 +102,32 @@ class MEDIA_EXPORT AudioDecoderConfig {
should_discard_decoder_delay_ = false;
}
+ // Optionally set by renderer to provide hardware layout when playback
+ // starts. Intentionally not part of IsValid(). Layout is not updated for
+ // device changes - use with care!
+ void set_target_output_channel_layout(ChannelLayout output_layout) {
+ target_output_channel_layout_ = output_layout;
+ }
+ ChannelLayout target_output_channel_layout() const {
+ return target_output_channel_layout_;
+ }
+
private:
- AudioCodec codec_;
- SampleFormat sample_format_;
- int bytes_per_channel_;
- ChannelLayout channel_layout_;
- int channels_;
- int samples_per_second_;
- int bytes_per_frame_;
+ AudioCodec codec_ = kUnknownAudioCodec;
+ SampleFormat sample_format_ = kUnknownSampleFormat;
+ int bytes_per_channel_ = 0;
+ int samples_per_second_ = 0;
+ int bytes_per_frame_ = 0;
std::vector<uint8_t> extra_data_;
EncryptionScheme encryption_scheme_;
+ // Layout and count of the *stream* being decoded.
+ ChannelLayout channel_layout_ = CHANNEL_LAYOUT_UNSUPPORTED;
+ int channels_ = 0;
+
+ // Layout of the output hardware. Optionally set. See setter comments.
+ ChannelLayout target_output_channel_layout_ = CHANNEL_LAYOUT_NONE;
+
// |seek_preroll_| is the duration of the data that the decoder must decode
// before the decoded data is valid.
base::TimeDelta seek_preroll_;
@@ -120,11 +135,11 @@ class MEDIA_EXPORT AudioDecoderConfig {
// |codec_delay_| is the number of frames the decoder should discard before
// returning decoded data. This value can include both decoder delay as well
// as padding added during encoding.
- int codec_delay_;
+ int codec_delay_ = 0;
// Indicates if a decoder should implicitly discard decoder delay without it
// being explicitly marked in discard padding.
- bool should_discard_decoder_delay_;
+ bool should_discard_decoder_delay_ = true;
// Not using DISALLOW_COPY_AND_ASSIGN here intentionally to allow the compiler
// generated copy constructor and assignment operator. Since the extra data is
diff --git a/chromium/media/base/audio_parameters.cc b/chromium/media/base/audio_parameters.cc
index b8dcb4989f1..bacbef8a8d8 100644
--- a/chromium/media/base/audio_parameters.cc
+++ b/chromium/media/base/audio_parameters.cc
@@ -56,16 +56,14 @@ uint32_t ComputeAudioOutputBufferSize(int channels, int frames) {
}
AudioParameters::AudioParameters()
- : AudioParameters(AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_NONE, 0, 0, 0) {}
+ : AudioParameters(AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_NONE, 0, 0) {}
AudioParameters::AudioParameters(Format format,
ChannelLayout channel_layout,
int sample_rate,
- int bits_per_sample,
int frames_per_buffer)
: latency_tag_(AudioLatency::LATENCY_COUNT) {
- Reset(format, channel_layout, sample_rate, bits_per_sample,
- frames_per_buffer);
+ Reset(format, channel_layout, sample_rate, frames_per_buffer);
}
AudioParameters::~AudioParameters() = default;
@@ -76,13 +74,11 @@ AudioParameters& AudioParameters::operator=(const AudioParameters&) = default;
void AudioParameters::Reset(Format format,
ChannelLayout channel_layout,
int sample_rate,
- int bits_per_sample,
int frames_per_buffer) {
format_ = format;
channel_layout_ = channel_layout;
channels_ = ChannelLayoutToChannelCount(channel_layout);
sample_rate_ = sample_rate;
- bits_per_sample_ = bits_per_sample;
frames_per_buffer_ = frames_per_buffer;
effects_ = NO_EFFECTS;
mic_positions_.clear();
@@ -93,8 +89,6 @@ bool AudioParameters::IsValid() const {
(channel_layout_ > CHANNEL_LAYOUT_UNSUPPORTED) &&
(sample_rate_ >= media::limits::kMinSampleRate) &&
(sample_rate_ <= media::limits::kMaxSampleRate) &&
- (bits_per_sample_ > 0) &&
- (bits_per_sample_ <= media::limits::kMaxBitsPerSample) &&
(frames_per_buffer_ > 0) &&
(frames_per_buffer_ <= media::limits::kMaxSamplesPerPacket) &&
(channel_layout_ == CHANNEL_LAYOUT_DISCRETE ||
@@ -105,23 +99,18 @@ std::string AudioParameters::AsHumanReadableString() const {
std::ostringstream s;
s << "format: " << format() << " channel_layout: " << channel_layout()
<< " channels: " << channels() << " sample_rate: " << sample_rate()
- << " bits_per_sample: " << bits_per_sample()
<< " frames_per_buffer: " << frames_per_buffer()
<< " effects: " << effects()
<< " mic_positions: " << PointsToString(mic_positions_);
return s.str();
}
-int AudioParameters::GetBytesPerBuffer() const {
- return frames_per_buffer_ * GetBytesPerFrame();
+int AudioParameters::GetBytesPerBuffer(SampleFormat fmt) const {
+ return GetBytesPerFrame(fmt) * frames_per_buffer_;
}
-int AudioParameters::GetBytesPerSecond() const {
- return sample_rate_ * GetBytesPerFrame();
-}
-
-int AudioParameters::GetBytesPerFrame() const {
- return channels_ * bits_per_sample_ / 8;
+int AudioParameters::GetBytesPerFrame(SampleFormat fmt) const {
+ return channels_ * SampleFormatToBytesPerChannel(fmt);
}
double AudioParameters::GetMicrosecondsPerFrame() const {
@@ -138,7 +127,6 @@ bool AudioParameters::Equals(const AudioParameters& other) const {
return format_ == other.format() && sample_rate_ == other.sample_rate() &&
channel_layout_ == other.channel_layout() &&
channels_ == other.channels() &&
- bits_per_sample_ == other.bits_per_sample() &&
frames_per_buffer_ == other.frames_per_buffer() &&
effects_ == other.effects() && mic_positions_ == other.mic_positions_;
}
@@ -154,7 +142,7 @@ AudioParameters AudioParameters::UnavailableDeviceParams() {
// significantly differs from 10 ms used there, see http://crbug/701000.
return media::AudioParameters(
media::AudioParameters::AUDIO_FAKE, media::CHANNEL_LAYOUT_STEREO,
- media::AudioParameters::kAudioCDSampleRate, 16,
+ media::AudioParameters::kAudioCDSampleRate,
media::AudioParameters::kAudioCDSampleRate / 100);
}
diff --git a/chromium/media/base/audio_parameters.h b/chromium/media/base/audio_parameters.h
index ee0f0cf0fce..de18c65d95f 100644
--- a/chromium/media/base/audio_parameters.h
+++ b/chromium/media/base/audio_parameters.h
@@ -18,6 +18,7 @@
#include "media/base/audio_point.h"
#include "media/base/channel_layout.h"
#include "media/base/media_shmem_export.h"
+#include "media/base/sample_format.h"
namespace media {
@@ -149,7 +150,6 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
AudioParameters(Format format,
ChannelLayout channel_layout,
int sample_rate,
- int bits_per_sample,
int frames_per_buffer);
~AudioParameters();
@@ -158,7 +158,6 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
void Reset(Format format,
ChannelLayout channel_layout,
int sample_rate,
- int bits_per_sample,
int frames_per_buffer);
// Checks that all values are in the expected range. All limits are specified
@@ -169,14 +168,12 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
// output only.
std::string AsHumanReadableString() const;
- // Returns size of audio buffer in bytes.
- int GetBytesPerBuffer() const;
+ // Returns size of audio buffer in bytes when using |fmt| for samples.
+ int GetBytesPerBuffer(SampleFormat fmt) const;
- // Returns the number of bytes representing one second of audio.
- int GetBytesPerSecond() const;
-
- // Returns the number of bytes representing a frame of audio.
- int GetBytesPerFrame() const;
+ // Returns the number of bytes representing a frame of audio when using |fmt|
+ // for samples.
+ int GetBytesPerFrame(SampleFormat fmt) const;
// Returns the number of microseconds per frame of audio. Intentionally
// reported as a double to surface of partial microseconds per frame, which
@@ -212,11 +209,6 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
void set_sample_rate(int sample_rate) { sample_rate_ = sample_rate; }
int sample_rate() const { return sample_rate_; }
- void set_bits_per_sample(int bits_per_sample) {
- bits_per_sample_ = bits_per_sample;
- }
- int bits_per_sample() const { return bits_per_sample_; }
-
void set_frames_per_buffer(int frames_per_buffer) {
frames_per_buffer_ = frames_per_buffer;
}
@@ -247,7 +239,6 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
int channels_; // Number of channels. Value set based on
// |channel_layout|.
int sample_rate_; // Sampling frequency/rate.
- int bits_per_sample_; // Number of bits per sample.
int frames_per_buffer_; // Number of frames in a buffer.
int effects_; // Bitmask using PlatformEffectsMask.
@@ -277,8 +268,6 @@ inline bool operator<(const AudioParameters& a, const AudioParameters& b) {
return a.channels() < b.channels();
if (a.sample_rate() != b.sample_rate())
return a.sample_rate() < b.sample_rate();
- if (a.bits_per_sample() != b.bits_per_sample())
- return a.bits_per_sample() < b.bits_per_sample();
return a.frames_per_buffer() < b.frames_per_buffer();
}
diff --git a/chromium/media/base/audio_parameters_unittest.cc b/chromium/media/base/audio_parameters_unittest.cc
index b07d7101c40..d3ecf4be962 100644
--- a/chromium/media/base/audio_parameters_unittest.cc
+++ b/chromium/media/base/audio_parameters_unittest.cc
@@ -13,7 +13,6 @@ namespace media {
TEST(AudioParameters, Constructor_Default) {
AudioParameters::Format expected_format = AudioParameters::AUDIO_PCM_LINEAR;
- int expected_bits = 0;
int expected_channels = 0;
ChannelLayout expected_channel_layout = CHANNEL_LAYOUT_NONE;
int expected_rate = 0;
@@ -25,7 +24,6 @@ TEST(AudioParameters, Constructor_Default) {
AudioParameters params;
EXPECT_EQ(expected_format, params.format());
- EXPECT_EQ(expected_bits, params.bits_per_sample());
EXPECT_EQ(expected_channels, params.channels());
EXPECT_EQ(expected_channel_layout, params.channel_layout());
EXPECT_EQ(expected_rate, params.sample_rate());
@@ -37,17 +35,15 @@ TEST(AudioParameters, Constructor_Default) {
TEST(AudioParameters, Constructor_ParameterValues) {
AudioParameters::Format expected_format =
AudioParameters::AUDIO_PCM_LOW_LATENCY;
- int expected_bits = 16;
int expected_channels = 6;
ChannelLayout expected_channel_layout = CHANNEL_LAYOUT_5_1;
int expected_rate = 44100;
int expected_samples = 880;
AudioParameters params(expected_format, expected_channel_layout,
- expected_rate, expected_bits, expected_samples);
+ expected_rate, expected_samples);
EXPECT_EQ(expected_format, params.format());
- EXPECT_EQ(expected_bits, params.bits_per_sample());
EXPECT_EQ(expected_channels, params.channels());
EXPECT_EQ(expected_channel_layout, params.channel_layout());
EXPECT_EQ(expected_rate, params.sample_rate());
@@ -56,147 +52,81 @@ TEST(AudioParameters, Constructor_ParameterValues) {
TEST(AudioParameters, GetBytesPerBuffer) {
EXPECT_EQ(100, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_MONO, 1000, 8, 100)
- .GetBytesPerBuffer());
+ CHANNEL_LAYOUT_MONO, 1000, 100)
+ .GetBytesPerBuffer(kSampleFormatU8));
EXPECT_EQ(200, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_MONO, 1000, 16, 100)
- .GetBytesPerBuffer());
+ CHANNEL_LAYOUT_MONO, 1000, 100)
+ .GetBytesPerBuffer(kSampleFormatS16));
EXPECT_EQ(200, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 1000, 8, 100)
- .GetBytesPerBuffer());
+ CHANNEL_LAYOUT_STEREO, 1000, 100)
+ .GetBytesPerBuffer(kSampleFormatU8));
EXPECT_EQ(200, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_MONO, 1000, 8, 200)
- .GetBytesPerBuffer());
+ CHANNEL_LAYOUT_MONO, 1000, 200)
+ .GetBytesPerBuffer(kSampleFormatU8));
EXPECT_EQ(800, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 1000, 16, 200)
- .GetBytesPerBuffer());
- EXPECT_EQ(
- 300, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 8, 100)
- .GetBytesPerBuffer());
-}
-
-TEST(AudioParameters, GetBytesPerSecond) {
- EXPECT_EQ(0, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_NONE, 0, 0, 0)
- .GetBytesPerSecond());
- EXPECT_EQ(0, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 0, 0, 0)
- .GetBytesPerSecond());
- EXPECT_EQ(0, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_NONE, 100, 0, 0)
- .GetBytesPerSecond());
- EXPECT_EQ(0, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_NONE, 0, 8, 0)
- .GetBytesPerSecond());
- EXPECT_EQ(200, AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO, 100, 8, 0)
- .GetBytesPerSecond());
+ CHANNEL_LAYOUT_STEREO, 1000, 200)
+ .GetBytesPerBuffer(kSampleFormatS16));
+ EXPECT_EQ(300,
+ AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 100)
+ .GetBytesPerBuffer(kSampleFormatU8));
}
TEST(AudioParameters, Compare) {
AudioParameters values[] = {
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 1000, 8, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 1000, 8, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 1000, 16, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 1000, 16, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 2000, 8, 100),
+ 1000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 2000, 8, 200),
+ 1000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 2000, 16, 100),
+ 2000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_MONO,
- 2000, 16, 200),
+ 2000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 1000, 8, 100),
+ 1000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 1000, 8, 200),
+ 1000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 1000, 16, 100),
+ 2000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 1000, 16, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 2000, 8, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 2000, 8, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 2000, 16, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, CHANNEL_LAYOUT_STEREO,
- 2000, 16, 200),
+ 2000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 8, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 8, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 16, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 16, 200),
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 8, 100),
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 8, 200),
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 16, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LINEAR,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 16, 200),
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, 1000, 8, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, 1000, 8, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, 1000, 16, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, 1000, 16, 200),
+ CHANNEL_LAYOUT_MONO, 1000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, 2000, 8, 100),
+ CHANNEL_LAYOUT_MONO, 1000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, 2000, 8, 200),
+ CHANNEL_LAYOUT_MONO, 2000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, 2000, 16, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_MONO, 2000, 16, 200),
+ CHANNEL_LAYOUT_MONO, 2000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 1000, 8, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 1000, 8, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 1000, 16, 100),
+ CHANNEL_LAYOUT_STEREO, 1000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 1000, 16, 200),
+ CHANNEL_LAYOUT_STEREO, 1000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 2000, 8, 100),
+ CHANNEL_LAYOUT_STEREO, 2000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 2000, 8, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 2000, 16, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 2000, 16, 200),
+ CHANNEL_LAYOUT_STEREO, 2000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 8, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 8, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 16, 100),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 16, 200),
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 8, 100),
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 8, 200),
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 1000, 200),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 16, 100),
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 100),
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 16, 200),
+ CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC, 2000, 200),
};
for (size_t i = 0; i < arraysize(values); ++i) {
@@ -216,7 +146,7 @@ TEST(AudioParameters, Constructor_ValidChannelCounts) {
ChannelLayout expected_layout = CHANNEL_LAYOUT_DISCRETE;
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- expected_layout, 44100, 16, 880);
+ expected_layout, 44100, 880);
params.set_channels_for_discrete(expected_channels);
EXPECT_EQ(expected_channels, params.channels());
EXPECT_EQ(expected_layout, params.channel_layout());
diff --git a/chromium/media/base/audio_renderer_mixer_input.cc b/chromium/media/base/audio_renderer_mixer_input.cc
index 513f3da7aae..5d6164c9d67 100644
--- a/chromium/media/base/audio_renderer_mixer_input.cc
+++ b/chromium/media/base/audio_renderer_mixer_input.cc
@@ -19,7 +19,6 @@ AudioRendererMixerInput::AudioRendererMixerInput(
AudioRendererMixerPool* mixer_pool,
int owner_id,
const std::string& device_id,
- const url::Origin& security_origin,
AudioLatency::LatencyType latency)
: mixer_pool_(mixer_pool),
started_(false),
@@ -27,7 +26,6 @@ AudioRendererMixerInput::AudioRendererMixerInput(
volume_(1.0f),
owner_id_(owner_id),
device_id_(device_id),
- security_origin_(security_origin),
latency_(latency),
mixer_(nullptr),
callback_(nullptr),
@@ -58,8 +56,8 @@ void AudioRendererMixerInput::Start() {
DCHECK(callback_); // Initialized.
started_ = true;
- mixer_ = mixer_pool_->GetMixer(owner_id_, params_, latency_, device_id_,
- security_origin_, nullptr);
+ mixer_ =
+ mixer_pool_->GetMixer(owner_id_, params_, latency_, device_id_, nullptr);
if (!mixer_) {
callback_->OnRenderError();
return;
@@ -109,10 +107,9 @@ bool AudioRendererMixerInput::SetVolume(double volume) {
}
OutputDeviceInfo AudioRendererMixerInput::GetOutputDeviceInfo() {
- return mixer_
- ? mixer_->GetOutputDeviceInfo()
- : mixer_pool_->GetOutputDeviceInfo(owner_id_, 0 /* session_id */,
- device_id_, security_origin_);
+ return mixer_ ? mixer_->GetOutputDeviceInfo()
+ : mixer_pool_->GetOutputDeviceInfo(
+ owner_id_, 0 /* session_id */, device_id_);
}
bool AudioRendererMixerInput::IsOptimizedForHardwareParameters() {
@@ -125,7 +122,6 @@ bool AudioRendererMixerInput::CurrentThreadIsRenderingThread() {
void AudioRendererMixerInput::SwitchOutputDevice(
const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) {
if (device_id == device_id_) {
callback.Run(OUTPUT_DEVICE_STATUS_OK);
@@ -134,9 +130,8 @@ void AudioRendererMixerInput::SwitchOutputDevice(
if (mixer_) {
OutputDeviceStatus new_mixer_status = OUTPUT_DEVICE_STATUS_ERROR_INTERNAL;
- AudioRendererMixer* new_mixer =
- mixer_pool_->GetMixer(owner_id_, params_, latency_, device_id,
- security_origin, &new_mixer_status);
+ AudioRendererMixer* new_mixer = mixer_pool_->GetMixer(
+ owner_id_, params_, latency_, device_id, &new_mixer_status);
if (new_mixer_status != OUTPUT_DEVICE_STATUS_OK) {
callback.Run(new_mixer_status);
return;
@@ -145,7 +140,6 @@ void AudioRendererMixerInput::SwitchOutputDevice(
bool was_playing = playing_;
Stop();
device_id_ = device_id;
- security_origin_ = security_origin;
mixer_ = new_mixer;
mixer_->AddErrorCallback(error_cb_);
started_ = true;
@@ -156,15 +150,13 @@ void AudioRendererMixerInput::SwitchOutputDevice(
} else {
OutputDeviceStatus new_mixer_status =
mixer_pool_
- ->GetOutputDeviceInfo(owner_id_, 0 /* session_id */, device_id,
- security_origin)
+ ->GetOutputDeviceInfo(owner_id_, 0 /* session_id */, device_id)
.device_status();
if (new_mixer_status != OUTPUT_DEVICE_STATUS_OK) {
callback.Run(new_mixer_status);
return;
}
device_id_ = device_id;
- security_origin_ = security_origin;
}
callback.Run(OUTPUT_DEVICE_STATUS_OK);
diff --git a/chromium/media/base/audio_renderer_mixer_input.h b/chromium/media/base/audio_renderer_mixer_input.h
index 62ae3a7de37..8109faca65a 100644
--- a/chromium/media/base/audio_renderer_mixer_input.h
+++ b/chromium/media/base/audio_renderer_mixer_input.h
@@ -24,7 +24,6 @@
#include "media/base/audio_converter.h"
#include "media/base/audio_latency.h"
#include "media/base/audio_renderer_sink.h"
-#include "url/origin.h"
namespace media {
@@ -38,7 +37,6 @@ class MEDIA_EXPORT AudioRendererMixerInput
AudioRendererMixerInput(AudioRendererMixerPool* mixer_pool,
int owner_id,
const std::string& device_id,
- const url::Origin& security_origin,
AudioLatency::LatencyType latency);
// SwitchableAudioRendererSink implementation.
@@ -52,7 +50,6 @@ class MEDIA_EXPORT AudioRendererMixerInput
void Initialize(const AudioParameters& params,
AudioRendererSink::RenderCallback* renderer) override;
void SwitchOutputDevice(const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) override;
// This is expected to be called on the audio rendering thread. The caller
// must ensure that this input has been added to a mixer before calling the
@@ -88,7 +85,6 @@ class MEDIA_EXPORT AudioRendererMixerInput
const int owner_id_;
std::string device_id_; // ID of hardware device to use
- url::Origin security_origin_;
const AudioLatency::LatencyType latency_;
// AudioRendererMixer obtained from mixer pool during Initialize(),
diff --git a/chromium/media/base/audio_renderer_mixer_input_unittest.cc b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
index 309956c99d4..7b6013c84cf 100644
--- a/chromium/media/base/audio_renderer_mixer_input_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
@@ -25,7 +25,6 @@ void LogUma(int value) {}
namespace media {
-static const int kBitsPerChannel = 16;
static const int kSampleRate = 48000;
static const int kBufferSize = 8192;
static const int kRenderFrameId = 42;
@@ -39,9 +38,9 @@ class AudioRendererMixerInputTest : public testing::Test,
AudioRendererMixerPool {
public:
AudioRendererMixerInputTest() {
- audio_parameters_ = AudioParameters(
- AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout, kSampleRate,
- kBitsPerChannel, kBufferSize);
+ audio_parameters_ =
+ AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout,
+ kSampleRate, kBufferSize);
CreateMixerInput(kDefaultDeviceId);
fake_callback_.reset(new FakeAudioRenderCallback(0, kSampleRate));
@@ -50,7 +49,7 @@ class AudioRendererMixerInputTest : public testing::Test,
void CreateMixerInput(const std::string& device_id) {
mixer_input_ = new AudioRendererMixerInput(this, kRenderFrameId, device_id,
- url::Origin(),
+
AudioLatency::LATENCY_PLAYBACK);
}
@@ -58,8 +57,7 @@ class AudioRendererMixerInputTest : public testing::Test,
const AudioParameters& params,
AudioLatency::LatencyType latency,
const std::string& device_id,
- const url::Origin& security_origin,
- OutputDeviceStatus* device_status) {
+ OutputDeviceStatus* device_status) override {
EXPECT_TRUE(params.IsValid());
if (device_id == kNonexistentDeviceId) {
if (device_status)
@@ -96,8 +94,7 @@ class AudioRendererMixerInputTest : public testing::Test,
OutputDeviceInfo GetOutputDeviceInfo(int source_render_frame_id,
int session_id,
- const std::string& device_id,
- const url::Origin& security_origin) {
+ const std::string& device_id) override {
OutputDeviceStatus status = OUTPUT_DEVICE_STATUS_OK;
if (device_id == kNonexistentDeviceId)
status = OUTPUT_DEVICE_STATUS_ERROR_NOT_FOUND;
@@ -123,7 +120,7 @@ class AudioRendererMixerInputTest : public testing::Test,
AudioRendererMixer* GetInputMixer() { return mixer_input_->mixer_; }
protected:
- virtual ~AudioRendererMixerInputTest() = default;
+ ~AudioRendererMixerInputTest() override = default;
base::test::ScopedTaskEnvironment scoped_task_environment_;
AudioParameters audio_parameters_;
@@ -225,9 +222,8 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDevice) {
EXPECT_EQ(old_mixer, mixers_[0].get());
base::RunLoop run_loop;
mixer_input_->SwitchOutputDevice(
- kDeviceId, url::Origin(),
- base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
+ kDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
+ base::Unretained(this), &run_loop));
run_loop.Run();
AudioRendererMixer* new_mixer = GetInputMixer();
EXPECT_EQ(new_mixer, mixers_[1].get());
@@ -245,9 +241,8 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToSameDevice) {
AudioRendererMixer* old_mixer = GetInputMixer();
base::RunLoop run_loop;
mixer_input_->SwitchOutputDevice(
- kDefaultDeviceId, url::Origin(),
- base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
+ kDefaultDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
+ base::Unretained(this), &run_loop));
run_loop.Run();
AudioRendererMixer* new_mixer = GetInputMixer();
EXPECT_EQ(old_mixer, new_mixer);
@@ -264,9 +259,8 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToAnotherDevice) {
AudioRendererMixer* old_mixer = GetInputMixer();
base::RunLoop run_loop;
mixer_input_->SwitchOutputDevice(
- kAnotherDeviceId, url::Origin(),
- base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
+ kAnotherDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
+ base::Unretained(this), &run_loop));
run_loop.Run();
AudioRendererMixer* new_mixer = GetInputMixer();
EXPECT_NE(old_mixer, new_mixer);
@@ -283,7 +277,7 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToNonexistentDevice) {
SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_ERROR_NOT_FOUND));
base::RunLoop run_loop;
mixer_input_->SwitchOutputDevice(
- kNonexistentDeviceId, url::Origin(),
+ kNonexistentDeviceId,
base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
base::Unretained(this), &run_loop));
run_loop.Run();
@@ -300,7 +294,7 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceToUnauthorizedDevice) {
SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_ERROR_NOT_AUTHORIZED));
base::RunLoop run_loop;
mixer_input_->SwitchOutputDevice(
- kUnauthorizedDeviceId, url::Origin(),
+ kUnauthorizedDeviceId,
base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
base::Unretained(this), &run_loop));
run_loop.Run();
@@ -315,9 +309,8 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceBeforeStart) {
base::RunLoop run_loop;
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
mixer_input_->SwitchOutputDevice(
- kAnotherDeviceId, url::Origin(),
- base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
+ kAnotherDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
+ base::Unretained(this), &run_loop));
mixer_input_->Start();
run_loop.Run();
mixer_input_->Stop();
@@ -332,9 +325,8 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceWithoutStart) {
base::RunLoop run_loop;
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
mixer_input_->SwitchOutputDevice(
- kAnotherDeviceId, url::Origin(),
- base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
+ kAnotherDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
+ base::Unretained(this), &run_loop));
mixer_input_->Stop();
run_loop.Run();
}
@@ -357,9 +349,8 @@ TEST_F(AudioRendererMixerInputTest, CreateWithInvalidDevice) {
base::RunLoop run_loop;
EXPECT_CALL(*this, SwitchCallbackCalled(testing::_));
mixer_input_->SwitchOutputDevice(
- kDefaultDeviceId, url::Origin(),
- base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
+ kDefaultDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
+ base::Unretained(this), &run_loop));
mixer_input_->Stop();
run_loop.Run();
}
@@ -376,9 +367,8 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceAfterStopBeforeRestart) {
base::RunLoop run_loop;
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
mixer_input_->SwitchOutputDevice(
- kAnotherDeviceId, url::Origin(),
- base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
+ kAnotherDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
+ base::Unretained(this), &run_loop));
run_loop.Run();
mixer_input_->Start();
@@ -394,9 +384,8 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceBeforeInitialize) {
base::RunLoop run_loop;
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
mixer_input_->SwitchOutputDevice(
- kAnotherDeviceId, url::Origin(),
- base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
- base::Unretained(this), &run_loop));
+ kAnotherDeviceId, base::Bind(&AudioRendererMixerInputTest::SwitchCallback,
+ base::Unretained(this), &run_loop));
run_loop.Run();
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
diff --git a/chromium/media/base/audio_renderer_mixer_pool.h b/chromium/media/base/audio_renderer_mixer_pool.h
index a26bf338f33..760f3eed032 100644
--- a/chromium/media/base/audio_renderer_mixer_pool.h
+++ b/chromium/media/base/audio_renderer_mixer_pool.h
@@ -10,10 +10,6 @@
#include "media/base/audio_latency.h"
#include "media/base/output_device_info.h"
-namespace url {
-class Origin;
-}
-
namespace media {
class AudioParameters;
class AudioRendererMixer;
@@ -32,7 +28,6 @@ class MEDIA_EXPORT AudioRendererMixerPool {
const AudioParameters& params,
AudioLatency::LatencyType latency,
const std::string& device_id,
- const url::Origin& security_origin,
OutputDeviceStatus* device_status) = 0;
// Returns mixer back to the pool, must be called when the mixer is not needed
@@ -43,8 +38,7 @@ class MEDIA_EXPORT AudioRendererMixerPool {
virtual OutputDeviceInfo GetOutputDeviceInfo(
int owner_id,
int session_id,
- const std::string& device_id,
- const url::Origin& security_origin) = 0;
+ const std::string& device_id) = 0;
private:
DISALLOW_COPY_AND_ASSIGN(AudioRendererMixerPool);
diff --git a/chromium/media/base/audio_renderer_mixer_unittest.cc b/chromium/media/base/audio_renderer_mixer_unittest.cc
index 4ca1b9c8466..3e615bce905 100644
--- a/chromium/media/base/audio_renderer_mixer_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_unittest.cc
@@ -6,8 +6,11 @@
#include <stddef.h>
+#include <algorithm>
+#include <limits>
#include <memory>
#include <tuple>
+#include <vector>
#include "base/bind.h"
#include "base/bind_helpers.h"
@@ -33,7 +36,6 @@ const int kOddMixerInputs = 7;
const int kMixerCycles = 3;
// Parameters used for testing.
-const int kBitsPerChannel = 32;
const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
const int kHighLatencyBufferSize = 8192;
const int kLowLatencyBufferSize = 256;
@@ -61,14 +63,14 @@ class AudioRendererMixerTest
const int* const sample_rates = std::get<0>(GetParam());
size_t sample_rates_count = std::get<1>(GetParam());
for (size_t i = 0; i < sample_rates_count; ++i)
- input_parameters_.push_back(AudioParameters(
- AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout, sample_rates[i],
- kBitsPerChannel, kHighLatencyBufferSize));
+ input_parameters_.push_back(
+ AudioParameters(AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout,
+ sample_rates[i], kHighLatencyBufferSize));
// Create output parameters based on test parameters.
output_parameters_ =
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, kChannelLayout,
- std::get<2>(GetParam()), 16, kLowLatencyBufferSize);
+ std::get<2>(GetParam()), kLowLatencyBufferSize);
sink_ = new MockAudioRendererSink();
EXPECT_CALL(*sink_.get(), Start());
@@ -92,18 +94,16 @@ class AudioRendererMixerTest
const AudioParameters& params,
AudioLatency::LatencyType latency,
const std::string& device_id,
- const url::Origin& security_origin,
OutputDeviceStatus* device_status) final {
return mixer_.get();
};
- void ReturnMixer(AudioRendererMixer* mixer) {
+ void ReturnMixer(AudioRendererMixer* mixer) override {
EXPECT_EQ(mixer_.get(), mixer);
}
- MOCK_METHOD4(
- GetOutputDeviceInfo,
- OutputDeviceInfo(int, int, const std::string&, const url::Origin&));
+ MOCK_METHOD3(GetOutputDeviceInfo,
+ OutputDeviceInfo(int, int, const std::string&));
void InitializeInputs(int inputs_per_sample_rate) {
mixer_inputs_.reserve(inputs_per_sample_rate * input_parameters_.size());
@@ -334,10 +334,10 @@ class AudioRendererMixerTest
}
scoped_refptr<AudioRendererMixerInput> CreateMixerInput() {
- return new AudioRendererMixerInput(
- this,
- // Zero frame id, default device ID and security origin.
- 0, std::string(), url::Origin(), AudioLatency::LATENCY_PLAYBACK);
+ return new AudioRendererMixerInput(this,
+ // Zero frame id, default device ID.
+ 0, std::string(),
+ AudioLatency::LATENCY_PLAYBACK);
}
protected:
diff --git a/chromium/media/base/audio_renderer_sink.h b/chromium/media/base/audio_renderer_sink.h
index e806d29a2e3..112bbc90891 100644
--- a/chromium/media/base/audio_renderer_sink.h
+++ b/chromium/media/base/audio_renderer_sink.h
@@ -14,7 +14,6 @@
#include "media/base/audio_bus.h"
#include "media/base/audio_parameters.h"
#include "media/base/output_device_info.h"
-#include "url/origin.h"
namespace media {
@@ -104,7 +103,6 @@ class SwitchableAudioRendererSink : public RestartableAudioRendererSink {
// the media::OutputDeviceStatus enum.
// There is no guarantee about the thread where |callback| will be invoked.
virtual void SwitchOutputDevice(const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) = 0;
protected:
diff --git a/chromium/media/base/bind_to_current_loop_unittest.cc b/chromium/media/base/bind_to_current_loop_unittest.cc
index c3680460057..b15d87a7aef 100644
--- a/chromium/media/base/bind_to_current_loop_unittest.cc
+++ b/chromium/media/base/bind_to_current_loop_unittest.cc
@@ -12,6 +12,7 @@
#include "base/run_loop.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread.h"
+#include "base/threading/thread_checker_impl.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
@@ -44,15 +45,13 @@ void BoundIntegersSet(int* a_var, int* b_var, int a_val, int b_val) {
}
struct ThreadRestrictionChecker {
- ThreadRestrictionChecker() : bound_loop_(base::MessageLoop::current()) {}
-
- void Run() { EXPECT_EQ(bound_loop_, base::MessageLoop::current()); }
+ void Run() { EXPECT_TRUE(thread_checker_.CalledOnValidThread()); }
~ThreadRestrictionChecker() {
- EXPECT_EQ(bound_loop_, base::MessageLoop::current());
+ EXPECT_TRUE(thread_checker_.CalledOnValidThread());
}
- base::MessageLoop* bound_loop_;
+ base::ThreadCheckerImpl thread_checker_;
};
void ClearReference(base::OnceClosure cb) {}
diff --git a/chromium/media/base/bitstream_buffer.cc b/chromium/media/base/bitstream_buffer.cc
index 944200a54d8..a4662663cce 100644
--- a/chromium/media/base/bitstream_buffer.cc
+++ b/chromium/media/base/bitstream_buffer.cc
@@ -4,6 +4,8 @@
#include "media/base/bitstream_buffer.h"
+#include "media/base/decrypt_config.h"
+
namespace media {
BitstreamBuffer::BitstreamBuffer()
@@ -24,10 +26,26 @@ BitstreamBuffer::BitstreamBuffer(const BitstreamBuffer& other) = default;
BitstreamBuffer::~BitstreamBuffer() = default;
-void BitstreamBuffer::SetDecryptConfig(const DecryptConfig& decrypt_config) {
- key_id_ = decrypt_config.key_id();
- iv_ = decrypt_config.iv();
- subsamples_ = decrypt_config.subsamples();
+scoped_refptr<DecoderBuffer> BitstreamBuffer::ToDecoderBuffer() const {
+ scoped_refptr<DecoderBuffer> buffer =
+ DecoderBuffer::FromSharedMemoryHandle(handle_, offset_, size_);
+ if (!buffer)
+ return nullptr;
+ buffer->set_timestamp(presentation_timestamp_);
+ if (!key_id_.empty()) {
+ buffer->set_decrypt_config(
+ DecryptConfig::CreateCencConfig(key_id_, iv_, subsamples_));
+ }
+ return buffer;
+}
+
+void BitstreamBuffer::SetDecryptionSettings(
+ const std::string& key_id,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsamples) {
+ key_id_ = key_id;
+ iv_ = iv;
+ subsamples_ = subsamples;
}
} // namespace media
diff --git a/chromium/media/base/bitstream_buffer.h b/chromium/media/base/bitstream_buffer.h
index 35793c079d2..524b4df7eaa 100644
--- a/chromium/media/base/bitstream_buffer.h
+++ b/chromium/media/base/bitstream_buffer.h
@@ -9,8 +9,10 @@
#include <stdint.h>
#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
#include "base/memory/shared_memory.h"
#include "base/time/time.h"
+#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
#include "media/base/media_export.h"
#include "media/base/timestamp_constants.h"
@@ -44,7 +46,20 @@ class MEDIA_EXPORT BitstreamBuffer {
~BitstreamBuffer();
- void SetDecryptConfig(const DecryptConfig& decrypt_config);
+ // Produce an equivalent DecoderBuffer. This consumes handle(), even if
+ // nullptr is returned.
+ //
+ // This method is only intended to be used by VDAs that are being converted to
+ // use DecoderBuffer.
+ //
+ // TODO(sandersd): Remove once all VDAs are converted.
+ scoped_refptr<DecoderBuffer> ToDecoderBuffer() const;
+
+ // TODO(crbug.com/813845): As this is only used by Android, include
+ // EncryptionMode and optional EncryptionPattern when updating for Android.
+ void SetDecryptionSettings(const std::string& key_id,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsamples);
int32_t id() const { return id_; }
base::SharedMemoryHandle handle() const { return handle_; }
@@ -63,8 +78,7 @@ class MEDIA_EXPORT BitstreamBuffer {
void set_handle(const base::SharedMemoryHandle& handle) { handle_ = handle; }
- // The following methods come from DecryptConfig.
-
+ // The following methods come from SetDecryptionSettings().
const std::string& key_id() const { return key_id_; }
const std::string& iv() const { return iv_; }
const std::vector<SubsampleEntry>& subsamples() const { return subsamples_; }
@@ -80,10 +94,11 @@ class MEDIA_EXPORT BitstreamBuffer {
// determine the output order.
base::TimeDelta presentation_timestamp_;
- // The following fields come from DecryptConfig.
+ // Note that BitstreamBuffer uses the settings in Audio/VideoDecoderConfig
+ // to determine the encryption mode and pattern (if required by the encryption
+ // scheme).
// TODO(timav): Try to DISALLOW_COPY_AND_ASSIGN and include these params as
// std::unique_ptr<DecryptConfig> or explain why copy & assign is needed.
-
std::string key_id_; // key ID.
std::string iv_; // initialization vector
std::vector<SubsampleEntry> subsamples_; // clear/cypher sizes
diff --git a/chromium/media/base/callback_registry.h b/chromium/media/base/callback_registry.h
new file mode 100644
index 00000000000..f5c315e4ecc
--- /dev/null
+++ b/chromium/media/base/callback_registry.h
@@ -0,0 +1,105 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_CALLBACK_REGISTRY_H_
+#define MEDIA_BASE_CALLBACK_REGISTRY_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <memory>
+
+#include "base/callback.h"
+#include "base/logging.h"
+#include "base/macros.h"
+#include "base/synchronization/lock.h"
+#include "media/base/bind_to_current_loop.h"
+
+namespace media {
+
+// A class that keeps a callback registered. The callback will be unregistered
+// upon destruction of this object.
+class CallbackRegistration {
+ public:
+ CallbackRegistration() = default;
+ virtual ~CallbackRegistration() = default;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(CallbackRegistration);
+};
+
+template <typename Sig>
+class CallbackRegistry;
+
+// A helper class that can register, unregister callbacks, and notify registered
+// callbacks. This class is thread safe: all methods can be called on any
+// thread. The CallbackRegistry must outlive all CallbackRegistrations returned
+// by Register().
+// TODO(xhwang): This class is similar to base::CallbackList, but is simpler,
+// and provides thread safety. Consider merging these two.
+template <typename... Args>
+class CallbackRegistry<void(Args...)> {
+ public:
+ using CallbackType = base::RepeatingCallback<void(Args...)>;
+
+ CallbackRegistry() = default;
+ ~CallbackRegistry() = default;
+
+ std::unique_ptr<CallbackRegistration> Register(CallbackType cb)
+ WARN_UNUSED_RESULT {
+ base::AutoLock lock(lock_);
+ DCHECK(cb);
+ uint32_t registration_id = ++next_registration_id_;
+ DVLOG(1) << __func__ << ": registration_id = " << registration_id;
+
+ // Use BindToCurrentLoop so that the callbacks are always posted to the
+ // thread where Register() is called. Also, this helps avoid reentrancy
+ // and deadlock issues, e.g. Register() is called in one of the callbacks.
+ callbacks_[registration_id] = BindToCurrentLoop(std::move(cb));
+
+ return std::make_unique<RegistrationImpl>(this, registration_id);
+ }
+
+ void Notify(Args&&... args) {
+ DVLOG(1) << __func__;
+ base::AutoLock lock(lock_);
+ for (auto const& entry : callbacks_)
+ entry.second.Run(std::forward<Args>(args)...);
+ }
+
+ private:
+ class RegistrationImpl : public CallbackRegistration {
+ public:
+ RegistrationImpl(CallbackRegistry<void(Args...)>* registry,
+ uint32_t registration_id)
+ : registry_(registry), registration_id_(registration_id) {}
+
+ ~RegistrationImpl() override { registry_->Unregister(registration_id_); }
+
+ private:
+ CallbackRegistry<void(Args...)>* registry_ = nullptr;
+ uint32_t registration_id_ = 0;
+
+ DISALLOW_COPY_AND_ASSIGN(RegistrationImpl);
+ };
+
+ void Unregister(uint32_t registration_id) {
+ DVLOG(1) << __func__ << ": registration_id = " << registration_id;
+ base::AutoLock lock(lock_);
+ size_t num_callbacks_removed = callbacks_.erase(registration_id);
+ DCHECK_EQ(num_callbacks_removed, 1u);
+ }
+
+ base::Lock lock_;
+ uint32_t next_registration_id_ = 0;
+ std::map<uint32_t, CallbackType> callbacks_;
+
+ DISALLOW_COPY_AND_ASSIGN(CallbackRegistry);
+};
+
+using ClosureRegistry = CallbackRegistry<void()>;
+
+} // namespace media
+
+#endif // MEDIA_BASE_CALLBACK_REGISTRY_H_
diff --git a/chromium/media/base/callback_registry_unittest.cc b/chromium/media/base/callback_registry_unittest.cc
new file mode 100644
index 00000000000..6c1ff1e7aed
--- /dev/null
+++ b/chromium/media/base/callback_registry_unittest.cc
@@ -0,0 +1,175 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/callback_registry.h"
+
+#include "base/callback.h"
+#include "base/macros.h"
+#include "base/test/mock_callback.h"
+#include "base/test/scoped_task_environment.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+namespace {
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::IsNull;
+
+class CallbackRegistryTest : public testing::Test {
+ protected:
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
+};
+
+TEST_F(CallbackRegistryTest, RegisterWithNoParam) {
+ ClosureRegistry registry;
+
+ base::MockCallback<base::RepeatingCallback<void()>> callback;
+ auto registration = registry.Register(callback.Get());
+ EXPECT_TRUE(registration);
+
+ EXPECT_CALL(callback, Run());
+ registry.Notify();
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(CallbackRegistryTest, RegisterWithOneParam) {
+ CallbackRegistry<void(int)> registry;
+
+ base::MockCallback<base::RepeatingCallback<void(int)>> callback;
+ auto registration = registry.Register(callback.Get());
+ EXPECT_TRUE(registration);
+
+ EXPECT_CALL(callback, Run(1));
+ registry.Notify(1);
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(CallbackRegistryTest, RegisterWithTwoParams) {
+ CallbackRegistry<void(int, int)> registry;
+
+ base::MockCallback<base::RepeatingCallback<void(int, int)>> callback;
+ auto registration = registry.Register(callback.Get());
+ EXPECT_TRUE(registration);
+
+ EXPECT_CALL(callback, Run(1, 2));
+ registry.Notify(1, 2);
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(CallbackRegistryTest, RegisterWithMoveOnlyParam) {
+ CallbackRegistry<void(std::unique_ptr<int>)> registry;
+
+ base::MockCallback<base::RepeatingCallback<void(std::unique_ptr<int>)>>
+ callback;
+ auto registration = registry.Register(callback.Get());
+ EXPECT_TRUE(registration);
+
+ EXPECT_CALL(callback, Run(_));
+ registry.Notify(std::make_unique<int>(1));
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(CallbackRegistryTest, RegisterWithPointerParam) {
+ CallbackRegistry<void(int*)> registry;
+
+ base::MockCallback<base::RepeatingCallback<void(int*)>> callback;
+ auto registration = registry.Register(callback.Get());
+ EXPECT_TRUE(registration);
+
+ EXPECT_CALL(callback, Run(IsNull()));
+ registry.Notify(nullptr);
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(CallbackRegistryTest, RegisterWithReferenceParam) {
+ CallbackRegistry<void(const int&)> registry;
+
+ base::MockCallback<base::RepeatingCallback<void(const int&)>> callback;
+ auto registration = registry.Register(callback.Get());
+ EXPECT_TRUE(registration);
+
+ int i = 1;
+ EXPECT_CALL(callback, Run(i));
+ registry.Notify(i);
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(CallbackRegistryTest, RegisterAfterNotify) {
+ ClosureRegistry registry;
+
+ base::MockCallback<base::RepeatingClosure> callback_1;
+ auto registration_1 = registry.Register(callback_1.Get());
+ EXPECT_TRUE(registration_1);
+
+ EXPECT_CALL(callback_1, Run());
+ registry.Notify();
+ scoped_task_environment_.RunUntilIdle();
+
+ base::MockCallback<base::RepeatingClosure> callback_2;
+ auto registration_2 = registry.Register(callback_2.Get());
+ EXPECT_TRUE(registration_2);
+
+ EXPECT_CALL(callback_1, Run());
+ EXPECT_CALL(callback_2, Run());
+ registry.Notify();
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(CallbackRegistryTest, EmptyRegistry) {
+ ClosureRegistry registry;
+ registry.Notify();
+}
+
+TEST_F(CallbackRegistryTest, UnregisterCallback) {
+ ClosureRegistry registry;
+
+ base::MockCallback<base::RepeatingClosure> callback_1;
+ base::MockCallback<base::RepeatingClosure> callback_2;
+ auto registration_1 = registry.Register(callback_1.Get());
+ auto registration_2 = registry.Register(callback_2.Get());
+ EXPECT_TRUE(registration_1);
+ EXPECT_TRUE(registration_2);
+
+ EXPECT_CALL(callback_1, Run());
+ EXPECT_CALL(callback_2, Run());
+ registry.Notify();
+ scoped_task_environment_.RunUntilIdle();
+
+ registration_1.reset();
+ EXPECT_CALL(callback_2, Run());
+ registry.Notify();
+ scoped_task_environment_.RunUntilIdle();
+
+ registration_2.reset();
+ registry.Notify();
+ scoped_task_environment_.RunUntilIdle();
+}
+
+TEST_F(CallbackRegistryTest, RegisterDuringNotification) {
+ ClosureRegistry registry;
+
+ base::MockCallback<base::RepeatingClosure> callback_1;
+ base::MockCallback<base::RepeatingClosure> callback_2;
+ auto registration_1 = registry.Register(callback_1.Get());
+ std::unique_ptr<CallbackRegistration> registration_2;
+ EXPECT_TRUE(registration_1);
+
+ // Register callback_2 during callback_1's notification run.
+ EXPECT_CALL(callback_1, Run()).WillOnce(Invoke([&]() {
+ registration_2 = registry.Register(callback_2.Get());
+ }));
+ registry.Notify();
+ scoped_task_environment_.RunUntilIdle();
+ EXPECT_TRUE(registration_2);
+
+ EXPECT_CALL(callback_1, Run());
+ EXPECT_CALL(callback_2, Run());
+ registry.Notify();
+ scoped_task_environment_.RunUntilIdle();
+}
+
+} // namespace
+} // namespace media
diff --git a/chromium/media/base/cdm_context.cc b/chromium/media/base/cdm_context.cc
index ec697cec7ea..1eb70b442c4 100644
--- a/chromium/media/base/cdm_context.cc
+++ b/chromium/media/base/cdm_context.cc
@@ -4,12 +4,19 @@
#include "media/base/cdm_context.h"
+#include "media/base/callback_registry.h"
+
namespace media {
CdmContext::CdmContext() = default;
CdmContext::~CdmContext() = default;
+std::unique_ptr<CallbackRegistration> CdmContext::RegisterNewKeyCB(
+ base::RepeatingClosure new_key_cb) {
+ return nullptr;
+}
+
Decryptor* CdmContext::GetDecryptor() {
return nullptr;
}
@@ -30,10 +37,6 @@ MediaCryptoContext* CdmContext::GetMediaCryptoContext() {
}
#endif
-void* CdmContext::GetClassIdentifier() const {
- return nullptr;
-}
-
void IgnoreCdmAttached(bool /* success */) {}
} // namespace media
diff --git a/chromium/media/base/cdm_context.h b/chromium/media/base/cdm_context.h
index c58d6a70ddc..9c3dfadc0a2 100644
--- a/chromium/media/base/cdm_context.h
+++ b/chromium/media/base/cdm_context.h
@@ -13,6 +13,7 @@
namespace media {
+class CallbackRegistration;
class CdmProxyContext;
class Decryptor;
class MediaCryptoContext;
@@ -20,6 +21,14 @@ class MediaCryptoContext;
// An interface representing the context that a media player needs from a
// content decryption module (CDM) to decrypt (and decode) encrypted buffers.
// Typically this will be passed to the media player (e.g. using SetCdm()).
+//
+// Lifetime: The returned raw pointers are only guaranteed to be valid when the
+// CdmContext is alive, which is usually guaranteed by holding a CdmContextRef
+// (see below).
+//
+// Thread Model: Since this interface is used in many different contexts (e.g.
+// different processes or platforms), the thread model is not defined as part
+// of this interface. Subclasses must ensure thread safety.
class MEDIA_EXPORT CdmContext {
public:
// Indicates an invalid CDM ID. See GetCdmId() for details.
@@ -27,36 +36,43 @@ class MEDIA_EXPORT CdmContext {
virtual ~CdmContext();
+ // Registers a callback which will be called when an additional usable key is
+ // available in the CDM. Can be called multiple times to register multiple
+ // callbacks, all of which will be called when a new usable key is available.
+ // Lifetime: The caller should keep the returned CallbackRegistration object
+ // to keep the callback registered. The callback will be unregistered upon the
+ // destruction of the returned CallbackRegistration object. The returned
+ // CallbackRegistration object can be destructed on any thread.
+ // Thread Model: Can be called on any thread. The registered callback will
+ // always be called on the thread where RegisterNewKeyCB() is called.
+ // TODO(xhwang): We are not using base::CallbackList because it is not thread-
+ // safe. Consider refactoring base::CallbackList to avoid code duplication.
+ virtual std::unique_ptr<CallbackRegistration> RegisterNewKeyCB(
+ base::RepeatingClosure new_key_cb);
+
// Gets the Decryptor object associated with the CDM. Returns nullptr if the
// CDM does not support a Decryptor (i.e. platform-based CDMs where decryption
- // occurs implicitly along with decoding). The returned object is only
- // guaranteed to be valid during the CDM's lifetime.
+ // occurs implicitly along with decoding).
virtual Decryptor* GetDecryptor();
// Returns an ID that can be used to find a remote CDM, in which case this CDM
// serves as a proxy to the remote one. Returns kInvalidCdmId when remote CDM
// is not supported (e.g. this CDM is a local CDM).
+ // TODO(crbug.com/804397): Use base::UnguessableToken for CDM ID.
virtual int GetCdmId() const;
#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
- // Returns a CdmProxyContext. The default implementation returns a nullptr
- // to indidate that there is no context.
- // If CdmProxy is not used, then this returns a nullptr.
- // The pointer is owned by the callee.
+ // Returns a CdmProxyContext that can be used by hardware decoders/decryptors.
+ // Returns nullptr if CdmProxyContext is not supported, e.g. |this| is not
+ // hosted by a CdmProxy.
virtual CdmProxyContext* GetCdmProxyContext();
#endif // BUILDFLAG(ENABLE_LIBRARY_CDMS)
#if defined(OS_ANDROID)
// Returns a MediaCryptoContext that can be used by MediaCodec based decoders.
- // The returned object is only guaranteed to be valid during the CDM's
- // lifetime.
virtual MediaCryptoContext* GetMediaCryptoContext();
#endif
- // Returns a unique class identifier. Some subclasses override and use this
- // method to provide safe down-casting to their type.
- virtual void* GetClassIdentifier() const;
-
protected:
CdmContext();
diff --git a/chromium/media/base/channel_layout.cc b/chromium/media/base/channel_layout.cc
index 7d3e754df18..d0ba542e852 100644
--- a/chromium/media/base/channel_layout.cc
+++ b/chromium/media/base/channel_layout.cc
@@ -44,6 +44,7 @@ static const int kLayoutToChannels[] = {
0, // CHANNEL_LAYOUT_DISCRETE
3, // CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC
5, // CHANNEL_LAYOUT_4_1_QUAD_SIDE
+ 0, // CHANNEL_LAYOUT_BITSTREAM
};
// The channel orderings for each layout as specified by FFmpeg. Each value
@@ -155,6 +156,9 @@ static const int kChannelOrderings[CHANNEL_LAYOUT_MAX + 1][CHANNELS_MAX + 1] = {
// CHANNEL_LAYOUT_4_1_QUAD_SIDE
{ 0 , 1 , -1 , 4 , -1 , -1 , -1 , -1 , -1 , 2 , 3 },
+ // CHANNEL_LAYOUT_BITSTREAM
+ { -1 , -1 , -1 , -1 , -1 , -1 , -1 , -1 , -1 , -1 , -1 },
+
// FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR
};
@@ -261,6 +265,8 @@ const char* ChannelLayoutToString(ChannelLayout layout) {
return "STEREO_AND_KEYBOARD_MIC";
case CHANNEL_LAYOUT_4_1_QUAD_SIDE:
return "4.1_QUAD_SIDE";
+ case CHANNEL_LAYOUT_BITSTREAM:
+ return "BITSTREAM";
}
NOTREACHED() << "Invalid channel layout provided: " << layout;
return "";
diff --git a/chromium/media/base/channel_layout.h b/chromium/media/base/channel_layout.h
index ea0df021be0..008c081208b 100644
--- a/chromium/media/base/channel_layout.h
+++ b/chromium/media/base/channel_layout.h
@@ -108,8 +108,13 @@ enum ChannelLayout {
// Front L, Front R, Side L, Side R, LFE
CHANNEL_LAYOUT_4_1_QUAD_SIDE = 31,
+ // Actual channel layout is specified in the bitstream and the actual channel
+ // count is unknown at Chromium media pipeline level (useful for audio
+ // pass-through mode).
+ CHANNEL_LAYOUT_BITSTREAM = 32,
+
// Max value, must always equal the largest entry ever logged.
- CHANNEL_LAYOUT_MAX = CHANNEL_LAYOUT_4_1_QUAD_SIDE
+ CHANNEL_LAYOUT_MAX = CHANNEL_LAYOUT_BITSTREAM
};
// Note: Do not reorder or reassign these values; other code depends on their
diff --git a/chromium/media/base/channel_mixer.cc b/chromium/media/base/channel_mixer.cc
index a73a0b12111..841933842a6 100644
--- a/chromium/media/base/channel_mixer.cc
+++ b/chromium/media/base/channel_mixer.cc
@@ -42,12 +42,20 @@ void ChannelMixer::Initialize(
ChannelMixer::~ChannelMixer() = default;
void ChannelMixer::Transform(const AudioBus* input, AudioBus* output) {
+ CHECK_EQ(input->frames(), output->frames());
+ TransformPartial(input, input->frames(), output);
+}
+
+void ChannelMixer::TransformPartial(const AudioBus* input,
+ int frame_count,
+ AudioBus* output) {
CHECK_EQ(matrix_.size(), static_cast<size_t>(output->channels()));
CHECK_EQ(matrix_[0].size(), static_cast<size_t>(input->channels()));
- CHECK_EQ(input->frames(), output->frames());
+ CHECK_LE(frame_count, input->frames());
+ CHECK_LE(frame_count, output->frames());
// Zero initialize |output| so we're accumulating from zero.
- output->Zero();
+ output->ZeroFrames(frame_count);
// If we're just remapping we can simply copy the correct input to output.
if (remapping_) {
@@ -57,7 +65,7 @@ void ChannelMixer::Transform(const AudioBus* input, AudioBus* output) {
if (scale > 0) {
DCHECK_EQ(scale, 1.0f);
memcpy(output->channel(output_ch), input->channel(input_ch),
- sizeof(*output->channel(output_ch)) * output->frames());
+ sizeof(*output->channel(output_ch)) * frame_count);
break;
}
}
@@ -71,7 +79,7 @@ void ChannelMixer::Transform(const AudioBus* input, AudioBus* output) {
// Scale should always be positive. Don't bother scaling by zero.
DCHECK_GE(scale, 0);
if (scale > 0) {
- vector_math::FMAC(input->channel(input_ch), scale, output->frames(),
+ vector_math::FMAC(input->channel(input_ch), scale, frame_count,
output->channel(output_ch));
}
}
diff --git a/chromium/media/base/channel_mixer.h b/chromium/media/base/channel_mixer.h
index a629b681f42..a7ed6536bb0 100644
--- a/chromium/media/base/channel_mixer.h
+++ b/chromium/media/base/channel_mixer.h
@@ -35,6 +35,14 @@ class MEDIA_EXPORT ChannelMixer {
// Transforms all channels from |input| into |output| channels.
void Transform(const AudioBus* input, AudioBus* output);
+ // Transforms all channels from |input| into |output| channels, for just the
+ // initial part of the input. Callers can use this to avoid reallocating
+ // AudioBuses, if the length of the data changes frequently for their use
+ // case.
+ void TransformPartial(const AudioBus* input,
+ int frame_count,
+ AudioBus* output);
+
private:
void Initialize(ChannelLayout input_layout, int input_channels,
ChannelLayout output_layout, int output_channels);
diff --git a/chromium/media/base/channel_mixer_unittest.cc b/chromium/media/base/channel_mixer_unittest.cc
index 250f9995f08..a4ea611bb68 100644
--- a/chromium/media/base/channel_mixer_unittest.cc
+++ b/chromium/media/base/channel_mixer_unittest.cc
@@ -24,11 +24,13 @@ TEST(ChannelMixerTest, ConstructAllPossibleLayouts) {
for (ChannelLayout output_layout = CHANNEL_LAYOUT_MONO;
output_layout <= CHANNEL_LAYOUT_MAX;
output_layout = static_cast<ChannelLayout>(output_layout + 1)) {
- // DISCRETE can't be tested here based on the current approach.
+ // DISCRETE, BITSTREAM can't be tested here based on the current approach.
// CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC is not mixable.
// Stereo down mix should never be the output layout.
- if (input_layout == CHANNEL_LAYOUT_DISCRETE ||
+ if (input_layout == CHANNEL_LAYOUT_BITSTREAM ||
+ input_layout == CHANNEL_LAYOUT_DISCRETE ||
input_layout == CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC ||
+ output_layout == CHANNEL_LAYOUT_BITSTREAM ||
output_layout == CHANNEL_LAYOUT_DISCRETE ||
output_layout == CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC ||
output_layout == CHANNEL_LAYOUT_STEREO_DOWNMIX) {
@@ -104,7 +106,7 @@ TEST_P(ChannelMixerTest, Mixing) {
std::unique_ptr<AudioBus> input_bus =
AudioBus::Create(input_channels, kFrames);
AudioParameters input_audio(AudioParameters::AUDIO_PCM_LINEAR, input_layout,
- AudioParameters::kAudioCDSampleRate, 16, kFrames);
+ AudioParameters::kAudioCDSampleRate, kFrames);
if (input_layout == CHANNEL_LAYOUT_DISCRETE)
input_audio.set_channels_for_discrete(input_channels);
@@ -113,8 +115,7 @@ TEST_P(ChannelMixerTest, Mixing) {
std::unique_ptr<AudioBus> output_bus =
AudioBus::Create(output_channels, kFrames);
AudioParameters output_audio(AudioParameters::AUDIO_PCM_LINEAR, output_layout,
- AudioParameters::kAudioCDSampleRate, 16,
- kFrames);
+ AudioParameters::kAudioCDSampleRate, kFrames);
if (output_layout == CHANNEL_LAYOUT_DISCRETE)
output_audio.set_channels_for_discrete(output_channels);
diff --git a/chromium/media/base/channel_mixing_matrix_unittest.cc b/chromium/media/base/channel_mixing_matrix_unittest.cc
index 92df75cc235..a64ae05020f 100644
--- a/chromium/media/base/channel_mixing_matrix_unittest.cc
+++ b/chromium/media/base/channel_mixing_matrix_unittest.cc
@@ -21,11 +21,13 @@ TEST(ChannelMixingMatrixTest, ConstructAllPossibleLayouts) {
for (ChannelLayout output_layout = CHANNEL_LAYOUT_MONO;
output_layout <= CHANNEL_LAYOUT_MAX;
output_layout = static_cast<ChannelLayout>(output_layout + 1)) {
- // DISCRETE can't be tested here based on the current approach.
+ // DISCRETE, BITSTREAM can't be tested here based on the current approach.
// CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC is not mixable.
// Stereo down mix should never be the output layout.
- if (input_layout == CHANNEL_LAYOUT_DISCRETE ||
+ if (input_layout == CHANNEL_LAYOUT_BITSTREAM ||
+ input_layout == CHANNEL_LAYOUT_DISCRETE ||
input_layout == CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC ||
+ output_layout == CHANNEL_LAYOUT_BITSTREAM ||
output_layout == CHANNEL_LAYOUT_DISCRETE ||
output_layout == CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC ||
output_layout == CHANNEL_LAYOUT_STEREO_DOWNMIX) {
diff --git a/chromium/media/base/decode_capabilities.cc b/chromium/media/base/decode_capabilities.cc
index 94d8292a090..9d30a878c3f 100644
--- a/chromium/media/base/decode_capabilities.cc
+++ b/chromium/media/base/decode_capabilities.cc
@@ -8,7 +8,7 @@
#include "base/feature_list.h"
#include "media/base/media_switches.h"
#include "media/media_buildflags.h"
-#include "third_party/libaom/av1_features.h"
+#include "third_party/libaom/av1_buildflags.h"
#include "ui/display/display_switches.h"
#if BUILDFLAG(ENABLE_LIBVPX)
@@ -148,6 +148,7 @@ bool IsSupportedAudioConfig(const AudioConfig& config) {
case media::kCodecEAC3:
case media::kCodecALAC:
case media::kCodecAC3:
+ case media::kCodecMpegHAudio:
case media::kUnknownAudioCodec:
return false;
}
diff --git a/chromium/media/base/decoder_buffer.cc b/chromium/media/base/decoder_buffer.cc
index 63dad33a353..20344051edd 100644
--- a/chromium/media/base/decoder_buffer.cc
+++ b/chromium/media/base/decoder_buffer.cc
@@ -44,6 +44,13 @@ DecoderBuffer::DecoderBuffer(const uint8_t* data,
memcpy(side_data_.get(), side_data, side_data_size_);
}
+DecoderBuffer::DecoderBuffer(std::unique_ptr<UnalignedSharedMemory> shm,
+ size_t size)
+ : size_(size),
+ side_data_size_(0),
+ shm_(std::move(shm)),
+ is_key_frame_(false) {}
+
DecoderBuffer::~DecoderBuffer() = default;
void DecoderBuffer::Initialize() {
@@ -73,6 +80,17 @@ scoped_refptr<DecoderBuffer> DecoderBuffer::CopyFrom(const uint8_t* data,
}
// static
+scoped_refptr<DecoderBuffer> DecoderBuffer::FromSharedMemoryHandle(
+ const base::SharedMemoryHandle& handle,
+ off_t offset,
+ size_t size) {
+ auto shm = std::make_unique<UnalignedSharedMemory>(handle, true);
+ if (size == 0 || !shm->MapAt(offset, size))
+ return nullptr;
+ return base::WrapRefCounted(new DecoderBuffer(std::move(shm), size));
+}
+
+// static
scoped_refptr<DecoderBuffer> DecoderBuffer::CreateEOSBuffer() {
return base::WrapRefCounted(new DecoderBuffer(NULL, 0, NULL, 0));
}
diff --git a/chromium/media/base/decoder_buffer.h b/chromium/media/base/decoder_buffer.h
index 60ffba708b0..d1e4d8df907 100644
--- a/chromium/media/base/decoder_buffer.h
+++ b/chromium/media/base/decoder_buffer.h
@@ -16,11 +16,13 @@
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
#include "base/memory/ref_counted.h"
+#include "base/memory/shared_memory_handle.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "media/base/decrypt_config.h"
#include "media/base/media_export.h"
#include "media/base/timestamp_constants.h"
+#include "media/base/unaligned_shared_memory.h"
namespace media {
@@ -64,6 +66,19 @@ class MEDIA_EXPORT DecoderBuffer
const uint8_t* side_data,
size_t side_data_size);
+ // Create a DecoderBuffer where data() of |size| bytes resides within the
+ // memory referred to by |handle| at non-negative offset |offset|. The
+ // buffer's |is_key_frame_| will default to false.
+ //
+ // The shared memory will be mapped read-only.
+ //
+ // If mapping fails, nullptr will be returned. In all cases |handle| is
+ // consumed.
+ static scoped_refptr<DecoderBuffer> FromSharedMemoryHandle(
+ const base::SharedMemoryHandle& handle,
+ off_t offset,
+ size_t size);
+
// Create a DecoderBuffer indicating we've reached end of stream.
//
// Calling any method other than end_of_stream() on the resulting buffer
@@ -94,11 +109,15 @@ class MEDIA_EXPORT DecoderBuffer
const uint8_t* data() const {
DCHECK(!end_of_stream());
+ if (shm_)
+ return static_cast<uint8_t*>(shm_->memory());
return data_.get();
}
+ // TODO(sandersd): Remove writable_data(). https://crbug.com/834088
uint8_t* writable_data() const {
DCHECK(!end_of_stream());
+ DCHECK(!shm_);
return data_.get();
}
@@ -144,9 +163,7 @@ class MEDIA_EXPORT DecoderBuffer
}
// If there's no data in this buffer, it represents end of stream.
- bool end_of_stream() const {
- return data_ == NULL;
- }
+ bool end_of_stream() const { return !shm_ && !data_; }
bool is_key_frame() const {
DCHECK(!end_of_stream());
@@ -179,6 +196,9 @@ class MEDIA_EXPORT DecoderBuffer
size_t size,
const uint8_t* side_data,
size_t side_data_size);
+
+ DecoderBuffer(std::unique_ptr<UnalignedSharedMemory> shm, size_t size);
+
virtual ~DecoderBuffer();
private:
@@ -189,6 +209,7 @@ class MEDIA_EXPORT DecoderBuffer
std::unique_ptr<uint8_t, base::AlignedFreeDeleter> data_;
size_t side_data_size_;
std::unique_ptr<uint8_t, base::AlignedFreeDeleter> side_data_;
+ std::unique_ptr<UnalignedSharedMemory> shm_;
std::unique_ptr<DecryptConfig> decrypt_config_;
DiscardPadding discard_padding_;
bool is_key_frame_;
diff --git a/chromium/media/base/decoder_buffer_unittest.cc b/chromium/media/base/decoder_buffer_unittest.cc
index bdbc27363ef..fb8249687ad 100644
--- a/chromium/media/base/decoder_buffer_unittest.cc
+++ b/chromium/media/base/decoder_buffer_unittest.cc
@@ -5,9 +5,12 @@
#include "media/base/decoder_buffer.h"
#include <stdint.h>
+#include <string.h>
+
#include <memory>
#include "base/macros.h"
+#include "base/memory/shared_memory.h"
#include "base/strings/string_util.h"
#include "build/build_config.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -59,6 +62,55 @@ TEST(DecoderBufferTest, CopyFrom) {
EXPECT_FALSE(buffer3->is_key_frame());
}
+TEST(DecoderBufferTest, FromSharedMemoryHandle) {
+ const uint8_t kData[] = "hello";
+ const size_t kDataSize = arraysize(kData);
+
+ base::SharedMemory mem;
+ ASSERT_TRUE(mem.CreateAndMapAnonymous(kDataSize));
+ memcpy(mem.memory(), kData, kDataSize);
+
+ scoped_refptr<DecoderBuffer> buffer(
+ DecoderBuffer::FromSharedMemoryHandle(mem.TakeHandle(), 0, kDataSize));
+ ASSERT_TRUE(buffer.get());
+ EXPECT_EQ(buffer->data_size(), kDataSize);
+ EXPECT_EQ(0, memcmp(buffer->data(), kData, kDataSize));
+ EXPECT_FALSE(buffer->end_of_stream());
+ EXPECT_FALSE(buffer->is_key_frame());
+}
+
+TEST(DecoderBufferTest, FromSharedMemoryHandle_Unaligned) {
+ const uint8_t kData[] = "XXXhello";
+ const size_t kDataSize = arraysize(kData);
+ const off_t kDataOffset = 3;
+
+ base::SharedMemory mem;
+ ASSERT_TRUE(mem.CreateAndMapAnonymous(kDataSize));
+ memcpy(mem.memory(), kData, kDataSize);
+
+ scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::FromSharedMemoryHandle(
+ mem.TakeHandle(), kDataOffset, kDataSize - kDataOffset));
+ ASSERT_TRUE(buffer.get());
+ EXPECT_EQ(buffer->data_size(), kDataSize - kDataOffset);
+ EXPECT_EQ(
+ 0, memcmp(buffer->data(), kData + kDataOffset, kDataSize - kDataOffset));
+ EXPECT_FALSE(buffer->end_of_stream());
+ EXPECT_FALSE(buffer->is_key_frame());
+}
+
+TEST(DecoderBufferTest, FromSharedMemoryHandle_ZeroSize) {
+ const uint8_t kData[] = "hello";
+ const size_t kDataSize = arraysize(kData);
+
+ base::SharedMemory mem;
+ ASSERT_TRUE(mem.CreateAndMapAnonymous(kDataSize));
+ memcpy(mem.memory(), kData, kDataSize);
+
+ scoped_refptr<DecoderBuffer> buffer(
+ DecoderBuffer::FromSharedMemoryHandle(mem.TakeHandle(), 0, 0));
+ ASSERT_FALSE(buffer.get());
+}
+
#if !defined(OS_ANDROID)
TEST(DecoderBufferTest, PaddingAlignment) {
const uint8_t kData[] = "hello";
@@ -115,13 +167,14 @@ TEST(DecoderBufferTest, DecryptConfig) {
subsamples.push_back(SubsampleEntry(10, 5));
subsamples.push_back(SubsampleEntry(15, 7));
- DecryptConfig decrypt_config(kKeyId, kIv, subsamples);
+ std::unique_ptr<DecryptConfig> decrypt_config =
+ DecryptConfig::CreateCencConfig(kKeyId, kIv, subsamples);
buffer->set_decrypt_config(
- std::make_unique<DecryptConfig>(kKeyId, kIv, subsamples));
+ DecryptConfig::CreateCencConfig(kKeyId, kIv, subsamples));
EXPECT_TRUE(buffer->decrypt_config());
- EXPECT_TRUE(buffer->decrypt_config()->Matches(decrypt_config));
+ EXPECT_TRUE(buffer->decrypt_config()->Matches(*decrypt_config));
}
TEST(DecoderBufferTest, IsKeyFrame) {
diff --git a/chromium/media/base/decoder_factory.cc b/chromium/media/base/decoder_factory.cc
index b8ff2854d0f..1eb9877eb6c 100644
--- a/chromium/media/base/decoder_factory.cc
+++ b/chromium/media/base/decoder_factory.cc
@@ -14,6 +14,7 @@ DecoderFactory::~DecoderFactory() = default;
void DecoderFactory::CreateAudioDecoders(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) {}
void DecoderFactory::CreateVideoDecoders(
@@ -21,6 +22,7 @@ void DecoderFactory::CreateVideoDecoders(
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space,
std::vector<std::unique_ptr<VideoDecoder>>* video_decoders) {}
} // namespace media
diff --git a/chromium/media/base/decoder_factory.h b/chromium/media/base/decoder_factory.h
index 71c7c91adaf..eed5bec05fa 100644
--- a/chromium/media/base/decoder_factory.h
+++ b/chromium/media/base/decoder_factory.h
@@ -15,7 +15,11 @@
namespace base {
class SingleThreadTaskRunner;
-}
+} // namespace base
+
+namespace gfx {
+class ColorSpace;
+} // namespace gfx
namespace media {
@@ -34,6 +38,7 @@ class MEDIA_EXPORT DecoderFactory {
// Decoders are single-threaded, each decoder should run on |task_runner|.
virtual void CreateAudioDecoders(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders);
// Creates video decoders and append them to the end of |video_decoders|.
@@ -43,6 +48,7 @@ class MEDIA_EXPORT DecoderFactory {
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space,
std::vector<std::unique_ptr<VideoDecoder>>* video_decoders);
private:
diff --git a/chromium/media/base/decrypt_config.cc b/chromium/media/base/decrypt_config.cc
index c48ddad240b..90df7adb934 100644
--- a/chromium/media/base/decrypt_config.cc
+++ b/chromium/media/base/decrypt_config.cc
@@ -7,26 +7,80 @@
#include <stddef.h>
#include "base/logging.h"
+#include "base/memory/ptr_util.h"
#include "base/strings/string_number_conversions.h"
+#include "media/media_buildflags.h"
namespace media {
-DecryptConfig::DecryptConfig(const std::string& key_id,
- const std::string& iv,
- const std::vector<SubsampleEntry>& subsamples)
- : key_id_(key_id),
+namespace {
+
+const char* EncryptionModeAsString(EncryptionMode mode) {
+ switch (mode) {
+ case EncryptionMode::kUnencrypted:
+ return "Unencrypted";
+ case EncryptionMode::kCenc:
+ return "CENC";
+ case EncryptionMode::kCbcs:
+ return "CBCS";
+ default:
+ return "Unknown";
+ }
+}
+
+} // namespace
+
+// static
+std::unique_ptr<DecryptConfig> DecryptConfig::CreateCencConfig(
+ const std::string& key_id,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsamples) {
+ return std::make_unique<DecryptConfig>(EncryptionMode::kCenc, key_id, iv,
+ subsamples, base::nullopt);
+}
+
+// static
+std::unique_ptr<DecryptConfig> DecryptConfig::CreateCbcsConfig(
+ const std::string& key_id,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsamples,
+ base::Optional<EncryptionPattern> encryption_pattern) {
+ return std::make_unique<DecryptConfig>(EncryptionMode::kCbcs, key_id, iv,
+ subsamples,
+ std::move(encryption_pattern));
+}
+
+DecryptConfig::DecryptConfig(
+ const EncryptionMode& encryption_mode,
+ const std::string& key_id,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsamples,
+ base::Optional<EncryptionPattern> encryption_pattern)
+ : encryption_mode_(encryption_mode),
+ key_id_(key_id),
iv_(iv),
- subsamples_(subsamples) {
- CHECK_GT(key_id.size(), 0u);
- CHECK(iv.size() == static_cast<size_t>(DecryptConfig::kDecryptionKeySize) ||
- iv.empty());
+ subsamples_(subsamples),
+ encryption_pattern_(std::move(encryption_pattern)) {
+ // Unencrypted blocks should not have a DecryptConfig.
+ DCHECK_NE(encryption_mode_, EncryptionMode::kUnencrypted);
+ CHECK_GT(key_id_.size(), 0u);
+ CHECK_EQ(iv_.size(), static_cast<size_t>(DecryptConfig::kDecryptionKeySize));
+
+ // Pattern not allowed for non-'cbcs' modes.
+ DCHECK(encryption_mode_ == EncryptionMode::kCbcs || !encryption_pattern_);
}
DecryptConfig::~DecryptConfig() = default;
+bool DecryptConfig::HasPattern() const {
+ return encryption_pattern_.has_value();
+}
+
bool DecryptConfig::Matches(const DecryptConfig& config) const {
if (key_id() != config.key_id() || iv() != config.iv() ||
- subsamples().size() != config.subsamples().size()) {
+ subsamples().size() != config.subsamples().size() ||
+ encryption_mode_ != config.encryption_mode_ ||
+ encryption_pattern_ != config.encryption_pattern_) {
return false;
}
@@ -42,7 +96,13 @@ bool DecryptConfig::Matches(const DecryptConfig& config) const {
std::ostream& DecryptConfig::Print(std::ostream& os) const {
os << "key_id:'" << base::HexEncode(key_id_.data(), key_id_.size()) << "'"
- << " iv:'" << base::HexEncode(iv_.data(), iv_.size()) << "'";
+ << " iv:'" << base::HexEncode(iv_.data(), iv_.size()) << "'"
+ << " mode:" << EncryptionModeAsString(encryption_mode_);
+
+ if (encryption_pattern_) {
+ os << " pattern:" << encryption_pattern_->crypt_byte_block() << ":"
+ << encryption_pattern_->skip_byte_block();
+ }
os << " subsamples:[";
for (const SubsampleEntry& entry : subsamples_) {
diff --git a/chromium/media/base/decrypt_config.h b/chromium/media/base/decrypt_config.h
index e025d0f4282..5d99153bf13 100644
--- a/chromium/media/base/decrypt_config.h
+++ b/chromium/media/base/decrypt_config.h
@@ -8,15 +8,27 @@
#include <stdint.h>
#include <iosfwd>
+#include <memory>
#include <string>
#include <vector>
#include "base/macros.h"
+#include "base/optional.h"
+#include "media/base/encryption_pattern.h"
#include "media/base/media_export.h"
#include "media/base/subsample_entry.h"
namespace media {
+// The encryption mode. The definitions are from ISO/IEC 23001-7:2016.
+// TODO(crbug.com/825041): Merge this with existing media::EncryptionScheme.
+enum class EncryptionMode {
+ kUnencrypted = 0,
+ kCenc, // 'cenc' subsample encryption using AES-CTR mode.
+ kCbcs, // 'cbcs' pattern encryption using AES-CBC mode.
+ kMaxValue = kCbcs
+};
+
// Contains all information that a decryptor needs to decrypt a media sample.
class MEDIA_EXPORT DecryptConfig {
public:
@@ -25,23 +37,42 @@ class MEDIA_EXPORT DecryptConfig {
// |key_id| is the ID that references the decryption key for this sample.
// |iv| is the initialization vector defined by the encrypted format.
- // Currently |iv| must be 16 bytes as defined by WebM and ISO. Or must be
- // empty which signals an unencrypted frame.
+ // Currently |iv| must be 16 bytes as defined by WebM and ISO. It must
+ // be provided.
// |subsamples| defines the clear and encrypted portions of the sample as
// described above. A decrypted buffer will be equal in size to the sum
// of the subsample sizes.
- DecryptConfig(const std::string& key_id,
+ // |encryption_pattern| is the pattern used ('cbcs' only). It is optional
+ // as Common encryption of MPEG-2 transport streams v1 (23009-1:2014)
+ // does not specify patterns for cbcs encryption mode. The pattern is
+ // assumed to be 1:9 for video and 1:0 for audio.
+ static std::unique_ptr<DecryptConfig> CreateCencConfig(
+ const std::string& key_id,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsamples);
+ static std::unique_ptr<DecryptConfig> CreateCbcsConfig(
+ const std::string& key_id,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsamples,
+ base::Optional<EncryptionPattern> encryption_pattern);
+
+ DecryptConfig(const EncryptionMode& encryption_mode,
+ const std::string& key_id,
const std::string& iv,
- const std::vector<SubsampleEntry>& subsamples);
+ const std::vector<SubsampleEntry>& subsamples,
+ base::Optional<EncryptionPattern> encryption_pattern);
~DecryptConfig();
const std::string& key_id() const { return key_id_; }
const std::string& iv() const { return iv_; }
const std::vector<SubsampleEntry>& subsamples() const { return subsamples_; }
+ const EncryptionMode& encryption_mode() const { return encryption_mode_; }
+ const base::Optional<EncryptionPattern>& encryption_pattern() const {
+ return encryption_pattern_;
+  }
- // Returns true if the corresponding decoder buffer requires decryption and
- // false if that buffer is clear despite the presense of DecryptConfig.
- bool is_encrypted() const { return !key_id_.empty() && !iv_.empty(); }
+ // Returns whether this config has EncryptionPattern set or not.
+ bool HasPattern() const;
// Returns true if all fields in |config| match this config.
bool Matches(const DecryptConfig& config) const;
@@ -50,6 +81,7 @@ class MEDIA_EXPORT DecryptConfig {
std::ostream& Print(std::ostream& os) const;
private:
+ const EncryptionMode encryption_mode_;
const std::string key_id_;
// Initialization vector.
@@ -59,6 +91,9 @@ class MEDIA_EXPORT DecryptConfig {
// (less data ignored by data_offset_) is encrypted.
const std::vector<SubsampleEntry> subsamples_;
+ // Only specified if |encryption_mode_| requires a pattern.
+ base::Optional<EncryptionPattern> encryption_pattern_;
+
DISALLOW_COPY_AND_ASSIGN(DecryptConfig);
};
diff --git a/chromium/media/base/decrypt_config_unittest.cc b/chromium/media/base/decrypt_config_unittest.cc
new file mode 100644
index 00000000000..a42266ddbc4
--- /dev/null
+++ b/chromium/media/base/decrypt_config_unittest.cc
@@ -0,0 +1,203 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/decrypt_config.h"
+
+#include <sstream>
+
+#include "media/base/encryption_pattern.h"
+#include "media/base/subsample_entry.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+namespace {
+
+const char kDefaultKeyId[] = "key_id";
+const char kDefaultIV[] = "1234567890123456";
+const char kAlternateKeyId[] = "alternate";
+const char kAlternateIV[] = "abcdefghijklmnop";
+
+} // namespace
+
+TEST(DecryptConfigTest, CencConstruction) {
+ auto config = DecryptConfig::CreateCencConfig(kDefaultKeyId, kDefaultIV, {});
+ EXPECT_EQ(config->key_id(), kDefaultKeyId);
+ EXPECT_EQ(config->iv(), kDefaultIV);
+ EXPECT_EQ(config->subsamples().size(), 0u);
+ EXPECT_EQ(config->encryption_mode(), EncryptionMode::kCenc);
+
+ // Now with single subsample entry.
+ config =
+ DecryptConfig::CreateCencConfig(kAlternateKeyId, kDefaultIV, {{1, 2}});
+ EXPECT_EQ(config->key_id(), kAlternateKeyId);
+ EXPECT_EQ(config->iv(), kDefaultIV);
+ EXPECT_EQ(config->subsamples().size(), 1u);
+ EXPECT_EQ(config->subsamples()[0].clear_bytes, 1u);
+ EXPECT_EQ(config->subsamples()[0].cypher_bytes, 2u);
+ EXPECT_EQ(config->encryption_mode(), EncryptionMode::kCenc);
+
+ // Now with multiple subsample entries.
+ config = DecryptConfig::CreateCencConfig(kDefaultKeyId, kAlternateIV,
+ {{1, 2}, {3, 4}, {5, 6}, {7, 8}});
+ EXPECT_EQ(config->key_id(), kDefaultKeyId);
+ EXPECT_EQ(config->iv(), kAlternateIV);
+ EXPECT_EQ(config->subsamples().size(), 4u);
+ EXPECT_EQ(config->subsamples()[1].clear_bytes, 3u);
+ EXPECT_EQ(config->subsamples()[3].cypher_bytes, 8u);
+ EXPECT_EQ(config->encryption_mode(), EncryptionMode::kCenc);
+}
+
+TEST(DecryptConfigTest, CbcsConstruction) {
+ auto config = DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kDefaultIV, {},
+ EncryptionPattern(1, 2));
+ EXPECT_EQ(config->key_id(), kDefaultKeyId);
+ EXPECT_EQ(config->iv(), kDefaultIV);
+ EXPECT_EQ(config->subsamples().size(), 0u);
+ EXPECT_EQ(config->encryption_mode(), EncryptionMode::kCbcs);
+ EXPECT_TRUE(config->HasPattern());
+ EXPECT_EQ(config->encryption_pattern()->crypt_byte_block(), 1u);
+ EXPECT_EQ(config->encryption_pattern()->skip_byte_block(), 2u);
+
+ // Now with multiple subsample entries.
+ config = DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kAlternateIV,
+ {{1, 2}, {3, 4}, {5, 6}, {7, 8}},
+ EncryptionPattern(1, 0));
+ EXPECT_EQ(config->key_id(), kDefaultKeyId);
+ EXPECT_EQ(config->iv(), kAlternateIV);
+ EXPECT_EQ(config->subsamples().size(), 4u);
+ EXPECT_EQ(config->subsamples()[0].clear_bytes, 1u);
+ EXPECT_EQ(config->subsamples()[0].cypher_bytes, 2u);
+ EXPECT_EQ(config->subsamples()[3].clear_bytes, 7u);
+ EXPECT_EQ(config->subsamples()[3].cypher_bytes, 8u);
+ EXPECT_EQ(config->encryption_mode(), EncryptionMode::kCbcs);
+ EXPECT_TRUE(config->HasPattern());
+ EXPECT_EQ(config->encryption_pattern()->crypt_byte_block(), 1u);
+ EXPECT_EQ(config->encryption_pattern()->skip_byte_block(), 0u);
+
+ // Now without pattern.
+ config = DecryptConfig::CreateCbcsConfig(kAlternateKeyId, kDefaultIV,
+ {{1, 2}}, base::nullopt);
+ EXPECT_EQ(config->key_id(), kAlternateKeyId);
+ EXPECT_EQ(config->iv(), kDefaultIV);
+ EXPECT_EQ(config->subsamples().size(), 1u);
+ EXPECT_EQ(config->subsamples()[0].clear_bytes, 1u);
+ EXPECT_EQ(config->subsamples()[0].cypher_bytes, 2u);
+ EXPECT_EQ(config->encryption_mode(), EncryptionMode::kCbcs);
+ EXPECT_FALSE(config->HasPattern());
+}
+
+TEST(DecryptConfigTest, Matches) {
+ auto config1 = DecryptConfig::CreateCencConfig(kDefaultKeyId, kDefaultIV, {});
+ EXPECT_TRUE(config1->Matches(*config1));
+
+ auto config2 = DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kDefaultIV, {},
+ EncryptionPattern(1, 2));
+ EXPECT_TRUE(config2->Matches(*config2));
+
+ EXPECT_FALSE(config1->Matches(*config2));
+ EXPECT_FALSE(config2->Matches(*config1));
+}
+
+TEST(DecryptConfigTest, CencMatches) {
+ auto config1 = DecryptConfig::CreateCencConfig(kDefaultKeyId, kDefaultIV, {});
+ EXPECT_TRUE(config1->Matches(*config1));
+
+ // Different key_id.
+ auto config2 =
+ DecryptConfig::CreateCencConfig(kAlternateKeyId, kDefaultIV, {});
+ EXPECT_FALSE(config1->Matches(*config2));
+ EXPECT_FALSE(config2->Matches(*config1));
+
+ // Different IV.
+ auto config3 =
+ DecryptConfig::CreateCencConfig(kDefaultKeyId, kAlternateIV, {});
+ EXPECT_FALSE(config1->Matches(*config3));
+ EXPECT_FALSE(config2->Matches(*config3));
+ EXPECT_FALSE(config3->Matches(*config1));
+ EXPECT_FALSE(config3->Matches(*config2));
+
+ // Different subsamples.
+ auto config4 = DecryptConfig::CreateCencConfig(
+ kDefaultKeyId, kDefaultIV, {{1, 2}, {3, 4}, {5, 6}, {7, 8}});
+ EXPECT_FALSE(config1->Matches(*config4));
+ EXPECT_FALSE(config2->Matches(*config4));
+ EXPECT_FALSE(config3->Matches(*config4));
+ EXPECT_FALSE(config4->Matches(*config1));
+ EXPECT_FALSE(config4->Matches(*config2));
+ EXPECT_FALSE(config4->Matches(*config3));
+}
+
+TEST(DecryptConfigTest, CbcsMatches) {
+ auto config1 = DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kDefaultIV, {},
+ EncryptionPattern(1, 2));
+ EXPECT_TRUE(config1->Matches(*config1));
+
+ // Different key_id.
+ auto config2 = DecryptConfig::CreateCbcsConfig(kAlternateKeyId, kDefaultIV,
+ {}, EncryptionPattern(1, 2));
+ EXPECT_FALSE(config1->Matches(*config2));
+ EXPECT_FALSE(config2->Matches(*config1));
+
+ // Different IV.
+ auto config3 = DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kAlternateIV,
+ {}, EncryptionPattern(1, 2));
+ EXPECT_FALSE(config1->Matches(*config3));
+ EXPECT_FALSE(config2->Matches(*config3));
+ EXPECT_FALSE(config3->Matches(*config1));
+ EXPECT_FALSE(config3->Matches(*config2));
+
+ // Different subsamples.
+ auto config4 = DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kDefaultIV,
+ {{1, 2}, {3, 4}, {5, 6}},
+ EncryptionPattern(1, 2));
+ EXPECT_FALSE(config1->Matches(*config4));
+ EXPECT_FALSE(config2->Matches(*config4));
+ EXPECT_FALSE(config3->Matches(*config4));
+ EXPECT_FALSE(config4->Matches(*config1));
+ EXPECT_FALSE(config4->Matches(*config2));
+ EXPECT_FALSE(config4->Matches(*config3));
+
+ // Different pattern.
+ auto config5 = DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kDefaultIV, {},
+ EncryptionPattern(5, 6));
+ EXPECT_FALSE(config1->Matches(*config5));
+ EXPECT_FALSE(config2->Matches(*config5));
+ EXPECT_FALSE(config3->Matches(*config5));
+ EXPECT_FALSE(config4->Matches(*config5));
+ EXPECT_FALSE(config5->Matches(*config1));
+ EXPECT_FALSE(config5->Matches(*config2));
+ EXPECT_FALSE(config5->Matches(*config3));
+ EXPECT_FALSE(config5->Matches(*config4));
+
+ // Without pattern.
+ auto config6 = DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kDefaultIV, {},
+ base::nullopt);
+ EXPECT_FALSE(config1->Matches(*config6));
+ EXPECT_FALSE(config5->Matches(*config6));
+ EXPECT_FALSE(config6->Matches(*config1));
+ EXPECT_FALSE(config6->Matches(*config5));
+}
+
+TEST(DecryptConfigTest, Output) {
+ std::ostringstream stream;
+
+ // Simple 'cenc' config.
+ stream << *DecryptConfig::CreateCencConfig(kDefaultKeyId, kDefaultIV, {});
+
+ // Try with subsamples.
+ stream << *DecryptConfig::CreateCencConfig(kAlternateKeyId, kAlternateIV,
+ {{1, 2}, {3, 4}, {5, 6}});
+
+ // Simple 'cbcs' config.
+ stream << *DecryptConfig::CreateCbcsConfig(kDefaultKeyId, kDefaultIV, {},
+ base::nullopt);
+
+ // 'cbcs' config with subsamples and pattern.
+ stream << *DecryptConfig::CreateCbcsConfig(kAlternateKeyId, kAlternateIV,
+ {{1, 2}, {3, 4}, {5, 6}, {7, 8}},
+ EncryptionPattern(1, 2));
+}
+
+} // namespace media
diff --git a/chromium/media/base/demuxer.h b/chromium/media/base/demuxer.h
index 79940834163..60e46c86331 100644
--- a/chromium/media/base/demuxer.h
+++ b/chromium/media/base/demuxer.h
@@ -74,6 +74,12 @@ class MEDIA_EXPORT Demuxer : public MediaResource {
using MediaTracksUpdatedCB =
base::Callback<void(std::unique_ptr<MediaTracks>)>;
+ // Called once the demuxer has finished enabling or disabling tracks. The type
+ // argument is required because the vector may be empty.
+ using TrackChangeCB =
+ base::OnceCallback<void(DemuxerStream::Type type,
+ const std::vector<DemuxerStream*>&)>;
+
Demuxer();
~Demuxer() override;
@@ -142,15 +148,19 @@ class MEDIA_EXPORT Demuxer : public MediaResource {
// Returns the memory usage in bytes for the demuxer.
virtual int64_t GetMemoryUsage() const = 0;
+ // The |track_ids| vector has either 1 track, or is empty, indicating that
+ // all tracks should be disabled. |change_completed_cb| is fired after the
+ // demuxer streams are disabled, however this callback should then notify
+ // the appropriate renderer in order for tracks to be switched fully.
virtual void OnEnabledAudioTracksChanged(
const std::vector<MediaTrack::Id>& track_ids,
- base::TimeDelta curr_time) = 0;
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) = 0;
- // |track_id| either contains the selected video track id or is null,
- // indicating that all video tracks are deselected/disabled.
virtual void OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> track_id,
- base::TimeDelta curr_time) = 0;
+ const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) = 0;
private:
DISALLOW_COPY_AND_ASSIGN(Demuxer);
diff --git a/chromium/media/base/eme_constants.h b/chromium/media/base/eme_constants.h
index e4f898da60b..29e492d126b 100644
--- a/chromium/media/base/eme_constants.h
+++ b/chromium/media/base/eme_constants.h
@@ -18,13 +18,10 @@ enum class EmeInitDataType { UNKNOWN, WEBM, CENC, KEYIDS, MAX = KEYIDS };
// Defines bitmask values that specify codecs used in Encrypted Media Extension
// (EME). Each value represents a codec within a specific container.
-// The mask values are stored in a SupportedCodecs.
//
// TODO(yucliu): Remove container name from the enum. See crbug.com/724362 for
// more details.
-enum EmeCodec {
- // *_ALL values should only be used for masking, do not use them to specify
- // codec support because they may be extended to include more codecs.
+enum EmeCodec : uint32_t {
EME_CODEC_NONE = 0,
EME_CODEC_WEBM_OPUS = 1 << 0,
EME_CODEC_WEBM_VORBIS = 1 << 1,
@@ -39,53 +36,79 @@ enum EmeCodec {
EME_CODEC_MP4_DV_HEVC = 1 << 9,
EME_CODEC_MP4_AC3 = 1 << 10,
EME_CODEC_MP4_EAC3 = 1 << 11,
- EME_CODEC_WEBM_AUDIO_ALL = EME_CODEC_WEBM_OPUS | EME_CODEC_WEBM_VORBIS,
- EME_CODEC_WEBM_VIDEO_ALL =
- (EME_CODEC_WEBM_VP8 | EME_CODEC_WEBM_VP9 | EME_CODEC_COMMON_VP9),
- EME_CODEC_WEBM_ALL = (EME_CODEC_WEBM_AUDIO_ALL | EME_CODEC_WEBM_VIDEO_ALL),
+ EME_CODEC_MP4_MPEG_H_AUDIO = 1 << 12,
+ EME_CODEC_MP4_FLAC = 1 << 13,
+};
+
+// *_ALL values should only be used for masking, do not use them to specify
+// codec support because they may be extended to include more codecs.
+
+using SupportedCodecs = uint32_t;
+
+constexpr SupportedCodecs GetMp4AudioCodecs() {
+ SupportedCodecs codecs = EME_CODEC_MP4_FLAC;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- EME_CODEC_MP4_AUDIO_ALL = (EME_CODEC_MP4_AAC
+ codecs |= EME_CODEC_MP4_AAC;
#if BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
- |
- EME_CODEC_MP4_AC3 |
- EME_CODEC_MP4_EAC3
+ codecs |= EME_CODEC_MP4_AC3 | EME_CODEC_MP4_EAC3;
#endif // BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
- ),
- EME_CODEC_MP4_VIDEO_ALL = (EME_CODEC_MP4_AVC1 | EME_CODEC_COMMON_VP9
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ codecs |= EME_CODEC_MP4_MPEG_H_AUDIO;
+#endif // BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+ return codecs;
+}
+
+constexpr SupportedCodecs GetMp4VideoCodecs() {
+ SupportedCodecs codecs = EME_CODEC_COMMON_VP9;
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+ codecs |= EME_CODEC_MP4_AVC1;
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
- |
- EME_CODEC_MP4_HEVC
+ codecs |= EME_CODEC_MP4_HEVC;
#endif // BUILDFLAG(ENABLE_HEVC_DEMUXING)
#if BUILDFLAG(ENABLE_DOLBY_VISION_DEMUXING)
- |
- EME_CODEC_MP4_DV_AVC
+ codecs |= EME_CODEC_MP4_DV_AVC;
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
- |
- EME_CODEC_MP4_DV_HEVC
+ codecs |= EME_CODEC_MP4_DV_HEVC;
#endif // BUILDFLAG(ENABLE_HEVC_DEMUXING)
#endif // BUILDFLAG(ENABLE_DOLBY_VISION_DEMUXING)
- ),
- EME_CODEC_MP4_ALL = (EME_CODEC_MP4_AUDIO_ALL | EME_CODEC_MP4_VIDEO_ALL),
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+ return codecs;
+}
+
+constexpr SupportedCodecs EME_CODEC_WEBM_AUDIO_ALL =
+ EME_CODEC_WEBM_OPUS | EME_CODEC_WEBM_VORBIS;
+
+constexpr SupportedCodecs EME_CODEC_WEBM_VIDEO_ALL =
+ EME_CODEC_WEBM_VP8 | EME_CODEC_WEBM_VP9 | EME_CODEC_COMMON_VP9;
+
+constexpr SupportedCodecs EME_CODEC_WEBM_ALL =
+ EME_CODEC_WEBM_AUDIO_ALL | EME_CODEC_WEBM_VIDEO_ALL;
+
+constexpr SupportedCodecs EME_CODEC_MP4_AUDIO_ALL = GetMp4AudioCodecs();
+constexpr SupportedCodecs EME_CODEC_MP4_VIDEO_ALL = GetMp4VideoCodecs();
+
+constexpr SupportedCodecs EME_CODEC_MP4_ALL =
+ EME_CODEC_MP4_AUDIO_ALL | EME_CODEC_MP4_VIDEO_ALL;
+
+constexpr SupportedCodecs EME_CODEC_AUDIO_ALL =
+ EME_CODEC_WEBM_AUDIO_ALL | EME_CODEC_MP4_AUDIO_ALL;
+
+constexpr SupportedCodecs EME_CODEC_VIDEO_ALL =
+ EME_CODEC_WEBM_VIDEO_ALL | EME_CODEC_MP4_VIDEO_ALL;
+
+constexpr SupportedCodecs EME_CODEC_ALL =
+ EME_CODEC_WEBM_ALL | EME_CODEC_MP4_ALL;
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
- EME_CODEC_MP2T_VIDEO_ALL = EME_CODEC_MP4_AVC1,
- EME_CODEC_MP2T_ALL = EME_CODEC_MP2T_VIDEO_ALL,
- EME_CODEC_AUDIO_ALL = (EME_CODEC_WEBM_AUDIO_ALL | EME_CODEC_MP4_AUDIO_ALL),
- EME_CODEC_VIDEO_ALL = (EME_CODEC_WEBM_VIDEO_ALL | EME_CODEC_MP4_VIDEO_ALL |
- EME_CODEC_MP2T_VIDEO_ALL),
- EME_CODEC_ALL = (EME_CODEC_WEBM_ALL | EME_CODEC_MP4_ALL | EME_CODEC_MP2T_ALL),
-#else
- EME_CODEC_AUDIO_ALL = (EME_CODEC_WEBM_AUDIO_ALL | EME_CODEC_MP4_AUDIO_ALL),
- EME_CODEC_VIDEO_ALL = (EME_CODEC_WEBM_VIDEO_ALL | EME_CODEC_MP4_VIDEO_ALL),
- EME_CODEC_ALL = (EME_CODEC_WEBM_ALL | EME_CODEC_MP4_ALL),
+constexpr SupportedCodecs EME_CODEC_MP2T_VIDEO_ALL = EME_CODEC_MP4_AVC1;
+static_assert(
+ (EME_CODEC_MP2T_VIDEO_ALL & EME_CODEC_VIDEO_ALL) ==
+ EME_CODEC_MP2T_VIDEO_ALL,
+    "EME_CODEC_MP2T_VIDEO_ALL should be a subset of EME_CODEC_VIDEO_ALL");
#endif // BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
-#else
- EME_CODEC_AUDIO_ALL = EME_CODEC_WEBM_AUDIO_ALL,
- EME_CODEC_VIDEO_ALL = EME_CODEC_WEBM_VIDEO_ALL,
- EME_CODEC_ALL = EME_CODEC_WEBM_ALL,
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
-};
-
-typedef uint32_t SupportedCodecs;
enum class EmeSessionTypeSupport {
// Invalid default value.
diff --git a/chromium/media/base/encryption_pattern.cc b/chromium/media/base/encryption_pattern.cc
index ebd87cd866b..e2c194083b8 100644
--- a/chromium/media/base/encryption_pattern.cc
+++ b/chromium/media/base/encryption_pattern.cc
@@ -12,15 +12,28 @@ EncryptionPattern::EncryptionPattern(uint32_t crypt_byte_block,
uint32_t skip_byte_block)
: crypt_byte_block_(crypt_byte_block), skip_byte_block_(skip_byte_block) {}
+EncryptionPattern::EncryptionPattern(const EncryptionPattern& rhs) = default;
+
+EncryptionPattern& EncryptionPattern::operator=(const EncryptionPattern& rhs) =
+ default;
+
EncryptionPattern::~EncryptionPattern() = default;
-bool EncryptionPattern::Matches(const EncryptionPattern& other) const {
- return crypt_byte_block_ == other.crypt_byte_block() &&
- skip_byte_block_ == other.skip_byte_block();
+bool EncryptionPattern::IsInEffect() const {
+ // ISO/IEC 23001-7(2016), section 10.3, discussing 'cens' pattern encryption
+ // scheme, states "Tracks other than video are protected using whole-block
+ // full-sample encryption as specified in 9.7 and hence skip_byte_block
+ // SHALL be 0." So pattern is in effect as long as |crypt_byte_block_| is set.
+ return crypt_byte_block_ != 0;
+}
+
+bool EncryptionPattern::operator==(const EncryptionPattern& other) const {
+ return crypt_byte_block_ == other.crypt_byte_block_ &&
+ skip_byte_block_ == other.skip_byte_block_;
}
-bool EncryptionPattern::IsInEffect() const {
- return crypt_byte_block_ != 0 && skip_byte_block_ != 0;
+bool EncryptionPattern::operator!=(const EncryptionPattern& other) const {
+ return !operator==(other);
}
} // namespace media
diff --git a/chromium/media/base/encryption_pattern.h b/chromium/media/base/encryption_pattern.h
index 1ff952d7184..41217c4c553 100644
--- a/chromium/media/base/encryption_pattern.h
+++ b/chromium/media/base/encryption_pattern.h
@@ -20,26 +20,24 @@ namespace media {
// encrypted, and the next nine are skipped. This pattern is applied
// repeatedly until the end of the last 16-byte block in the subsample.
// Any remaining bytes are left clear.
-// If either of crypt_byte_block or skip_byte_block is 0, pattern encryption
-// is disabled.
+// If crypt_byte_block is 0, pattern encryption is disabled.
// TODO(jrummell): Use base::Optional<EncryptionPattern> everywhere, and remove
// IsInEffect().
class MEDIA_EXPORT EncryptionPattern {
public:
EncryptionPattern();
EncryptionPattern(uint32_t crypt_byte_block, uint32_t skip_byte_block);
+ EncryptionPattern(const EncryptionPattern& rhs);
+ EncryptionPattern& operator=(const EncryptionPattern& rhs);
~EncryptionPattern();
- bool Matches(const EncryptionPattern& other) const;
-
uint32_t crypt_byte_block() const { return crypt_byte_block_; }
uint32_t skip_byte_block() const { return skip_byte_block_; }
bool IsInEffect() const;
- // Allow copy and assignment.
- EncryptionPattern(const EncryptionPattern& rhs) = default;
- EncryptionPattern& operator=(const EncryptionPattern& rhs) = default;
+ bool operator==(const EncryptionPattern& other) const;
+ bool operator!=(const EncryptionPattern& other) const;
private:
uint32_t crypt_byte_block_ = 0; // Count of the encrypted blocks.
diff --git a/chromium/media/base/encryption_scheme.cc b/chromium/media/base/encryption_scheme.cc
index b98f9635b52..78349d2499e 100644
--- a/chromium/media/base/encryption_scheme.cc
+++ b/chromium/media/base/encryption_scheme.cc
@@ -4,6 +4,10 @@
#include "media/base/encryption_scheme.h"
+#include <ostream>
+
+#include "base/logging.h"
+
namespace media {
EncryptionScheme::EncryptionScheme() = default;
@@ -27,7 +31,29 @@ const EncryptionPattern& EncryptionScheme::pattern() const {
}
bool EncryptionScheme::Matches(const EncryptionScheme& other) const {
- return mode_ == other.mode_ && pattern_.Matches(other.pattern_);
+ return mode_ == other.mode_ && pattern_ == other.pattern_;
+}
+
+std::ostream& operator<<(std::ostream& os,
+ const EncryptionScheme& encryption_scheme) {
+ if (!encryption_scheme.is_encrypted())
+ return os << "Unencrypted";
+
+ bool pattern_in_effect = encryption_scheme.pattern().IsInEffect();
+
+ if (encryption_scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CTR &&
+ !pattern_in_effect) {
+ return os << "CENC";
+ }
+
+ if (encryption_scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CBC &&
+ pattern_in_effect) {
+ return os << "CBCS";
+ }
+
+ NOTREACHED();
+ return os << "Unknown (mode = " << encryption_scheme.mode()
+ << ", pattern_in_effect = " << pattern_in_effect << ")";
}
} // namespace media
diff --git a/chromium/media/base/encryption_scheme.h b/chromium/media/base/encryption_scheme.h
index 53b228dafb3..093b19b56da 100644
--- a/chromium/media/base/encryption_scheme.h
+++ b/chromium/media/base/encryption_scheme.h
@@ -7,6 +7,8 @@
#include <stdint.h>
+#include <iosfwd>
+
#include "media/base/encryption_pattern.h"
#include "media/base/media_export.h"
@@ -45,6 +47,11 @@ class MEDIA_EXPORT EncryptionScheme {
// Allow copy and assignment.
};
+// For logging use only.
+MEDIA_EXPORT std::ostream& operator<<(
+ std::ostream& os,
+ const EncryptionScheme& encryption_scheme);
+
} // namespace media
#endif // MEDIA_BASE_ENCRYPTION_SCHEME_H_
diff --git a/chromium/media/base/fake_audio_renderer_sink.cc b/chromium/media/base/fake_audio_renderer_sink.cc
index 06bbf7d8452..e80a0f817d9 100644
--- a/chromium/media/base/fake_audio_renderer_sink.cc
+++ b/chromium/media/base/fake_audio_renderer_sink.cc
@@ -15,7 +15,6 @@ FakeAudioRendererSink::FakeAudioRendererSink()
AudioParameters(AudioParameters::AUDIO_FAKE,
CHANNEL_LAYOUT_STEREO,
AudioParameters::kTelephoneSampleRate,
- 16,
1)) {}
FakeAudioRendererSink::FakeAudioRendererSink(
diff --git a/chromium/media/base/fake_audio_worker_unittest.cc b/chromium/media/base/fake_audio_worker_unittest.cc
index dee4453e63d..44e48d64bee 100644
--- a/chromium/media/base/fake_audio_worker_unittest.cc
+++ b/chromium/media/base/fake_audio_worker_unittest.cc
@@ -22,7 +22,6 @@ class FakeAudioWorkerTest : public testing::Test {
: params_(AudioParameters::AUDIO_FAKE,
CHANNEL_LAYOUT_STEREO,
44100,
- 8,
128),
fake_worker_(message_loop_.task_runner(), params_),
seen_callbacks_(0) {
diff --git a/chromium/media/base/fake_demuxer_stream.cc b/chromium/media/base/fake_demuxer_stream.cc
index b97effaa96b..ee5db19d387 100644
--- a/chromium/media/base/fake_demuxer_stream.cc
+++ b/chromium/media/base/fake_demuxer_stream.cc
@@ -18,6 +18,7 @@
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
+#include "media/base/decrypt_config.h"
#include "media/base/media_util.h"
#include "media/base/test_helpers.h"
#include "media/base/timestamp_constants.h"
@@ -189,7 +190,7 @@ void FakeDemuxerStream::DoRead() {
// TODO(xhwang): Output out-of-order buffers if needed.
if (is_encrypted_) {
- buffer->set_decrypt_config(std::make_unique<DecryptConfig>(
+ buffer->set_decrypt_config(DecryptConfig::CreateCencConfig(
std::string(kKeyId, kKeyId + arraysize(kKeyId)),
std::string(kIv, kIv + arraysize(kIv)), std::vector<SubsampleEntry>()));
}
@@ -220,9 +221,4 @@ std::vector<DemuxerStream*> FakeMediaResource::GetAllStreams() {
return result;
}
-void FakeMediaResource::SetStreamStatusChangeCB(
- const StreamStatusChangeCB& cb) {
- NOTIMPLEMENTED();
-}
-
} // namespace media
diff --git a/chromium/media/base/fake_demuxer_stream.h b/chromium/media/base/fake_demuxer_stream.h
index d4f83509d11..42f3bb5c093 100644
--- a/chromium/media/base/fake_demuxer_stream.h
+++ b/chromium/media/base/fake_demuxer_stream.h
@@ -113,7 +113,6 @@ class FakeMediaResource : public MediaResource {
// MediaResource implementation.
std::vector<DemuxerStream*> GetAllStreams() override;
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) override;
private:
FakeDemuxerStream fake_video_stream_;
diff --git a/chromium/media/base/fake_text_track_stream.h b/chromium/media/base/fake_text_track_stream.h
index ddec7fd8f7a..d93e5469563 100644
--- a/chromium/media/base/fake_text_track_stream.h
+++ b/chromium/media/base/fake_text_track_stream.h
@@ -6,12 +6,15 @@
#define MEDIA_BASE_FAKE_TEXT_TRACK_STREAM_H_
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/demuxer_stream.h"
#include "media/base/video_decoder_config.h"
#include "testing/gmock/include/gmock/gmock.h"
+namespace base {
+class SingleThreadTaskRunner;
+}
+
namespace media {
// Fake implementation of the DemuxerStream. These are the stream objects
diff --git a/chromium/media/base/fallback_video_decoder.cc b/chromium/media/base/fallback_video_decoder.cc
new file mode 100644
index 00000000000..50568e53141
--- /dev/null
+++ b/chromium/media/base/fallback_video_decoder.cc
@@ -0,0 +1,102 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <utility>
+
+#include "base/bind.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/fallback_video_decoder.h"
+#include "media/base/video_decoder_config.h"
+
+namespace media {
+
+FallbackVideoDecoder::FallbackVideoDecoder(
+ std::unique_ptr<VideoDecoder> preferred,
+ std::unique_ptr<VideoDecoder> fallback)
+ : preferred_decoder_(std::move(preferred)),
+ fallback_decoder_(std::move(fallback)),
+ weak_factory_(this) {}
+
+void FallbackVideoDecoder::Initialize(
+ const VideoDecoderConfig& config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ const InitCB& init_cb,
+ const OutputCB& output_cb,
+ const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb) {
+ // If we've already fallen back, just reinitialize the selected decoder.
+ if (selected_decoder_ && did_fallback_) {
+ selected_decoder_->Initialize(config, low_delay, cdm_context, init_cb,
+ output_cb, waiting_for_decryption_key_cb);
+ return;
+ }
+
+ InitCB fallback_initialize_cb = base::BindRepeating(
+ &FallbackVideoDecoder::FallbackInitialize, weak_factory_.GetWeakPtr(),
+ config, low_delay, cdm_context, init_cb, output_cb,
+ waiting_for_decryption_key_cb);
+
+ preferred_decoder_->Initialize(config, low_delay, cdm_context,
+ std::move(fallback_initialize_cb), output_cb,
+ waiting_for_decryption_key_cb);
+}
+
+void FallbackVideoDecoder::FallbackInitialize(
+ const VideoDecoderConfig& config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ const InitCB& init_cb,
+ const OutputCB& output_cb,
+ const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb,
+ bool success) {
+ // The preferred decoder was successfully initialized.
+ if (success) {
+ selected_decoder_ = preferred_decoder_.get();
+ init_cb.Run(true);
+ return;
+ }
+
+ did_fallback_ = true;
+ preferred_decoder_.reset();
+ selected_decoder_ = fallback_decoder_.get();
+ fallback_decoder_->Initialize(config, low_delay, cdm_context, init_cb,
+ output_cb, waiting_for_decryption_key_cb);
+}
+
+void FallbackVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
+ const DecodeCB& decode_cb) {
+ DCHECK(selected_decoder_);
+ selected_decoder_->Decode(std::move(buffer), decode_cb);
+}
+
+void FallbackVideoDecoder::Reset(const base::RepeatingClosure& reset_cb) {
+ DCHECK(selected_decoder_);
+ selected_decoder_->Reset(reset_cb);
+}
+
+bool FallbackVideoDecoder::NeedsBitstreamConversion() const {
+ DCHECK(selected_decoder_);
+ return selected_decoder_->NeedsBitstreamConversion();
+}
+
+bool FallbackVideoDecoder::CanReadWithoutStalling() const {
+ DCHECK(selected_decoder_);
+ return selected_decoder_->CanReadWithoutStalling();
+}
+
+int FallbackVideoDecoder::GetMaxDecodeRequests() const {
+ DCHECK(selected_decoder_);
+ return selected_decoder_->GetMaxDecodeRequests();
+}
+
+std::string FallbackVideoDecoder::GetDisplayName() const {
+ // MojoVideoDecoder always identifies itself as such, and never asks for the
+ // name of the underlying decoder.
+ NOTREACHED();
+ return "FallbackVideoDecoder";
+}
+
+FallbackVideoDecoder::~FallbackVideoDecoder() = default;
+
+} // namespace media
diff --git a/chromium/media/base/fallback_video_decoder.h b/chromium/media/base/fallback_video_decoder.h
new file mode 100644
index 00000000000..ed7bcd3d7d6
--- /dev/null
+++ b/chromium/media/base/fallback_video_decoder.h
@@ -0,0 +1,63 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_FALLBACK_VIDEO_DECODER_H_
+#define MEDIA_BASE_FALLBACK_VIDEO_DECODER_H_
+
+#include <memory>
+#include <string>
+
+#include "base/memory/weak_ptr.h"
+#include "media/base/video_decoder.h"
+
+namespace media {
+
+// A Wrapper VideoDecoder which supports a fallback and a preferred decoder.
+class MEDIA_EXPORT FallbackVideoDecoder : public VideoDecoder {
+ public:
+ FallbackVideoDecoder(std::unique_ptr<VideoDecoder> preferred,
+ std::unique_ptr<VideoDecoder> fallback);
+
+ // media::VideoDecoder implementation.
+ std::string GetDisplayName() const override;
+ void Initialize(
+ const VideoDecoderConfig& config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ const InitCB& init_cb,
+ const OutputCB& output_cb,
+ const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb) override;
+ void Decode(scoped_refptr<DecoderBuffer> buffer,
+ const DecodeCB& decode_cb) override;
+ void Reset(const base::RepeatingClosure& reset_cb) override;
+ bool NeedsBitstreamConversion() const override;
+ bool CanReadWithoutStalling() const override;
+ int GetMaxDecodeRequests() const override;
+
+ protected:
+ ~FallbackVideoDecoder() override;
+
+ private:
+ void FallbackInitialize(
+ const VideoDecoderConfig& config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ const InitCB& init_cb,
+ const OutputCB& output_cb,
+ const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb,
+ bool success);
+
+ std::unique_ptr<media::VideoDecoder> preferred_decoder_;
+ std::unique_ptr<media::VideoDecoder> fallback_decoder_;
+ media::VideoDecoder* selected_decoder_ = nullptr;
+ bool did_fallback_ = false;
+
+ base::WeakPtrFactory<FallbackVideoDecoder> weak_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(FallbackVideoDecoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_FALLBACK_VIDEO_DECODER_H_
diff --git a/chromium/media/base/fallback_video_decoder_unittest.cc b/chromium/media/base/fallback_video_decoder_unittest.cc
new file mode 100644
index 00000000000..ca4bcbce478
--- /dev/null
+++ b/chromium/media/base/fallback_video_decoder_unittest.cc
@@ -0,0 +1,160 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <tuple>
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/run_loop.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/fallback_video_decoder.h"
+#include "media/base/gmock_callback_support.h"
+#include "media/base/mock_filters.h"
+#include "media/base/test_helpers.h"
+#include "media/base/video_decoder.h"
+#include "media/base/video_decoder_config.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest-param-test.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::StrictMock;
+using ::testing::_;
+
+namespace media {
+
+class FallbackVideoDecoderUnittest : public ::testing::TestWithParam<bool> {
+ public:
+ FallbackVideoDecoderUnittest()
+ : backup_decoder_(nullptr),
+ preferred_decoder_(nullptr),
+ fallback_decoder_(nullptr) {}
+
+ ~FallbackVideoDecoderUnittest() override { Destroy(); }
+
+ std::unique_ptr<VideoDecoder> MakeMockDecoderWithExpectations(
+ bool is_fallback,
+ bool preferred_should_succeed) {
+ std::string n = is_fallback ? "Fallback" : "Preferred";
+ StrictMock<MockVideoDecoder>* result = new StrictMock<MockVideoDecoder>(n);
+
+ if (is_fallback && !preferred_should_succeed) {
+ EXPECT_CALL(*result, Initialize(_, _, _, _, _, _))
+ .WillOnce(RunCallback<3>(true));
+ }
+
+ if (!is_fallback) {
+ preferred_decoder_ = result;
+ EXPECT_CALL(*result, Initialize(_, _, _, _, _, _))
+ .WillOnce(RunCallback<3>(preferred_should_succeed));
+ } else {
+ backup_decoder_ = result;
+ }
+
+ return std::unique_ptr<VideoDecoder>(result);
+ }
+
+ void Initialize(bool preferred_should_succeed) {
+ fallback_decoder_ = new FallbackVideoDecoder(
+ MakeMockDecoderWithExpectations(false, preferred_should_succeed),
+ MakeMockDecoderWithExpectations(true, preferred_should_succeed));
+
+ fallback_decoder_->Initialize(
+ video_decoder_config_, false, nullptr,
+ base::BindRepeating([](bool success) { EXPECT_TRUE(success); }),
+ base::DoNothing(), base::DoNothing());
+ }
+
+ protected:
+ void Destroy() { std::default_delete<VideoDecoder>()(fallback_decoder_); }
+
+ bool PreferredShouldSucceed() { return GetParam(); }
+
+ StrictMock<MockVideoDecoder>* backup_decoder_;
+ StrictMock<MockVideoDecoder>* preferred_decoder_;
+ VideoDecoder* fallback_decoder_;
+ VideoDecoderConfig video_decoder_config_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FallbackVideoDecoderUnittest);
+};
+
+INSTANTIATE_TEST_CASE_P(DoesPreferredInitFail,
+ FallbackVideoDecoderUnittest,
+ testing::ValuesIn({true, false}));
+
+#define EXPECT_ON_CORRECT_DECODER(method) \
+ if (PreferredShouldSucceed()) \
+ EXPECT_CALL(*preferred_decoder_, method); \
+ else \
+ EXPECT_CALL(*backup_decoder_, method) // Intentionally leave off semicolon.
+
+// Do not test the name lookup; it is NOTREACHED.
+TEST_P(FallbackVideoDecoderUnittest, MethodsRedirectedAsExpected) {
+ Initialize(PreferredShouldSucceed());
+
+ EXPECT_ON_CORRECT_DECODER(Decode(_, _));
+ fallback_decoder_->Decode(nullptr, base::DoNothing());
+
+ EXPECT_ON_CORRECT_DECODER(Reset(_));
+ fallback_decoder_->Reset(base::DoNothing());
+
+ EXPECT_ON_CORRECT_DECODER(NeedsBitstreamConversion());
+ fallback_decoder_->NeedsBitstreamConversion();
+
+ EXPECT_ON_CORRECT_DECODER(CanReadWithoutStalling());
+ fallback_decoder_->CanReadWithoutStalling();
+
+ EXPECT_ON_CORRECT_DECODER(GetMaxDecodeRequests());
+ fallback_decoder_->GetMaxDecodeRequests();
+}
+
+// │ first initialization │ second initialization │
+// preferred │ preferred │ backup │ preferred │ backup │
+// will succeed │ init called │ init called │ init called │ init called │
+//───────────────┼─────────────┼─────────────┼─────────────┼─────────────┤
+// false │ ✓ │ ✓ │ x │ ✓ │
+// true │ ✓ │ x │ ✓ │ ✓ │
+TEST_P(FallbackVideoDecoderUnittest, ReinitializeWithPreferredFailing) {
+ Initialize(PreferredShouldSucceed());
+
+ // If we succeedd the first time, it should still be alive.
+ if (PreferredShouldSucceed()) {
+ EXPECT_CALL(*preferred_decoder_, Initialize(_, _, _, _, _, _))
+ .WillOnce(RunCallback<3>(false)); // fail initialization
+ }
+ EXPECT_CALL(*backup_decoder_, Initialize(_, _, _, _, _, _))
+ .WillOnce(RunCallback<3>(true));
+
+ fallback_decoder_->Initialize(
+ video_decoder_config_, false, nullptr,
+ base::BindRepeating([](bool success) { EXPECT_TRUE(success); }),
+ base::DoNothing(), base::DoNothing());
+}
+
+// │ first initialization │ second initialization │
+// preferred │ preferred │ backup │ preferred │ backup │
+// will succeed │ init called │ init called │ init called │ init called │
+//───────────────┼─────────────┼─────────────┼─────────────┼─────────────┤
+// false │ ✓ │ ✓ │ x │ ✓ │
+// true │ ✓ │ x │ ✓ │ x │
+TEST_P(FallbackVideoDecoderUnittest, ReinitializeWithPreferredSuccessful) {
+ Initialize(PreferredShouldSucceed());
+
+ // If we succeedd the first time, it should still be alive.
+ if (PreferredShouldSucceed()) {
+ EXPECT_CALL(*preferred_decoder_, Initialize(_, _, _, _, _, _))
+ .WillOnce(RunCallback<3>(true)); // pass initialization
+ } else {
+ // Otherwise, preferred was deleted, and we only backup still exists.
+ EXPECT_CALL(*backup_decoder_, Initialize(_, _, _, _, _, _))
+ .WillOnce(RunCallback<3>(true));
+ }
+
+ fallback_decoder_->Initialize(
+ video_decoder_config_, false, nullptr,
+ base::BindRepeating([](bool success) { EXPECT_TRUE(success); }),
+ base::DoNothing(), base::DoNothing());
+}
+
+} // namespace media
diff --git a/chromium/media/base/ipc/media_param_traits.cc b/chromium/media/base/ipc/media_param_traits.cc
index 6f5e321d205..9967314bd6b 100644
--- a/chromium/media/base/ipc/media_param_traits.cc
+++ b/chromium/media/base/ipc/media_param_traits.cc
@@ -26,7 +26,6 @@ void ParamTraits<AudioParameters>::Write(base::Pickle* m,
WriteParam(m, p.format());
WriteParam(m, p.channel_layout());
WriteParam(m, p.sample_rate());
- WriteParam(m, p.bits_per_sample());
WriteParam(m, p.frames_per_buffer());
WriteParam(m, p.channels());
WriteParam(m, p.effects());
@@ -39,13 +38,12 @@ bool ParamTraits<AudioParameters>::Read(const base::Pickle* m,
AudioParameters* r) {
AudioParameters::Format format;
ChannelLayout channel_layout;
- int sample_rate, bits_per_sample, frames_per_buffer, channels, effects;
+ int sample_rate, frames_per_buffer, channels, effects;
std::vector<media::Point> mic_positions;
AudioLatency::LatencyType latency_tag;
if (!ReadParam(m, iter, &format) || !ReadParam(m, iter, &channel_layout) ||
!ReadParam(m, iter, &sample_rate) ||
- !ReadParam(m, iter, &bits_per_sample) ||
!ReadParam(m, iter, &frames_per_buffer) ||
!ReadParam(m, iter, &channels) || !ReadParam(m, iter, &effects) ||
!ReadParam(m, iter, &mic_positions) ||
@@ -53,7 +51,7 @@ bool ParamTraits<AudioParameters>::Read(const base::Pickle* m,
return false;
}
- AudioParameters params(format, channel_layout, sample_rate, bits_per_sample,
+ AudioParameters params(format, channel_layout, sample_rate,
frames_per_buffer);
params.set_channels_for_discrete(channels);
params.set_effects(effects);
diff --git a/chromium/media/base/ipc/media_param_traits_macros.h b/chromium/media/base/ipc/media_param_traits_macros.h
index 59e2f0946c0..2e7aa74fce5 100644
--- a/chromium/media/base/ipc/media_param_traits_macros.h
+++ b/chromium/media/base/ipc/media_param_traits_macros.h
@@ -15,6 +15,7 @@
#include "media/base/channel_layout.h"
#include "media/base/content_decryption_module.h"
#include "media/base/decode_status.h"
+#include "media/base/decrypt_config.h"
#include "media/base/decryptor.h"
#include "media/base/demuxer_stream.h"
#include "media/base/eme_constants.h"
@@ -92,6 +93,9 @@ IPC_ENUM_TRAITS_MAX_VALUE(media::DemuxerStream::Type,
IPC_ENUM_TRAITS_MAX_VALUE(media::EmeInitDataType, media::EmeInitDataType::MAX)
+IPC_ENUM_TRAITS_MAX_VALUE(media::EncryptionMode,
+ media::EncryptionMode::kMaxValue)
+
IPC_ENUM_TRAITS_MAX_VALUE(media::EncryptionScheme::CipherMode,
media::EncryptionScheme::CipherMode::CIPHER_MODE_MAX)
diff --git a/chromium/media/base/key_system_properties.h b/chromium/media/base/key_system_properties.h
index 16d089159b3..33d2d52de3d 100644
--- a/chromium/media/base/key_system_properties.h
+++ b/chromium/media/base/key_system_properties.h
@@ -8,6 +8,7 @@
#include <string>
#include "build/build_config.h"
+#include "media/base/decrypt_config.h"
#include "media/base/eme_constants.h"
#include "media/base/media_export.h"
@@ -25,6 +26,10 @@ class MEDIA_EXPORT KeySystemProperties {
virtual bool IsSupportedInitDataType(
EmeInitDataType init_data_type) const = 0;
+ // Returns whether |encryption_scheme| is supported by this key system.
+ virtual bool IsEncryptionSchemeSupported(
+ EncryptionMode encryption_scheme) const = 0;
+
// Returns the codecs supported by this key system.
virtual SupportedCodecs GetSupportedCodecs() const = 0;
diff --git a/chromium/media/base/key_systems.cc b/chromium/media/base/key_systems.cc
index 36f5f498845..a51a28df388 100644
--- a/chromium/media/base/key_systems.cc
+++ b/chromium/media/base/key_systems.cc
@@ -31,28 +31,33 @@ const char kClearKeyKeySystem[] = "org.w3.clearkey";
const char kClearKeyKeySystemNameForUMA[] = "ClearKey";
const char kUnknownKeySystemNameForUMA[] = "Unknown";
-struct NamedCodec {
- const char* name;
- EmeCodec type;
+struct MimeTypeToCodecs {
+ const char* mime_type;
+ SupportedCodecs codecs;
};
// Mapping between containers and their codecs.
// Only audio codecs can belong to a "audio/*" mime_type, and only video codecs
// can belong to a "video/*" mime_type.
-static const NamedCodec kMimeTypeToCodecMasks[] = {
+static const MimeTypeToCodecs kMimeTypeToCodecsMap[] = {
{"audio/webm", EME_CODEC_WEBM_AUDIO_ALL},
{"video/webm", EME_CODEC_WEBM_VIDEO_ALL},
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
{"audio/mp4", EME_CODEC_MP4_AUDIO_ALL},
{"video/mp4", EME_CODEC_MP4_VIDEO_ALL},
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
{"video/mp2t", EME_CODEC_MP2T_VIDEO_ALL},
#endif // BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+}; // namespace media
+
+struct NameToCodec {
+ const char* name;
+ EmeCodec codec;
};
// Mapping between codec names and enum values.
-static const NamedCodec kCodecStrings[] = {
+static const NameToCodec kCodecMap[] = {
{"opus", EME_CODEC_WEBM_OPUS}, // Opus.
{"vorbis", EME_CODEC_WEBM_VORBIS}, // Vorbis.
{"vp8", EME_CODEC_WEBM_VP8}, // VP8.
@@ -60,12 +65,16 @@ static const NamedCodec kCodecStrings[] = {
{"vp9", EME_CODEC_WEBM_VP9}, // VP9.
{"vp9.0", EME_CODEC_WEBM_VP9}, // VP9.
{"vp09", EME_CODEC_COMMON_VP9}, // New multi-part VP9 for WebM and MP4.
+ {"flac", EME_CODEC_MP4_FLAC}, // FLAC.
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
{"mp4a", EME_CODEC_MP4_AAC}, // AAC.
#if BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
{"ac-3", EME_CODEC_MP4_AC3}, // AC3.
{"ec-3", EME_CODEC_MP4_EAC3}, // EAC3.
#endif
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ {"mhm1", EME_CODEC_MP4_MPEG_H_AUDIO}, // MPEG-H Audio.
+#endif
{"avc1", EME_CODEC_MP4_AVC1}, // AVC1 for MP4 and MP2T
{"avc3", EME_CODEC_MP4_AVC1}, // AVC3 for MP4 and MP2T
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
@@ -88,25 +97,29 @@ class ClearKeyProperties : public KeySystemProperties {
std::string GetKeySystemName() const override { return kClearKeyKeySystem; }
bool IsSupportedInitDataType(EmeInitDataType init_data_type) const override {
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- if (init_data_type == EmeInitDataType::CENC)
- return true;
-#endif
- return init_data_type == EmeInitDataType::WEBM ||
+ return init_data_type == EmeInitDataType::CENC ||
+ init_data_type == EmeInitDataType::WEBM ||
init_data_type == EmeInitDataType::KEYIDS;
}
+ bool IsEncryptionSchemeSupported(
+ EncryptionMode encryption_scheme) const override {
+ switch (encryption_scheme) {
+ case EncryptionMode::kCenc:
+ case EncryptionMode::kCbcs:
+ return true;
+ case EncryptionMode::kUnencrypted:
+ break;
+ }
+ NOTREACHED();
+ return false;
+ }
+
SupportedCodecs GetSupportedCodecs() const override {
// On Android, Vorbis, VP8, AAC and AVC1 are supported in MediaCodec:
// http://developer.android.com/guide/appendix/media-formats.html
// VP9 support is device dependent.
- SupportedCodecs codecs = EME_CODEC_WEBM_ALL;
-
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- codecs |= EME_CODEC_MP4_ALL;
-#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
-
- return codecs;
+ return EME_CODEC_WEBM_ALL | EME_CODEC_MP4_ALL;
}
EmeConfigRule GetRobustnessConfigRule(
@@ -201,6 +214,10 @@ class KeySystemsImpl : public KeySystems {
bool IsSupportedInitDataType(const std::string& key_system,
EmeInitDataType init_data_type) const override;
+ bool IsEncryptionSchemeSupported(
+ const std::string& key_system,
+ EncryptionMode encryption_scheme) const override;
+
EmeConfigRule GetContentTypeConfigRule(
const std::string& key_system,
EmeMediaType media_type,
@@ -235,14 +252,14 @@ class KeySystemsImpl : public KeySystems {
void AddSupportedKeySystems(
std::vector<std::unique_ptr<KeySystemProperties>> key_systems);
- void RegisterMimeType(const std::string& mime_type, EmeCodec codecs_mask);
+ void RegisterMimeType(const std::string& mime_type, SupportedCodecs codecs);
bool IsValidMimeTypeCodecsCombination(const std::string& mime_type,
- SupportedCodecs codecs_mask) const;
+ SupportedCodecs codecs) const;
typedef base::hash_map<std::string, std::unique_ptr<KeySystemProperties>>
KeySystemPropertiesMap;
- typedef base::hash_map<std::string, SupportedCodecs> MimeTypeCodecsMap;
- typedef base::hash_map<std::string, EmeCodec> CodecsMap;
+ typedef base::hash_map<std::string, SupportedCodecs> MimeTypeToCodecsMap;
+ typedef base::hash_map<std::string, EmeCodec> CodecMap;
typedef base::hash_map<std::string, EmeInitDataType> InitDataTypesMap;
// TODO(sandersd): Separate container enum from codec mask value.
@@ -256,8 +273,8 @@ class KeySystemsImpl : public KeySystems {
KeySystemPropertiesMap key_system_properties_map_;
// This member should only be modified by RegisterMimeType().
- MimeTypeCodecsMap mime_type_to_codec_mask_map_;
- CodecsMap codec_string_map_;
+ MimeTypeToCodecsMap mime_type_to_codecs_map_;
+ CodecMap codec_map_;
SupportedCodecs audio_codec_mask_;
SupportedCodecs video_codec_mask_;
@@ -279,14 +296,14 @@ KeySystemsImpl* KeySystemsImpl::GetInstance() {
KeySystemsImpl::KeySystemsImpl()
: audio_codec_mask_(EME_CODEC_AUDIO_ALL),
video_codec_mask_(EME_CODEC_VIDEO_ALL) {
- for (size_t i = 0; i < arraysize(kCodecStrings); ++i) {
- const std::string& name = kCodecStrings[i].name;
- DCHECK(!codec_string_map_.count(name));
- codec_string_map_[name] = kCodecStrings[i].type;
+ for (size_t i = 0; i < arraysize(kCodecMap); ++i) {
+ const std::string& name = kCodecMap[i].name;
+ DCHECK(!codec_map_.count(name));
+ codec_map_[name] = kCodecMap[i].codec;
}
- for (size_t i = 0; i < arraysize(kMimeTypeToCodecMasks); ++i) {
- RegisterMimeType(kMimeTypeToCodecMasks[i].name,
- kMimeTypeToCodecMasks[i].type);
+ for (size_t i = 0; i < arraysize(kMimeTypeToCodecsMap); ++i) {
+ RegisterMimeType(kMimeTypeToCodecsMap[i].mime_type,
+ kMimeTypeToCodecsMap[i].codecs);
}
// Always update supported key systems during construction.
@@ -297,9 +314,9 @@ KeySystemsImpl::~KeySystemsImpl() = default;
SupportedCodecs KeySystemsImpl::GetCodecMaskForMimeType(
const std::string& container_mime_type) const {
- MimeTypeCodecsMap::const_iterator iter =
- mime_type_to_codec_mask_map_.find(container_mime_type);
- if (iter == mime_type_to_codec_mask_map_.end())
+ MimeTypeToCodecsMap::const_iterator iter =
+ mime_type_to_codecs_map_.find(container_mime_type);
+ if (iter == mime_type_to_codecs_map_.end())
return EME_CODEC_NONE;
DCHECK(IsValidMimeTypeCodecsCombination(container_mime_type, iter->second));
@@ -307,8 +324,8 @@ SupportedCodecs KeySystemsImpl::GetCodecMaskForMimeType(
}
EmeCodec KeySystemsImpl::GetCodecForString(const std::string& codec) const {
- CodecsMap::const_iterator iter = codec_string_map_.find(codec);
- if (iter != codec_string_map_.end())
+ CodecMap::const_iterator iter = codec_map_.find(codec);
+ if (iter != codec_map_.end())
return iter->second;
return EME_CODEC_NONE;
}
@@ -440,14 +457,15 @@ void KeySystemsImpl::AddSupportedKeySystems(
}
// Adds the MIME type with the codec mask after verifying the validity.
-// Only this function should modify |mime_type_to_codec_mask_map_|.
+// Only this function should modify |mime_type_to_codecs_map_|.
void KeySystemsImpl::RegisterMimeType(const std::string& mime_type,
- EmeCodec codecs_mask) {
+ SupportedCodecs codecs) {
DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(!mime_type_to_codec_mask_map_.count(mime_type));
- DCHECK(IsValidMimeTypeCodecsCombination(mime_type, codecs_mask));
+ DCHECK(!mime_type_to_codecs_map_.count(mime_type));
+ DCHECK(IsValidMimeTypeCodecsCombination(mime_type, codecs))
+ << ": mime_type = " << mime_type << ", codecs = " << codecs;
- mime_type_to_codec_mask_map_[mime_type] = static_cast<EmeCodec>(codecs_mask);
+ mime_type_to_codecs_map_[mime_type] = codecs;
}
// Returns whether |mime_type| follows a valid format and the specified codecs
@@ -455,14 +473,15 @@ void KeySystemsImpl::RegisterMimeType(const std::string& mime_type,
// Only audio/ or video/ MIME types with their respective codecs are allowed.
bool KeySystemsImpl::IsValidMimeTypeCodecsCombination(
const std::string& mime_type,
- SupportedCodecs codecs_mask) const {
+ SupportedCodecs codecs) const {
DCHECK(thread_checker_.CalledOnValidThread());
- if (!codecs_mask)
- return false;
+ if (codecs == EME_CODEC_NONE)
+ return true;
+
if (base::StartsWith(mime_type, "audio/", base::CompareCase::SENSITIVE))
- return !(codecs_mask & ~audio_codec_mask_);
+ return !(codecs & ~audio_codec_mask_);
if (base::StartsWith(mime_type, "video/", base::CompareCase::SENSITIVE))
- return !(codecs_mask & ~video_codec_mask_);
+ return !(codecs & ~video_codec_mask_);
return false;
}
@@ -481,6 +500,21 @@ bool KeySystemsImpl::IsSupportedInitDataType(
return key_system_iter->second->IsSupportedInitDataType(init_data_type);
}
+bool KeySystemsImpl::IsEncryptionSchemeSupported(
+ const std::string& key_system,
+ EncryptionMode encryption_scheme) const {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ KeySystemPropertiesMap::const_iterator key_system_iter =
+ key_system_properties_map_.find(key_system);
+ if (key_system_iter == key_system_properties_map_.end()) {
+ NOTREACHED();
+ return false;
+ }
+ return key_system_iter->second->IsEncryptionSchemeSupported(
+ encryption_scheme);
+}
+
std::string KeySystemsImpl::GetKeySystemNameForUMA(
const std::string& key_system) const {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -513,8 +547,8 @@ void KeySystemsImpl::AddCodecMask(EmeMediaType media_type,
const std::string& codec,
uint32_t mask) {
DCHECK(thread_checker_.CalledOnValidThread());
- DCHECK(!codec_string_map_.count(codec));
- codec_string_map_[codec] = static_cast<EmeCodec>(mask);
+ DCHECK(!codec_map_.count(codec));
+ codec_map_[codec] = static_cast<EmeCodec>(mask);
if (media_type == EmeMediaType::AUDIO) {
audio_codec_mask_ |= mask;
} else {
diff --git a/chromium/media/base/key_systems.h b/chromium/media/base/key_systems.h
index 6d885336a81..a99e45b2c98 100644
--- a/chromium/media/base/key_systems.h
+++ b/chromium/media/base/key_systems.h
@@ -10,6 +10,7 @@
#include <string>
#include <vector>
+#include "media/base/decrypt_config.h"
#include "media/base/eme_constants.h"
#include "media/base/media_export.h"
#include "media/media_buildflags.h"
@@ -35,6 +36,11 @@ class MEDIA_EXPORT KeySystems {
const std::string& key_system,
EmeInitDataType init_data_type) const = 0;
+ // Returns whether |encryption_scheme| is supported by |key_system|.
+ virtual bool IsEncryptionSchemeSupported(
+ const std::string& key_system,
+ EncryptionMode encryption_scheme) const = 0;
+
// Returns the configuration rule for supporting a container and list of
// codecs.
virtual EmeConfigRule GetContentTypeConfigRule(
diff --git a/chromium/media/base/key_systems_unittest.cc b/chromium/media/base/key_systems_unittest.cc
index a291c82f7a4..19aaa277906 100644
--- a/chromium/media/base/key_systems_unittest.cc
+++ b/chromium/media/base/key_systems_unittest.cc
@@ -13,6 +13,7 @@
#include <vector>
#include "base/logging.h"
+#include "media/base/decrypt_config.h"
#include "media/base/eme_constants.h"
#include "media/base/key_systems.h"
#include "media/base/media.h"
@@ -84,6 +85,12 @@ class AesKeySystemProperties : public TestKeySystemPropertiesBase {
std::string GetKeySystemName() const override { return name_; }
+ bool IsEncryptionSchemeSupported(
+ EncryptionMode encryption_scheme) const override {
+ return encryption_scheme == EncryptionMode::kUnencrypted ||
+ encryption_scheme == EncryptionMode::kCenc;
+ }
+
EmeSessionTypeSupport GetPersistentLicenseSessionSupport() const override {
return EmeSessionTypeSupport::NOT_SUPPORTED;
}
@@ -106,6 +113,11 @@ class ExternalKeySystemProperties : public TestKeySystemPropertiesBase {
public:
std::string GetKeySystemName() const override { return kExternal; }
+ bool IsEncryptionSchemeSupported(
+ EncryptionMode encryption_scheme) const override {
+ return encryption_scheme != EncryptionMode::kUnencrypted;
+ }
+
#if defined(OS_ANDROID)
// We have hw-secure FOO_VIDEO codec support.
SupportedCodecs GetSupportedSecureCodecs() const override {
@@ -140,6 +152,12 @@ class ExternalKeySystemProperties : public TestKeySystemPropertiesBase {
}
};
+static bool IsEncryptionSchemeSupported(const std::string& key_system,
+ EncryptionMode encryption_scheme) {
+ return KeySystems::GetInstance()->IsEncryptionSchemeSupported(
+ key_system, encryption_scheme);
+}
+
static EmeConfigRule GetVideoContentTypeConfigRule(
const std::string& mime_type,
const std::vector<std::string>& codecs,
@@ -570,6 +588,14 @@ TEST_F(KeySystemsTest,
kAudioFoo, vorbis_codec(), kUsesAes));
}
+TEST_F(KeySystemsTest,
+ IsSupportedKeySystem_UsesAesDecryptor_EncryptionSchemes) {
+ EXPECT_TRUE(
+ IsEncryptionSchemeSupported(kUsesAes, EncryptionMode::kUnencrypted));
+ EXPECT_TRUE(IsEncryptionSchemeSupported(kUsesAes, EncryptionMode::kCenc));
+ EXPECT_FALSE(IsEncryptionSchemeSupported(kUsesAes, EncryptionMode::kCbcs));
+}
+
//
// Non-AesDecryptor-based key system.
//
@@ -692,6 +718,17 @@ TEST_F(
kAudioFoo, vorbis_codec(), kExternal));
}
+TEST_F(KeySystemsTest,
+ IsSupportedKeySystem_ExternalDecryptor_EncryptionSchemes) {
+ if (!CanRunExternalKeySystemTests())
+ return;
+
+ EXPECT_FALSE(
+ IsEncryptionSchemeSupported(kExternal, EncryptionMode::kUnencrypted));
+ EXPECT_TRUE(IsEncryptionSchemeSupported(kExternal, EncryptionMode::kCenc));
+ EXPECT_TRUE(IsEncryptionSchemeSupported(kExternal, EncryptionMode::kCbcs));
+}
+
TEST_F(KeySystemsTest, KeySystemNameForUMA) {
EXPECT_EQ("ClearKey", GetKeySystemNameForUMA(kClearKey));
EXPECT_EQ("Widevine", GetKeySystemNameForUMA(kWidevineKeySystem));
diff --git a/chromium/media/base/keyboard_event_counter.cc b/chromium/media/base/keyboard_event_counter.cc
index bddddbd5c50..46c2a2d7a73 100644
--- a/chromium/media/base/keyboard_event_counter.cc
+++ b/chromium/media/base/keyboard_event_counter.cc
@@ -4,7 +4,6 @@
#include "media/base/keyboard_event_counter.h"
-#include "base/atomicops.h"
#include "base/logging.h"
namespace media {
@@ -20,17 +19,15 @@ void KeyboardEventCounter::OnKeyboardEvent(ui::EventType event,
if (pressed_keys_.find(key_code) != pressed_keys_.end())
return;
pressed_keys_.insert(key_code);
- base::subtle::NoBarrier_AtomicIncrement(
- reinterpret_cast<base::subtle::AtomicWord*>(&total_key_presses_), 1);
+ ++total_key_presses_;
} else {
DCHECK_EQ(ui::ET_KEY_RELEASED, event);
pressed_keys_.erase(key_code);
}
}
-size_t KeyboardEventCounter::GetKeyPressCount() const {
- return base::subtle::NoBarrier_Load(
- reinterpret_cast<const base::subtle::AtomicWord*>(&total_key_presses_));
+uint32_t KeyboardEventCounter::GetKeyPressCount() const {
+ return total_key_presses_.load();
}
} // namespace media
diff --git a/chromium/media/base/keyboard_event_counter.h b/chromium/media/base/keyboard_event_counter.h
index 614d21f86b7..5e84730c672 100644
--- a/chromium/media/base/keyboard_event_counter.h
+++ b/chromium/media/base/keyboard_event_counter.h
@@ -7,10 +7,10 @@
#include <stddef.h>
+#include <atomic>
#include <set>
#include "base/macros.h"
-#include "base/synchronization/lock.h"
#include "media/base/media_export.h"
#include "ui/events/event_constants.h"
#include "ui/events/keycodes/keyboard_codes.h"
@@ -28,7 +28,7 @@ class MEDIA_EXPORT KeyboardEventCounter {
// Returns the total number of keypresses since its creation or last Reset()
// call. Can be called on any thread.
- size_t GetKeyPressCount() const;
+ uint32_t GetKeyPressCount() const;
// The client should call this method on key down or key up events.
// Must be called on a single thread.
@@ -38,7 +38,7 @@ class MEDIA_EXPORT KeyboardEventCounter {
// The set of keys currently held down.
std::set<ui::KeyboardCode> pressed_keys_;
- size_t total_key_presses_;
+ std::atomic<uint32_t> total_key_presses_;
DISALLOW_COPY_AND_ASSIGN(KeyboardEventCounter);
};
diff --git a/chromium/media/base/keyboard_event_counter_unittest.cc b/chromium/media/base/keyboard_event_counter_unittest.cc
index 0991c25b250..ec3707c7ad5 100644
--- a/chromium/media/base/keyboard_event_counter_unittest.cc
+++ b/chromium/media/base/keyboard_event_counter_unittest.cc
@@ -7,7 +7,6 @@
#include <memory>
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "build/build_config.h"
#include "testing/gmock/include/gmock/gmock.h"
diff --git a/chromium/media/base/media_log.cc b/chromium/media/base/media_log.cc
index 93700720293..623ba5d0964 100644
--- a/chromium/media/base/media_log.cc
+++ b/chromium/media/base/media_log.cc
@@ -21,6 +21,8 @@ std::string MediaLog::MediaLogLevelToString(MediaLogLevel level) {
switch (level) {
case MEDIALOG_ERROR:
return "error";
+ case MEDIALOG_WARNING:
+ return "warning";
case MEDIALOG_INFO:
return "info";
case MEDIALOG_DEBUG:
@@ -34,6 +36,8 @@ MediaLogEvent::Type MediaLog::MediaLogLevelToEventType(MediaLogLevel level) {
switch (level) {
case MEDIALOG_ERROR:
return MediaLogEvent::MEDIA_ERROR_LOG_ENTRY;
+ case MEDIALOG_WARNING:
+ return MediaLogEvent::MEDIA_WARNING_LOG_ENTRY;
case MEDIALOG_INFO:
return MediaLogEvent::MEDIA_INFO_LOG_ENTRY;
case MEDIALOG_DEBUG:
@@ -71,6 +75,8 @@ std::string MediaLog::EventTypeToString(MediaLogEvent::Type type) {
return "TEXT_ENDED";
case MediaLogEvent::MEDIA_ERROR_LOG_ENTRY:
return "MEDIA_ERROR_LOG_ENTRY";
+ case MediaLogEvent::MEDIA_WARNING_LOG_ENTRY:
+ return "MEDIA_WARNING_LOG_ENTRY";
case MediaLogEvent::MEDIA_INFO_LOG_ENTRY:
return "MEDIA_INFO_LOG_ENTRY";
case MediaLogEvent::MEDIA_DEBUG_LOG_ENTRY:
diff --git a/chromium/media/base/media_log.h b/chromium/media/base/media_log.h
index 8386b830fdd..084c113dcdd 100644
--- a/chromium/media/base/media_log.h
+++ b/chromium/media/base/media_log.h
@@ -23,11 +23,27 @@
namespace media {
+// Interface for media components to log to chrome://media-internals log.
+//
+// Implementations only need to implement AddEvent(), which must be thread-safe.
+// AddEvent() is expected to be called from multiple threads.
class MEDIA_EXPORT MediaLog {
public:
enum MediaLogLevel {
+ // Fatal error, e.g. cause of playback failure. Since this is also used to
+ // form MediaError.message, do NOT use this for non-fatal errors to avoid
+ // contaminating MediaError.message.
MEDIALOG_ERROR,
+
+ // Warning about non-fatal issues, e.g. quality of playback issues such as
+ // audio/video out of sync.
+ MEDIALOG_WARNING,
+
+ // General info useful for Chromium and/or web developers, testers and even
+ // users, e.g. audio/video codecs used in a playback instance.
MEDIALOG_INFO,
+
+ // Misc debug info for Chromium developers.
MEDIALOG_DEBUG,
};
diff --git a/chromium/media/base/media_log_event.h b/chromium/media/base/media_log_event.h
index 72f8c3b7399..27fb20e9b43 100644
--- a/chromium/media/base/media_log_event.h
+++ b/chromium/media/base/media_log_event.h
@@ -16,9 +16,7 @@ namespace media {
struct MediaLogEvent {
MediaLogEvent() {}
- MediaLogEvent(const MediaLogEvent& event) {
- *this = event;
- }
+ MediaLogEvent(const MediaLogEvent& event) { *this = event; }
MediaLogEvent& operator=(const MediaLogEvent& event) {
id = event.id;
@@ -76,6 +74,10 @@ struct MediaLogEvent {
MEDIA_ERROR_LOG_ENTRY,
// params: "error": Error string describing the error detected.
+ // Warning log reported by media code such as playback quality issues.
+ MEDIA_WARNING_LOG_ENTRY,
+ // params: "warning": String describing the warning.
+
// Informative log reported by media code.
MEDIA_INFO_LOG_ENTRY,
// params: "info": String with details of an informative log entry.
diff --git a/chromium/media/base/media_permission.h b/chromium/media/base/media_permission.h
index 53802029bac..05b10a7bd0f 100644
--- a/chromium/media/base/media_permission.h
+++ b/chromium/media/base/media_permission.h
@@ -25,15 +25,15 @@ class MEDIA_EXPORT MediaPermission {
MediaPermission();
virtual ~MediaPermission();
- // Checks whether |type| is permitted for |security_origion| without
- // triggering user interaction (e.g. permission prompt). The status will be
- // |false| if the permission has never been set.
+ // Checks whether |type| is permitted without triggering user interaction
+ // (e.g. permission prompt). The status will be |false| if the permission
+ // has never been set.
virtual void HasPermission(
Type type,
const PermissionStatusCB& permission_status_cb) = 0;
- // Requests |type| permission for |security_origion|. This may trigger user
- // interaction (e.g. permission prompt) if the permission has never been set.
+ // Requests |type| permission. This may trigger user interaction
+ // (e.g. permission prompt) if the permission has never been set.
virtual void RequestPermission(
Type type,
const PermissionStatusCB& permission_status_cb) = 0;
diff --git a/chromium/media/base/media_resource.h b/chromium/media/base/media_resource.h
index 3b65dae159d..37ac33076e1 100644
--- a/chromium/media/base/media_resource.h
+++ b/chromium/media/base/media_resource.h
@@ -17,14 +17,6 @@
namespace media {
-// The callback that is used to notify clients about streams being enabled and
-// disabled. The first parameter is the DemuxerStream whose status changed. The
-// second parameter is a bool indicating whether the stream got enabled or
-// disabled. The third parameter specifies the media playback position at the
-// time the status change happened.
-using StreamStatusChangeCB =
- base::RepeatingCallback<void(DemuxerStream*, bool, base::TimeDelta)>;
-
// Abstract class that defines how to retrieve "media resources" in
// DemuxerStream form (for most cases) or URL form (for the MediaPlayerRenderer
// case).
@@ -58,10 +50,6 @@ class MEDIA_EXPORT MediaResource {
// exists or a null pointer if there is no streams of that type.
DemuxerStream* GetFirstStream(DemuxerStream::Type type);
- // The StreamStatusChangeCB allows clients to receive notifications about one
- // of the streams being disabled or enabled.
- virtual void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) = 0;
-
// For Type::URL:
// Returns the URL parameters of the media to play. Empty URLs are legal,
// and should be handled appropriately by the caller.
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index 45334f8bb6b..8fda8274d5d 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -21,6 +21,10 @@ const char kAutoplayPolicy[] = "autoplay-policy";
const char kDisableAudioOutput[] = "disable-audio-output";
+// Causes the AudioManager to fail creating audio streams. Used when testing
+// various failure cases.
+const char kFailAudioStreamCreation[] = "fail-audio-stream-creation";
+
// Set number of threads to use for video decoding.
const char kVideoThreads[] = "video-threads";
@@ -149,6 +153,20 @@ const char kMSEVideoBufferSizeLimit[] = "mse-video-buffer-size-limit";
// kExternalClearKeyForTesting.
const char kClearKeyCdmPathForTesting[] = "clear-key-cdm-path-for-testing";
+// Overrides the default enabled library CDM interface version(s) with the one
+// specified with this switch, which will be the only version enabled. For
+// example, on a build where CDM 8, CDM 9 and CDM 10 are all supported
+// (implemented), but only CDM 8 and CDM 9 are enabled by default:
+// --override-enabled-cdm-interface-version=8 : Only CDM 8 is enabled
+// --override-enabled-cdm-interface-version=9 : Only CDM 9 is enabled
+// --override-enabled-cdm-interface-version=10 : Only CDM 10 is enabled
+// --override-enabled-cdm-interface-version=11 : No CDM interface is enabled
+// This can be used for local testing and debugging. It can also be used to
+// enable an experimental CDM interface (which is always disabled by default)
+// for testing while it's still in development.
+const char kOverrideEnabledCdmInterfaceVersion[] =
+ "override-enabled-cdm-interface-version";
+
#if !defined(OS_ANDROID)
// Turns on the internal media session backend. This should be used by embedders
// that want to control the media playback with the media session interfaces.
@@ -191,7 +209,7 @@ const base::Feature kPictureInPicture{"PictureInPicture",
base::FEATURE_DISABLED_BY_DEFAULT};
const base::Feature kPreloadMetadataSuspend{"PreloadMetadataSuspend",
- base::FEATURE_DISABLED_BY_DEFAULT};
+ base::FEATURE_ENABLED_BY_DEFAULT};
// Let videos be resumed via remote controls (for example, the notification)
// when in background.
@@ -239,14 +257,15 @@ const base::Feature kMemoryPressureBasedSourceBufferGC{
const base::Feature kMojoVideoDecoder{"MojoVideoDecoder",
base::FEATURE_DISABLED_BY_DEFAULT};
+// Enable the D3D11 video decoder. Must also enable MojoVideoDecoder for
+// this to have any effect.
+const base::Feature kD3D11VideoDecoder{"D3D11VideoDecoder",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
// Manage and report MSE buffered ranges by PTS intervals, not DTS intervals.
const base::Feature kMseBufferByPts{"MseBufferByPts",
base::FEATURE_DISABLED_BY_DEFAULT};
-// Support FLAC codec within ISOBMFF streams used with Media Source Extensions.
-const base::Feature kMseFlacInIsobmff{"MseFlacInIsobmff",
- base::FEATURE_ENABLED_BY_DEFAULT};
-
// Enable new cpu load estimator. Intended for evaluation in local
// testing and origin-trial.
// TODO(nisse): Delete once we have switched over to always using the
@@ -279,6 +298,10 @@ const base::Feature kUnifiedAutoplay{"UnifiedAutoplay",
const base::Feature kUseSurfaceLayerForVideo{"UseSurfaceLayerForVideo",
base::FEATURE_DISABLED_BY_DEFAULT};
+// Enable VA-API hardware encode acceleration for VP8.
+const base::Feature kVaapiVP8Encoder{"VaapiVP8Encoder",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
// Inform video blitter of video color space.
const base::Feature kVideoBlitColorAccuracy{"video-blit-color-accuracy",
base::FEATURE_ENABLED_BY_DEFAULT};
@@ -289,18 +312,17 @@ const base::Feature kVideoBlitColorAccuracy{"video-blit-color-accuracy",
const base::Feature kExternalClearKeyForTesting{
"ExternalClearKeyForTesting", base::FEATURE_DISABLED_BY_DEFAULT};
-// Enables support of experimental CDM interface version(s). This is usually
-// used to enable new CDM interface support for testing while it's still in
-// development. This switch may not be used anywhere if there's no experimental
-// CDM interface being developed.
-const base::Feature kSupportExperimentalCdmInterface{
- "SupportExperimentalCdmInterface", base::FEATURE_DISABLED_BY_DEFAULT};
-
// Enables low-delay video rendering in media pipeline on "live" stream.
const base::Feature kLowDelayVideoRenderingOnLiveStream{
"low-delay-video-rendering-on-live-stream",
base::FEATURE_ENABLED_BY_DEFAULT};
+// Whether the autoplay policy should ignore Web Audio. When ignored, the
+// autoplay policy will be hardcoded to be the legacy one based on the
+// platform.
+const base::Feature kAutoplayIgnoreWebAudio{"AutoplayIgnoreWebAudio",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+
#if defined(OS_ANDROID)
// Lock the screen orientation when a video goes fullscreen.
const base::Feature kVideoFullscreenOrientationLock{
@@ -320,6 +342,9 @@ const base::Feature kMediaDrmPersistentLicense{
const base::Feature kCafMediaRouterImpl{"CafMediaRouterImpl",
base::FEATURE_DISABLED_BY_DEFAULT};
+// Enables the Android Image Reader path for video decoding (for AVDA and MCVD).
+const base::Feature kAImageReaderVideoOutput{"AImageReaderVideoOutput",
+ base::FEATURE_DISABLED_BY_DEFAULT};
#endif
#if defined(OS_WIN)
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index f30cdc533aa..257c4892c9d 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -27,6 +27,7 @@ MEDIA_EXPORT extern const char kAudioServiceQuitTimeoutMs[];
MEDIA_EXPORT extern const char kAutoplayPolicy[];
MEDIA_EXPORT extern const char kDisableAudioOutput[];
+MEDIA_EXPORT extern const char kFailAudioStreamCreation[];
MEDIA_EXPORT extern const char kVideoThreads[];
@@ -81,6 +82,7 @@ MEDIA_EXPORT extern const char kMSEAudioBufferSizeLimit[];
MEDIA_EXPORT extern const char kMSEVideoBufferSizeLimit[];
MEDIA_EXPORT extern const char kClearKeyCdmPathForTesting[];
+MEDIA_EXPORT extern const char kOverrideEnabledCdmInterfaceVersion[];
#if !defined(OS_ANDROID)
MEDIA_EXPORT extern const char kEnableInternalMediaSession[];
@@ -102,9 +104,11 @@ namespace media {
// All features in alphabetical order. The features should be documented
// alongside the definition of their values in the .cc file.
+MEDIA_EXPORT extern const base::Feature kAutoplayIgnoreWebAudio;
MEDIA_EXPORT extern const base::Feature kAv1Decoder;
MEDIA_EXPORT extern const base::Feature kBackgroundVideoPauseOptimization;
MEDIA_EXPORT extern const base::Feature kBackgroundVideoTrackOptimization;
+MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoder;
MEDIA_EXPORT extern const base::Feature kExternalClearKeyForTesting;
MEDIA_EXPORT extern const base::Feature kLowDelayVideoRenderingOnLiveStream;
MEDIA_EXPORT extern const base::Feature kMediaCastOverlayButton;
@@ -113,7 +117,6 @@ MEDIA_EXPORT extern const base::Feature kMediaEngagementBypassAutoplayPolicies;
MEDIA_EXPORT extern const base::Feature kMemoryPressureBasedSourceBufferGC;
MEDIA_EXPORT extern const base::Feature kMojoVideoDecoder;
MEDIA_EXPORT extern const base::Feature kMseBufferByPts;
-MEDIA_EXPORT extern const base::Feature kMseFlacInIsobmff;
MEDIA_EXPORT extern const base::Feature kNewAudioRenderingMixingStrategy;
MEDIA_EXPORT extern const base::Feature kNewEncodeCpuLoadEstimator;
MEDIA_EXPORT extern const base::Feature kNewRemotePlaybackPipeline;
@@ -124,11 +127,11 @@ MEDIA_EXPORT extern const base::Feature kPreloadMediaEngagementData;
MEDIA_EXPORT extern const base::Feature kPreloadMetadataSuspend;
MEDIA_EXPORT extern const base::Feature kResumeBackgroundVideo;
MEDIA_EXPORT extern const base::Feature kSpecCompliantCanPlayThrough;
-MEDIA_EXPORT extern const base::Feature kSupportExperimentalCdmInterface;
MEDIA_EXPORT extern const base::Feature kUseAndroidOverlay;
MEDIA_EXPORT extern const base::Feature kUseAndroidOverlayAggressively;
MEDIA_EXPORT extern const base::Feature kUseNewMediaCache;
MEDIA_EXPORT extern const base::Feature kUseR16Texture;
+MEDIA_EXPORT extern const base::Feature kVaapiVP8Encoder;
MEDIA_EXPORT extern const base::Feature kVideoBlitColorAccuracy;
MEDIA_EXPORT extern const base::Feature kUnifiedAutoplay;
MEDIA_EXPORT extern const base::Feature kUseSurfaceLayerForVideo;
@@ -139,6 +142,7 @@ MEDIA_EXPORT extern const base::Feature kVideoFullscreenOrientationLock;
MEDIA_EXPORT extern const base::Feature kVideoRotateToFullscreen;
MEDIA_EXPORT extern const base::Feature kMediaDrmPersistentLicense;
MEDIA_EXPORT extern const base::Feature kCafMediaRouterImpl;
+MEDIA_EXPORT extern const base::Feature kAImageReaderVideoOutput;
#endif // defined(OS_ANDROID)
#if defined(OS_WIN)
diff --git a/chromium/media/base/media_url_demuxer.cc b/chromium/media/base/media_url_demuxer.cc
index afdd25c329f..d83213553cb 100644
--- a/chromium/media/base/media_url_demuxer.cc
+++ b/chromium/media/base/media_url_demuxer.cc
@@ -23,11 +23,6 @@ std::vector<DemuxerStream*> MediaUrlDemuxer::GetAllStreams() {
return std::vector<DemuxerStream*>();
}
-// Should never be called since MediaResource::Type is URL.
-void MediaUrlDemuxer::SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) {
- NOTREACHED();
-}
-
MediaUrlParams MediaUrlDemuxer::GetMediaUrlParams() const {
return params_;
}
@@ -75,9 +70,22 @@ int64_t MediaUrlDemuxer::GetMemoryUsage() const {
void MediaUrlDemuxer::OnEnabledAudioTracksChanged(
const std::vector<MediaTrack::Id>& track_ids,
- base::TimeDelta curr_time) {}
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) {
+ // TODO(tmathmeyer): potentially support track changes for this renderer.
+ std::vector<DemuxerStream*> streams;
+ std::move(change_completed_cb).Run(DemuxerStream::AUDIO, streams);
+ DLOG(WARNING) << "Track changes are not supported.";
+}
+
void MediaUrlDemuxer::OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> selected_track_id,
- base::TimeDelta curr_time) {}
+ const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) {
+ // TODO(tmathmeyer): potentially support track changes for this renderer.
+ std::vector<DemuxerStream*> streams;
+ std::move(change_completed_cb).Run(DemuxerStream::VIDEO, streams);
+ DLOG(WARNING) << "Track changes are not supported.";
+}
} // namespace media
diff --git a/chromium/media/base/media_url_demuxer.h b/chromium/media/base/media_url_demuxer.h
index 15173cadf17..38527f2d5b4 100644
--- a/chromium/media/base/media_url_demuxer.h
+++ b/chromium/media/base/media_url_demuxer.h
@@ -40,7 +40,6 @@ class MEDIA_EXPORT MediaUrlDemuxer : public Demuxer {
// MediaResource interface.
std::vector<DemuxerStream*> GetAllStreams() override;
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) override;
MediaUrlParams GetMediaUrlParams() const override;
MediaResource::Type GetType() const override;
@@ -58,10 +57,11 @@ class MEDIA_EXPORT MediaUrlDemuxer : public Demuxer {
base::Time GetTimelineOffset() const override;
int64_t GetMemoryUsage() const override;
void OnEnabledAudioTracksChanged(const std::vector<MediaTrack::Id>& track_ids,
- base::TimeDelta curr_time) override;
- void OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> selected_track_id,
- base::TimeDelta curr_time) override;
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) override;
+ void OnSelectedVideoTrackChanged(const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) override;
private:
MediaUrlParams params_;
diff --git a/chromium/media/base/mime_util_internal.cc b/chromium/media/base/mime_util_internal.cc
index 0c7aa01de00..22a0657b387 100644
--- a/chromium/media/base/mime_util_internal.cc
+++ b/chromium/media/base/mime_util_internal.cc
@@ -15,7 +15,7 @@
#include "media/base/video_codecs.h"
#include "media/base/video_color_space.h"
#include "media/media_buildflags.h"
-#include "third_party/libaom/av1_features.h"
+#include "third_party/libaom/av1_buildflags.h"
#if defined(OS_ANDROID)
#include "base/android/build_info.h"
@@ -147,6 +147,8 @@ AudioCodec MimeUtilToAudioCodec(MimeUtil::Codec codec) {
case MimeUtil::MPEG2_AAC:
case MimeUtil::MPEG4_AAC:
return kCodecAAC;
+ case MimeUtil::MPEG_H_AUDIO:
+ return kCodecMpegHAudio;
case MimeUtil::VORBIS:
return kCodecVorbis;
case MimeUtil::OPUS:
@@ -302,6 +304,10 @@ void MimeUtil::AddSupportedMediaFormats() {
mp4_audio_codecs.emplace(EAC3);
#endif // BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ mp4_audio_codecs.emplace(MPEG_H_AUDIO);
+#endif // BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+
mp4_video_codecs.emplace(H264);
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
mp4_video_codecs.emplace(HEVC);
@@ -568,6 +574,9 @@ bool MimeUtil::IsCodecSupportedOnAndroid(
DCHECK(!is_encrypted || platform_info.has_platform_decoders);
return true;
+ case MPEG_H_AUDIO:
+ return false;
+
case OPUS:
// If clear, the unified pipeline can always decode Opus in software.
if (!is_encrypted)
@@ -843,6 +852,13 @@ bool MimeUtil::ParseCodecHelper(const std::string& mime_type_lower_case,
}
#endif
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ if (base::StartsWith(codec_id, "mhm1.", base::CompareCase::SENSITIVE)) {
+ out_result->codec = MimeUtil::MPEG_H_AUDIO;
+ return true;
+ }
+#endif
+
DVLOG(2) << __func__ << ": Unrecognized codec id \"" << codec_id << "\"";
return false;
}
@@ -953,6 +969,7 @@ bool MimeUtil::IsCodecProprietary(Codec codec) const {
case INVALID_CODEC:
case AC3:
case EAC3:
+ case MPEG_H_AUDIO:
case MPEG2_AAC:
case MPEG4_AAC:
case H264:
diff --git a/chromium/media/base/mime_util_internal.h b/chromium/media/base/mime_util_internal.h
index 1f97545a523..a8be8bb3338 100644
--- a/chromium/media/base/mime_util_internal.h
+++ b/chromium/media/base/mime_util_internal.h
@@ -45,7 +45,8 @@ class MEDIA_EXPORT MimeUtil {
THEORA,
DOLBY_VISION,
AV1,
- LAST_CODEC = AV1
+ MPEG_H_AUDIO,
+ LAST_CODEC = MPEG_H_AUDIO
};
// Platform configuration structure. Controls which codecs are supported at
diff --git a/chromium/media/base/mime_util_unittest.cc b/chromium/media/base/mime_util_unittest.cc
index b625de0cbb3..53596fc0d64 100644
--- a/chromium/media/base/mime_util_unittest.cc
+++ b/chromium/media/base/mime_util_unittest.cc
@@ -326,10 +326,8 @@ TEST(MimeUtilTest, ParseAudioCodecString) {
// Valid FLAC string with MP4. Neither decoding nor demuxing is proprietary.
EXPECT_TRUE(ParseAudioCodecString("audio/mp4", "flac", &out_is_ambiguous,
&out_codec));
- if (kUsePropCodecs) {
- EXPECT_FALSE(out_is_ambiguous);
- EXPECT_EQ(kCodecFLAC, out_codec);
- }
+ EXPECT_FALSE(out_is_ambiguous);
+ EXPECT_EQ(kCodecFLAC, out_codec);
// Ambiguous AAC string.
// TODO(chcunningha): This can probably be allowed. I think we treat all
@@ -437,6 +435,7 @@ TEST(IsCodecSupportedOnAndroidTest, EncryptedCodecBehavior) {
// These codecs are never supported by the Android platform.
case MimeUtil::INVALID_CODEC:
case MimeUtil::AV1:
+ case MimeUtil::MPEG_H_AUDIO:
case MimeUtil::THEORA:
EXPECT_FALSE(result);
break;
@@ -495,6 +494,7 @@ TEST(IsCodecSupportedOnAndroidTest, ClearCodecBehavior) {
switch (codec) {
// These codecs are never supported by the Android platform.
case MimeUtil::INVALID_CODEC:
+ case MimeUtil::MPEG_H_AUDIO:
case MimeUtil::THEORA:
case MimeUtil::AV1:
EXPECT_FALSE(result);
diff --git a/chromium/media/base/mock_audio_renderer_sink.cc b/chromium/media/base/mock_audio_renderer_sink.cc
index b0d239a0f83..b35968196e8 100644
--- a/chromium/media/base/mock_audio_renderer_sink.cc
+++ b/chromium/media/base/mock_audio_renderer_sink.cc
@@ -19,7 +19,6 @@ MockAudioRendererSink::MockAudioRendererSink(const std::string& device_id,
AudioParameters(AudioParameters::AUDIO_FAKE,
CHANNEL_LAYOUT_STEREO,
AudioParameters::kTelephoneSampleRate,
- 16,
1)) {}
MockAudioRendererSink::MockAudioRendererSink(
@@ -32,7 +31,6 @@ MockAudioRendererSink::~MockAudioRendererSink() = default;
void MockAudioRendererSink::SwitchOutputDevice(
const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) {
// NB: output device won't be changed, since it's not required by any tests
// now.
diff --git a/chromium/media/base/mock_audio_renderer_sink.h b/chromium/media/base/mock_audio_renderer_sink.h
index 7ad1e64c37d..4551a2e76bc 100644
--- a/chromium/media/base/mock_audio_renderer_sink.h
+++ b/chromium/media/base/mock_audio_renderer_sink.h
@@ -31,12 +31,11 @@ class MockAudioRendererSink : public SwitchableAudioRendererSink {
MOCK_METHOD1(SetVolume, bool(double volume));
MOCK_METHOD0(CurrentThreadIsRenderingThread, bool());
- OutputDeviceInfo GetOutputDeviceInfo();
+ OutputDeviceInfo GetOutputDeviceInfo() override;
- bool IsOptimizedForHardwareParameters();
+ bool IsOptimizedForHardwareParameters() override;
void SwitchOutputDevice(const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) override;
void Initialize(const AudioParameters& params,
RenderCallback* renderer) override;
diff --git a/chromium/media/base/mock_demuxer_host.h b/chromium/media/base/mock_demuxer_host.h
index 6ab7814ddd0..55329b78dc3 100644
--- a/chromium/media/base/mock_demuxer_host.h
+++ b/chromium/media/base/mock_demuxer_host.h
@@ -15,7 +15,7 @@ namespace media {
class MockDemuxerHost : public DemuxerHost {
public:
MockDemuxerHost();
- virtual ~MockDemuxerHost();
+ ~MockDemuxerHost() override;
MOCK_METHOD1(OnBufferedTimeRangesChanged,
void(const Ranges<base::TimeDelta>&));
diff --git a/chromium/media/base/mock_filters.cc b/chromium/media/base/mock_filters.cc
index d418b269397..f43b88194be 100644
--- a/chromium/media/base/mock_filters.cc
+++ b/chromium/media/base/mock_filters.cc
@@ -46,8 +46,7 @@ std::string MockDemuxer::GetDisplayName() const {
}
MockDemuxerStream::MockDemuxerStream(DemuxerStream::Type type)
- : type_(type), liveness_(LIVENESS_UNKNOWN) {
-}
+ : type_(type), liveness_(LIVENESS_UNKNOWN) {}
MockDemuxerStream::~MockDemuxerStream() = default;
@@ -195,42 +194,6 @@ MockCdm::MockCdm(const std::string& key_system,
MockCdm::~MockCdm() = default;
-void MockCdm::SetServerCertificate(const std::vector<uint8_t>& certificate,
- std::unique_ptr<SimpleCdmPromise> promise) {
- OnSetServerCertificate(certificate, promise);
-}
-
-void MockCdm::CreateSessionAndGenerateRequest(
- CdmSessionType session_type,
- EmeInitDataType init_data_type,
- const std::vector<uint8_t>& init_data,
- std::unique_ptr<NewSessionCdmPromise> promise) {
- OnCreateSessionAndGenerateRequest(session_type, init_data_type, init_data,
- promise);
-}
-
-void MockCdm::LoadSession(CdmSessionType session_type,
- const std::string& session_id,
- std::unique_ptr<NewSessionCdmPromise> promise) {
- OnLoadSession(session_type, session_id, promise);
-}
-
-void MockCdm::UpdateSession(const std::string& session_id,
- const std::vector<uint8_t>& response,
- std::unique_ptr<SimpleCdmPromise> promise) {
- OnUpdateSession(session_id, response, promise);
-}
-
-void MockCdm::CloseSession(const std::string& session_id,
- std::unique_ptr<SimpleCdmPromise> promise) {
- OnCloseSession(session_id, promise);
-}
-
-void MockCdm::RemoveSession(const std::string& session_id,
- std::unique_ptr<SimpleCdmPromise> promise) {
- OnRemoveSession(session_id, promise);
-}
-
void MockCdm::CallSessionMessageCB(const std::string& session_id,
CdmMessageType message_type,
const std::vector<uint8_t>& message) {
diff --git a/chromium/media/base/mock_filters.h b/chromium/media/base/mock_filters.h
index 4cb961dcd4c..41f90d69b2e 100644
--- a/chromium/media/base/mock_filters.h
+++ b/chromium/media/base/mock_filters.h
@@ -68,7 +68,7 @@ class MockPipelineClient : public Pipeline::Client {
class MockPipeline : public Pipeline {
public:
MockPipeline();
- virtual ~MockPipeline();
+ ~MockPipeline() override;
// Note: Start() and Resume() declarations are not actually overrides; they
// take unique_ptr* instead of unique_ptr so that they can be mock methods.
@@ -87,11 +87,10 @@ class MockPipeline : public Pipeline {
void(std::unique_ptr<Renderer>*,
base::TimeDelta,
const PipelineStatusCB&));
-
- MOCK_METHOD1(OnEnabledAudioTracksChanged,
- void(const std::vector<MediaTrack::Id>&));
- MOCK_METHOD1(OnSelectedVideoTrackChanged,
- void(base::Optional<MediaTrack::Id>));
+ MOCK_METHOD2(OnEnabledAudioTracksChanged,
+ void(const std::vector<MediaTrack::Id>&, base::OnceClosure));
+ MOCK_METHOD2(OnSelectedVideoTrackChanged,
+ void(base::Optional<MediaTrack::Id>, base::OnceClosure));
// TODO(sandersd): This should automatically return true between Start() and
// Stop(). (Or better, remove it from the interface entirely.)
@@ -131,10 +130,10 @@ class MockPipeline : public Pipeline {
class MockDemuxer : public Demuxer {
public:
MockDemuxer();
- virtual ~MockDemuxer();
+ ~MockDemuxer() override;
// Demuxer implementation.
- virtual std::string GetDisplayName() const;
+ std::string GetDisplayName() const override;
MOCK_METHOD3(Initialize,
void(DemuxerHost* host, const PipelineStatusCB& cb, bool));
MOCK_METHOD1(StartWaitingForSeek, void(base::TimeDelta));
@@ -143,15 +142,18 @@ class MockDemuxer : public Demuxer {
MOCK_METHOD0(Stop, void());
MOCK_METHOD0(AbortPendingReads, void());
MOCK_METHOD0(GetAllStreams, std::vector<DemuxerStream*>());
- MOCK_METHOD1(SetStreamStatusChangeCB, void(const StreamStatusChangeCB& cb));
MOCK_CONST_METHOD0(GetStartTime, base::TimeDelta());
MOCK_CONST_METHOD0(GetTimelineOffset, base::Time());
MOCK_CONST_METHOD0(GetMemoryUsage, int64_t());
- MOCK_METHOD2(OnEnabledAudioTracksChanged,
- void(const std::vector<MediaTrack::Id>&, base::TimeDelta));
- MOCK_METHOD2(OnSelectedVideoTrackChanged,
- void(base::Optional<MediaTrack::Id>, base::TimeDelta));
+ MOCK_METHOD3(OnEnabledAudioTracksChanged,
+ void(const std::vector<MediaTrack::Id>&,
+ base::TimeDelta,
+ TrackChangeCB));
+ MOCK_METHOD3(OnSelectedVideoTrackChanged,
+ void(const std::vector<MediaTrack::Id>&,
+ base::TimeDelta,
+ TrackChangeCB));
private:
DISALLOW_COPY_AND_ASSIGN(MockDemuxer);
@@ -160,7 +162,7 @@ class MockDemuxer : public Demuxer {
class MockDemuxerStream : public DemuxerStream {
public:
explicit MockDemuxerStream(DemuxerStream::Type type);
- virtual ~MockDemuxerStream();
+ ~MockDemuxerStream() override;
// DemuxerStream implementation.
Type type() const override;
@@ -188,10 +190,10 @@ class MockVideoDecoder : public VideoDecoder {
public:
explicit MockVideoDecoder(
const std::string& decoder_name = "MockVideoDecoder");
- virtual ~MockVideoDecoder();
+ ~MockVideoDecoder() override;
// VideoDecoder implementation.
- virtual std::string GetDisplayName() const;
+ std::string GetDisplayName() const override;
MOCK_METHOD6(
Initialize,
void(const VideoDecoderConfig& config,
@@ -205,6 +207,7 @@ class MockVideoDecoder : public VideoDecoder {
MOCK_METHOD1(Reset, void(const base::Closure&));
MOCK_CONST_METHOD0(GetMaxDecodeRequests, int());
MOCK_CONST_METHOD0(CanReadWithoutStalling, bool());
+ MOCK_CONST_METHOD0(NeedsBitstreamConversion, bool());
private:
std::string decoder_name_;
@@ -215,10 +218,10 @@ class MockAudioDecoder : public AudioDecoder {
public:
explicit MockAudioDecoder(
const std::string& decoder_name = "MockAudioDecoder");
- virtual ~MockAudioDecoder();
+ ~MockAudioDecoder() override;
// AudioDecoder implementation.
- virtual std::string GetDisplayName() const;
+ std::string GetDisplayName() const override;
MOCK_METHOD5(
Initialize,
void(const AudioDecoderConfig& config,
@@ -256,7 +259,7 @@ class MockRendererClient : public RendererClient {
class MockVideoRenderer : public VideoRenderer {
public:
MockVideoRenderer();
- virtual ~MockVideoRenderer();
+ ~MockVideoRenderer() override;
// VideoRenderer implementation.
MOCK_METHOD5(Initialize,
@@ -277,7 +280,7 @@ class MockVideoRenderer : public VideoRenderer {
class MockAudioRenderer : public AudioRenderer {
public:
MockAudioRenderer();
- virtual ~MockAudioRenderer();
+ ~MockAudioRenderer() override;
// AudioRenderer implementation.
MOCK_METHOD4(Initialize,
@@ -297,7 +300,7 @@ class MockAudioRenderer : public AudioRenderer {
class MockRenderer : public Renderer {
public:
MockRenderer();
- virtual ~MockRenderer();
+ ~MockRenderer() override;
// Renderer implementation.
MOCK_METHOD3(Initialize,
@@ -314,6 +317,10 @@ class MockRenderer : public Renderer {
MOCK_METHOD2(SetCdm,
void(CdmContext* cdm_context,
const CdmAttachedCB& cdm_attached_cb));
+ MOCK_METHOD2(OnSelectedVideoTrackChanged,
+ void(std::vector<DemuxerStream*>, base::OnceClosure));
+ MOCK_METHOD2(OnSelectedAudioTracksChanged,
+ void(std::vector<DemuxerStream*>, base::OnceClosure));
private:
DISALLOW_COPY_AND_ASSIGN(MockRenderer);
@@ -322,7 +329,7 @@ class MockRenderer : public Renderer {
class MockTimeSource : public TimeSource {
public:
MockTimeSource();
- virtual ~MockTimeSource();
+ ~MockTimeSource() override;
// TimeSource implementation.
MOCK_METHOD0(StartTicking, void());
@@ -341,13 +348,14 @@ class MockTimeSource : public TimeSource {
class MockTextTrack : public TextTrack {
public:
MockTextTrack();
- virtual ~MockTextTrack();
+ ~MockTextTrack() override;
- MOCK_METHOD5(addWebVTTCue, void(const base::TimeDelta& start,
- const base::TimeDelta& end,
- const std::string& id,
- const std::string& content,
- const std::string& settings));
+ MOCK_METHOD5(addWebVTTCue,
+ void(base::TimeDelta start,
+ base::TimeDelta end,
+ const std::string& id,
+ const std::string& content,
+ const std::string& settings));
private:
DISALLOW_COPY_AND_ASSIGN(MockTextTrack);
@@ -367,8 +375,7 @@ class MockCdmClient {
const std::vector<uint8_t>& message));
MOCK_METHOD1(OnSessionClosed, void(const std::string& session_id));
- // MOCK methods don't work with move-only types like CdmKeysInfo. Add an extra
- // OnSessionKeysChangeCalled() function to work around this.
+ // Add OnSessionKeysChangeCalled() function so we can store |keys_info|.
MOCK_METHOD2(OnSessionKeysChangeCalled,
void(const std::string& session_id,
bool has_additional_usable_key));
@@ -391,10 +398,10 @@ class MockCdmClient {
class MockDecryptor : public Decryptor {
public:
MockDecryptor();
- virtual ~MockDecryptor();
+ ~MockDecryptor() override;
- MOCK_METHOD2(RegisterNewKeyCB, void(StreamType stream_type,
- const NewKeyCB& new_key_cb));
+ MOCK_METHOD2(RegisterNewKeyCB,
+ void(StreamType stream_type, const NewKeyCB& new_key_cb));
MOCK_METHOD3(Decrypt,
void(StreamType stream_type,
scoped_refptr<DecoderBuffer> encrypted,
@@ -476,52 +483,28 @@ class MockCdm : public ContentDecryptionModule {
const SessionExpirationUpdateCB& session_expiration_update_cb);
// ContentDecryptionModule implementation.
- // As move-only parameters aren't supported by mock methods, convert promises
- // into IDs and pass them to On... methods.
- void SetServerCertificate(const std::vector<uint8_t>& certificate,
- std::unique_ptr<SimpleCdmPromise> promise) override;
- MOCK_METHOD2(OnSetServerCertificate,
+ MOCK_METHOD2(SetServerCertificate,
void(const std::vector<uint8_t>& certificate,
- std::unique_ptr<SimpleCdmPromise>& promise));
-
- void CreateSessionAndGenerateRequest(
- CdmSessionType session_type,
- EmeInitDataType init_data_type,
- const std::vector<uint8_t>& init_data,
- std::unique_ptr<NewSessionCdmPromise> promise) override;
- MOCK_METHOD4(OnCreateSessionAndGenerateRequest,
+ std::unique_ptr<SimpleCdmPromise> promise));
+ MOCK_METHOD4(CreateSessionAndGenerateRequest,
void(CdmSessionType session_type,
EmeInitDataType init_data_type,
const std::vector<uint8_t>& init_data,
- std::unique_ptr<NewSessionCdmPromise>& promise));
-
- void LoadSession(CdmSessionType session_type,
- const std::string& session_id,
- std::unique_ptr<NewSessionCdmPromise> promise) override;
- MOCK_METHOD3(OnLoadSession,
+ std::unique_ptr<NewSessionCdmPromise> promise));
+ MOCK_METHOD3(LoadSession,
void(CdmSessionType session_type,
const std::string& session_id,
- std::unique_ptr<NewSessionCdmPromise>& promise));
-
- void UpdateSession(const std::string& session_id,
- const std::vector<uint8_t>& response,
- std::unique_ptr<SimpleCdmPromise> promise) override;
- MOCK_METHOD3(OnUpdateSession,
+ std::unique_ptr<NewSessionCdmPromise> promise));
+ MOCK_METHOD3(UpdateSession,
void(const std::string& session_id,
const std::vector<uint8_t>& response,
- std::unique_ptr<SimpleCdmPromise>& promise));
-
- void CloseSession(const std::string& session_id,
- std::unique_ptr<SimpleCdmPromise> promise) override;
- MOCK_METHOD2(OnCloseSession,
+ std::unique_ptr<SimpleCdmPromise> promise));
+ MOCK_METHOD2(CloseSession,
void(const std::string& session_id,
- std::unique_ptr<SimpleCdmPromise>& promise));
-
- void RemoveSession(const std::string& session_id,
- std::unique_ptr<SimpleCdmPromise> promise) override;
- MOCK_METHOD2(OnRemoveSession,
+ std::unique_ptr<SimpleCdmPromise> promise));
+ MOCK_METHOD2(RemoveSession,
void(const std::string& session_id,
- std::unique_ptr<SimpleCdmPromise>& promise));
+ std::unique_ptr<SimpleCdmPromise> promise));
MOCK_METHOD0(GetCdmContext, CdmContext*());
diff --git a/chromium/media/base/mock_media_log.h b/chromium/media/base/mock_media_log.h
index ad4112971cb..e5b1383180a 100644
--- a/chromium/media/base/mock_media_log.h
+++ b/chromium/media/base/mock_media_log.h
@@ -16,10 +16,17 @@
// |outer| is the std::string searched for substring |sub|.
#define CONTAINS_STRING(outer, sub) (std::string::npos != (outer).find(sub))
-// "media_log_" is expected to be a MockMediaLog, optionally a NiceMock or
-// StrictMock, in scope of the usage of this macro.
+// Assumes |media_log_| is available which is a MockMediaLog, optionally a
+// NiceMock or StrictMock, in scope of the usage of this macro.
#define EXPECT_MEDIA_LOG(x) EXPECT_MEDIA_LOG_ON(media_log_, x)
+// Same as EXPECT_MEDIA_LOG, but for LIMITED_MEDIA_LOG.
+#define EXPECT_LIMITED_MEDIA_LOG(x, count, max) \
+ if (count < max) { \
+ EXPECT_MEDIA_LOG_ON(media_log_, x); \
+ count++; \
+ }
+
// |log| is expected to evaluate to a MockMediaLog, optionally a NiceMock or
// StrictMock, in scope of the usage of this macro.
#define EXPECT_MEDIA_LOG_ON(log, x) EXPECT_CALL((log), DoAddEventLogString((x)))
diff --git a/chromium/media/base/pipeline.h b/chromium/media/base/pipeline.h
index 91332efc3d9..0df4066d957 100644
--- a/chromium/media/base/pipeline.h
+++ b/chromium/media/base/pipeline.h
@@ -106,14 +106,43 @@ class MEDIA_EXPORT Pipeline {
Client* client,
const PipelineStatusCB& seek_cb) = 0;
+ // Track switching works similarly for both audio and video. Callbacks are
+ // used to notify when it is time to proceed to the next step, since many of
+ // the operations are asynchronous.
+ // ──────────────────── Track Switch Control Flow ───────────────────────
+ // pipeline | demuxer | demuxer_stream | renderer | video/audio_renderer
+ // | | | |
+ // | | | |
+ // | | | |
+ // switch track | | |
+ // ---------> | | |
+ // | disable/enable stream | |
+ // | -----------> | |
+ // active streams | | |
+ // <--------- | | |
+ // | switch track | |
+ // --------------------------------------> |
+ // | | | Flush/Restart/Reset
+ // | | | --------------->
+ // Notify pipeline of completed track change (via callback)
+ // <-----------------------------------------------------
+ // ──────────────────── Sometime in the future ──────────────────────────
+ // | | | OnBufferingStateChange
+ // | | | <----------------
+ // | OnBufferingStateChange | |
+ // <-------------------------------------- |
+ // | | | |
+ // | | | |
// |enabled_track_ids| contains track ids of enabled audio tracks.
virtual void OnEnabledAudioTracksChanged(
- const std::vector<MediaTrack::Id>& enabled_track_ids) = 0;
+ const std::vector<MediaTrack::Id>& enabled_track_ids,
+ base::OnceClosure change_completed_cb) = 0;
// |selected_track_id| is either empty, which means no video track is
// selected, or contains the selected video track id.
virtual void OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> selected_track_id) = 0;
+ base::Optional<MediaTrack::Id> selected_track_id,
+ base::OnceClosure change_completed_cb) = 0;
// Stops the pipeline. This is a blocking function.
// If the pipeline is started, it must be stopped before destroying it.
diff --git a/chromium/media/base/pipeline_impl.cc b/chromium/media/base/pipeline_impl.cc
index 2194a60cc44..5776cc4abe0 100644
--- a/chromium/media/base/pipeline_impl.cc
+++ b/chromium/media/base/pipeline_impl.cc
@@ -71,12 +71,14 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
// |enabled_track_ids| contains track ids of enabled audio tracks.
void OnEnabledAudioTracksChanged(
- const std::vector<MediaTrack::Id>& enabled_track_ids);
+ const std::vector<MediaTrack::Id>& enabled_track_ids,
+ base::OnceClosure change_completed_cb);
// |selected_track_id| is either empty, which means no video track is
// selected, or contains the selected video track id.
void OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> selected_track_id);
+ base::Optional<MediaTrack::Id> selected_track_id,
+ base::OnceClosure change_completed_cb);
private:
// Contains state shared between main and media thread.
@@ -109,6 +111,13 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
base::TimeDelta suspend_timestamp = kNoTimestamp;
};
+ base::TimeDelta GetCurrentTimestamp();
+
+ void OnDemuxerCompletedTrackChange(
+ base::OnceClosure change_completed_cb,
+ DemuxerStream::Type stream_type,
+ const std::vector<DemuxerStream*>& streams);
+
// DemuxerHost implementaion.
void OnBufferedTimeRangesChanged(const Ranges<base::TimeDelta>& ranges) final;
void SetDuration(base::TimeDelta duration) final;
@@ -577,26 +586,30 @@ void PipelineImpl::RendererWrapper::OnEnded() {
CheckPlaybackEnded();
}
-void PipelineImpl::OnEnabledAudioTracksChanged(
- const std::vector<MediaTrack::Id>& enabled_track_ids) {
- DCHECK(thread_checker_.CalledOnValidThread());
- media_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&RendererWrapper::OnEnabledAudioTracksChanged,
- base::Unretained(renderer_wrapper_.get()), enabled_track_ids));
+// TODO(crbug/817089): Combine this functionality into renderer->GetMediaTime().
+base::TimeDelta PipelineImpl::RendererWrapper::GetCurrentTimestamp() {
+ DCHECK(demuxer_);
+ DCHECK(shared_state_.renderer || state_ != kPlaying);
+
+ return state_ == kPlaying ? shared_state_.renderer->GetMediaTime()
+ : demuxer_->GetStartTime();
}
-void PipelineImpl::OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> selected_track_id) {
+void PipelineImpl::OnEnabledAudioTracksChanged(
+ const std::vector<MediaTrack::Id>& enabled_track_ids,
+ base::OnceClosure change_completed_cb) {
DCHECK(thread_checker_.CalledOnValidThread());
media_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&RendererWrapper::OnSelectedVideoTrackChanged,
- base::Unretained(renderer_wrapper_.get()), selected_track_id));
+ base::BindOnce(&RendererWrapper::OnEnabledAudioTracksChanged,
+ base::Unretained(renderer_wrapper_.get()),
+ enabled_track_ids,
+ BindToCurrentLoop(std::move(change_completed_cb))));
}
void PipelineImpl::RendererWrapper::OnEnabledAudioTracksChanged(
- const std::vector<MediaTrack::Id>& enabled_track_ids) {
+ const std::vector<MediaTrack::Id>& enabled_track_ids,
+ base::OnceClosure change_completed_cb) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
// If the pipeline has been created, but not started yet, we may still receive
@@ -606,51 +619,86 @@ void PipelineImpl::RendererWrapper::OnEnabledAudioTracksChanged(
// status is in sync with blink after pipeline is started.
if (state_ == kCreated) {
DCHECK(!demuxer_);
+ std::move(change_completed_cb).Run();
return;
}
// Track status notifications might be delivered asynchronously. If we receive
// a notification when pipeline is stopped/shut down, it's safe to ignore it.
if (state_ == kStopping || state_ == kStopped) {
+ std::move(change_completed_cb).Run();
return;
}
+ demuxer_->OnEnabledAudioTracksChanged(
+ enabled_track_ids, GetCurrentTimestamp(),
+ base::BindOnce(&RendererWrapper::OnDemuxerCompletedTrackChange,
+ weak_this_, base::Passed(&change_completed_cb)));
+}
- DCHECK(demuxer_);
- DCHECK(shared_state_.renderer || (state_ != kPlaying));
-
- base::TimeDelta curr_time = (state_ == kPlaying)
- ? shared_state_.renderer->GetMediaTime()
- : demuxer_->GetStartTime();
- demuxer_->OnEnabledAudioTracksChanged(enabled_track_ids, curr_time);
+void PipelineImpl::OnSelectedVideoTrackChanged(
+ base::Optional<MediaTrack::Id> selected_track_id,
+ base::OnceClosure change_completed_cb) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ media_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&RendererWrapper::OnSelectedVideoTrackChanged,
+ base::Unretained(renderer_wrapper_.get()),
+ selected_track_id,
+ BindToCurrentLoop(std::move(change_completed_cb))));
}
void PipelineImpl::RendererWrapper::OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> selected_track_id) {
+ base::Optional<MediaTrack::Id> selected_track_id,
+ base::OnceClosure change_completed_cb) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
- // If the pipeline has been created, but not started yet, we may still receive
- // track notifications from blink level (e.g. when video track gets deselected
- // due to player/pipeline belonging to a background tab). We can safely ignore
- // these, since WebMediaPlayerImpl will ensure that demuxer stream / track
- // status is in sync with blink after pipeline is started.
+ // See RendererWrapper::OnEnabledAudioTracksChanged.
if (state_ == kCreated) {
DCHECK(!demuxer_);
+ std::move(change_completed_cb).Run();
return;
}
- // Track status notifications might be delivered asynchronously. If we receive
- // a notification when pipeline is stopped/shut down, it's safe to ignore it.
if (state_ == kStopping || state_ == kStopped) {
+ std::move(change_completed_cb).Run();
return;
}
- DCHECK(demuxer_);
- DCHECK(shared_state_.renderer || (state_ != kPlaying));
+ std::vector<MediaTrack::Id> tracks;
+ if (selected_track_id)
+ tracks.push_back(*selected_track_id);
+
+ demuxer_->OnSelectedVideoTrackChanged(
+ tracks, GetCurrentTimestamp(),
+ base::BindOnce(&RendererWrapper::OnDemuxerCompletedTrackChange,
+ weak_this_, base::Passed(&change_completed_cb)));
+}
- base::TimeDelta curr_time = (state_ == kPlaying)
- ? shared_state_.renderer->GetMediaTime()
- : demuxer_->GetStartTime();
- demuxer_->OnSelectedVideoTrackChanged(selected_track_id, curr_time);
+void PipelineImpl::RendererWrapper::OnDemuxerCompletedTrackChange(
+ base::OnceClosure change_completed_cb,
+ DemuxerStream::Type stream_type,
+ const std::vector<DemuxerStream*>& streams) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ if (!shared_state_.renderer) {
+ // This can happen if the pipeline has been suspended.
+ std::move(change_completed_cb).Run();
+ return;
+ }
+
+ switch (stream_type) {
+ case DemuxerStream::AUDIO:
+ shared_state_.renderer->OnEnabledAudioTracksChanged(
+ streams, std::move(change_completed_cb));
+ break;
+ case DemuxerStream::VIDEO:
+ shared_state_.renderer->OnSelectedVideoTracksChanged(
+ streams, std::move(change_completed_cb));
+ break;
+ // TODO(tmathmeyer): Look into text track switching.
+ case DemuxerStream::TEXT:
+ case DemuxerStream::UNKNOWN: // Fail on unknown type.
+ NOTREACHED();
+ }
}
void PipelineImpl::RendererWrapper::OnStatisticsUpdate(
diff --git a/chromium/media/base/pipeline_impl.h b/chromium/media/base/pipeline_impl.h
index 9171ef5e14d..543f768b7b9 100644
--- a/chromium/media/base/pipeline_impl.h
+++ b/chromium/media/base/pipeline_impl.h
@@ -35,9 +35,13 @@ class MediaLog;
// | |
// V V
// [ Playing ] <---------. [ Stopped ]
-// | | Seek() |
-// | V |
-// | [ Seeking ] ----'
+// | | | Seek() |
+// | | V |
+// | | [ Seeking ] ---'
+// | | ^
+// | | *TrackChange() |
+// | V |
+// | [ Switching ] ----'
// | ^
// | Suspend() |
// V |
@@ -99,12 +103,14 @@ class MEDIA_EXPORT PipelineImpl : public Pipeline {
// |enabled_track_ids| contains track ids of enabled audio tracks.
void OnEnabledAudioTracksChanged(
- const std::vector<MediaTrack::Id>& enabled_track_ids) override;
+ const std::vector<MediaTrack::Id>& enabled_track_ids,
+ base::OnceClosure change_completed_cb) override;
// |selected_track_id| is either empty, which means no video track is
// selected, or contains the selected video track id.
void OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> selected_track_id) override;
+ base::Optional<MediaTrack::Id> selected_track_id,
+ base::OnceClosure change_completed_cb) override;
private:
friend class MediaLog;
diff --git a/chromium/media/base/pipeline_impl_unittest.cc b/chromium/media/base/pipeline_impl_unittest.cc
index 9d1f0fe4e3d..07229dbfd81 100644
--- a/chromium/media/base/pipeline_impl_unittest.cc
+++ b/chromium/media/base/pipeline_impl_unittest.cc
@@ -123,7 +123,7 @@ class PipelineImplTest : public ::testing::Test {
EXPECT_CALL(*demuxer_, GetStartTime()).WillRepeatedly(Return(start_time_));
}
- virtual ~PipelineImplTest() {
+ ~PipelineImplTest() override {
if (pipeline_->IsRunning()) {
ExpectDemuxerStop();
diff --git a/chromium/media/base/renderer.cc b/chromium/media/base/renderer.cc
index f348d75de56..c2e1736fdc0 100644
--- a/chromium/media/base/renderer.cc
+++ b/chromium/media/base/renderer.cc
@@ -10,4 +10,18 @@ Renderer::Renderer() = default;
Renderer::~Renderer() = default;
+void Renderer::OnSelectedVideoTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) {
+ std::move(change_completed_cb).Run();
+ DLOG(WARNING) << "Track changes are not supported.";
+}
+
+void Renderer::OnEnabledAudioTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) {
+ std::move(change_completed_cb).Run();
+ DLOG(WARNING) << "Track changes are not supported.";
+}
+
} // namespace media
diff --git a/chromium/media/base/renderer.h b/chromium/media/base/renderer.h
index 7b460d93278..075ae48ff90 100644
--- a/chromium/media/base/renderer.h
+++ b/chromium/media/base/renderer.h
@@ -11,6 +11,7 @@
#include "base/time/time.h"
#include "media/base/buffering_state.h"
#include "media/base/cdm_context.h"
+#include "media/base/demuxer_stream.h"
#include "media/base/media_export.h"
#include "media/base/pipeline_status.h"
@@ -57,6 +58,18 @@ class MEDIA_EXPORT Renderer {
// Returns the current media time.
virtual base::TimeDelta GetMediaTime() = 0;
+ // Provides a list of DemuxerStreams correlating to the tracks which should
+ // be played. An empty list would mean that any playing track of the same
+ // type should be flushed and disabled. Any provided Streams should be played
+ // by whatever mechanism the subclass of Renderer chooses for managing its AV
+ // playback.
+ virtual void OnSelectedVideoTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb);
+ virtual void OnEnabledAudioTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb);
+
private:
DISALLOW_COPY_AND_ASSIGN(Renderer);
};
diff --git a/chromium/media/base/renderer_factory_selector.cc b/chromium/media/base/renderer_factory_selector.cc
index 4e506122e92..ef2b579cca9 100644
--- a/chromium/media/base/renderer_factory_selector.cc
+++ b/chromium/media/base/renderer_factory_selector.cc
@@ -35,6 +35,9 @@ RendererFactory* RendererFactorySelector::GetCurrentFactory() {
if (query_is_remoting_active_cb_ && query_is_remoting_active_cb_.Run())
next_factory_type = FactoryType::COURIER;
+ if (query_is_flinging_active_cb_ && query_is_flinging_active_cb_.Run())
+ next_factory_type = FactoryType::FLINGING;
+
DVLOG(1) << __func__ << " Selecting factory type: " << next_factory_type;
RendererFactory* current_factory = factories_[next_factory_type].get();
@@ -56,4 +59,10 @@ void RendererFactorySelector::SetQueryIsRemotingActiveCB(
query_is_remoting_active_cb_ = query_is_remoting_active_cb;
}
+void RendererFactorySelector::SetQueryIsFlingingActiveCB(
+ QueryIsFlingingActiveCB query_is_flinging_active_cb) {
+ DCHECK(!query_is_flinging_active_cb_);
+ query_is_flinging_active_cb_ = query_is_flinging_active_cb;
+}
+
} // namespace media
diff --git a/chromium/media/base/renderer_factory_selector.h b/chromium/media/base/renderer_factory_selector.h
index c57943483a1..6d929b1d4c7 100644
--- a/chromium/media/base/renderer_factory_selector.h
+++ b/chromium/media/base/renderer_factory_selector.h
@@ -17,13 +17,15 @@ namespace media {
class MEDIA_EXPORT RendererFactorySelector {
public:
using QueryIsRemotingActiveCB = base::Callback<bool()>;
+ using QueryIsFlingingActiveCB = base::Callback<bool()>;
enum FactoryType {
DEFAULT, // DefaultRendererFactory.
MOJO, // MojoRendererFactory.
MEDIA_PLAYER, // MediaPlayerRendererClientFactory.
COURIER, // CourierRendererFactory.
- FACTORY_TYPE_MAX = COURIER,
+ FLINGING, // FlingingRendererClientFactory.
+ FACTORY_TYPE_MAX = FLINGING,
};
RendererFactorySelector();
@@ -54,10 +56,16 @@ class MEDIA_EXPORT RendererFactorySelector {
void SetQueryIsRemotingActiveCB(
QueryIsRemotingActiveCB query_is_remoting_active_cb);
+ // Sets the callback to query whether we are currently flinging media, and if
+ // we should temporarily use the FLINGING factory.
+ void SetQueryIsFlingingActiveCB(
+ QueryIsFlingingActiveCB query_is_flinging_active_cb);
+
private:
bool use_media_player_ = false;
QueryIsRemotingActiveCB query_is_remoting_active_cb_;
+ QueryIsFlingingActiveCB query_is_flinging_active_cb_;
base::Optional<FactoryType> base_factory_type_;
std::unique_ptr<RendererFactory> factories_[FACTORY_TYPE_MAX + 1];
diff --git a/chromium/media/base/sample_format.cc b/chromium/media/base/sample_format.cc
index e6141a69b36..97492ccf418 100644
--- a/chromium/media/base/sample_format.cc
+++ b/chromium/media/base/sample_format.cc
@@ -15,6 +15,7 @@ int SampleFormatToBytesPerChannel(SampleFormat sample_format) {
case kSampleFormatU8:
case kSampleFormatAc3:
case kSampleFormatEac3:
+ case kSampleFormatMpegHAudio:
return 1;
case kSampleFormatS16:
case kSampleFormatPlanarS16:
@@ -31,6 +32,10 @@ int SampleFormatToBytesPerChannel(SampleFormat sample_format) {
return 0;
}
+int SampleFormatToBitsPerChannel(SampleFormat sample_format) {
+ return SampleFormatToBytesPerChannel(sample_format) * 8;
+}
+
const char* SampleFormatToString(SampleFormat sample_format) {
switch(sample_format) {
case kUnknownSampleFormat:
@@ -55,6 +60,8 @@ const char* SampleFormatToString(SampleFormat sample_format) {
return "Compressed AC3 bitstream";
case kSampleFormatEac3:
return "Compressed E-AC3 bitstream";
+ case kSampleFormatMpegHAudio:
+ return "Compressed MPEG-H audio bitstream";
}
NOTREACHED() << "Invalid sample format provided: " << sample_format;
return "";
@@ -74,6 +81,7 @@ bool IsPlanar(SampleFormat sample_format) {
case kSampleFormatF32:
case kSampleFormatAc3:
case kSampleFormatEac3:
+ case kSampleFormatMpegHAudio:
return false;
}
@@ -90,6 +98,7 @@ bool IsInterleaved(SampleFormat sample_format) {
case kSampleFormatF32:
case kSampleFormatAc3:
case kSampleFormatEac3:
+ case kSampleFormatMpegHAudio:
return true;
case kUnknownSampleFormat:
case kSampleFormatPlanarS16:
@@ -106,6 +115,7 @@ bool IsBitstream(SampleFormat sample_format) {
switch (sample_format) {
case kSampleFormatAc3:
case kSampleFormatEac3:
+ case kSampleFormatMpegHAudio:
return true;
case kUnknownSampleFormat:
case kSampleFormatU8:
diff --git a/chromium/media/base/sample_format.h b/chromium/media/base/sample_format.h
index 121b98310f3..6951ad6600d 100644
--- a/chromium/media/base/sample_format.h
+++ b/chromium/media/base/sample_format.h
@@ -5,7 +5,7 @@
#ifndef MEDIA_BASE_SAMPLE_FORMAT_H_
#define MEDIA_BASE_SAMPLE_FORMAT_H_
-#include "media/base/media_export.h"
+#include "media/base/media_shmem_export.h"
namespace media {
@@ -15,36 +15,39 @@ enum SampleFormat {
// adding a sample format, do so at the bottom before kSampleFormatMax, and
// update the value of kSampleFormatMax.
kUnknownSampleFormat = 0,
- kSampleFormatU8, // Unsigned 8-bit w/ bias of 128.
- kSampleFormatS16, // Signed 16-bit.
- kSampleFormatS32, // Signed 32-bit.
- kSampleFormatF32, // Float 32-bit.
- kSampleFormatPlanarS16, // Signed 16-bit planar.
- kSampleFormatPlanarF32, // Float 32-bit planar.
- kSampleFormatPlanarS32, // Signed 32-bit planar.
- kSampleFormatS24, // Signed 24-bit.
- kSampleFormatAc3, // Compressed AC3 bitstream.
- kSampleFormatEac3, // Compressed E-AC3 bitstream.
+ kSampleFormatU8, // Unsigned 8-bit w/ bias of 128.
+ kSampleFormatS16, // Signed 16-bit.
+ kSampleFormatS32, // Signed 32-bit.
+ kSampleFormatF32, // Float 32-bit.
+ kSampleFormatPlanarS16, // Signed 16-bit planar.
+ kSampleFormatPlanarF32, // Float 32-bit planar.
+ kSampleFormatPlanarS32, // Signed 32-bit planar.
+ kSampleFormatS24, // Signed 24-bit.
+ kSampleFormatAc3, // Compressed AC3 bitstream.
+ kSampleFormatEac3, // Compressed E-AC3 bitstream.
+ kSampleFormatMpegHAudio, // Compressed MPEG-H audio bitstream.
// Must always be equal to largest value ever logged.
- kSampleFormatMax = kSampleFormatEac3,
+ kSampleFormatMax = kSampleFormatMpegHAudio,
};
// Returns the number of bytes used per channel for the specified
// |sample_format|.
-MEDIA_EXPORT int SampleFormatToBytesPerChannel(SampleFormat sample_format);
+MEDIA_SHMEM_EXPORT int SampleFormatToBytesPerChannel(
+ SampleFormat sample_format);
+MEDIA_SHMEM_EXPORT int SampleFormatToBitsPerChannel(SampleFormat sample_format);
// Returns the name of the sample format as a string
-MEDIA_EXPORT const char* SampleFormatToString(SampleFormat sample_format);
+MEDIA_SHMEM_EXPORT const char* SampleFormatToString(SampleFormat sample_format);
// Returns true if |sample_format| is planar, false otherwise.
-MEDIA_EXPORT bool IsPlanar(SampleFormat sample_format);
+MEDIA_SHMEM_EXPORT bool IsPlanar(SampleFormat sample_format);
// Returns true if |sample_format| is interleaved, false otherwise.
-MEDIA_EXPORT bool IsInterleaved(SampleFormat sample_format);
+MEDIA_SHMEM_EXPORT bool IsInterleaved(SampleFormat sample_format);
// Returns true if |sample_format| is compressed bitstream, false otherwise.
-MEDIA_EXPORT bool IsBitstream(SampleFormat sample_format);
+MEDIA_SHMEM_EXPORT bool IsBitstream(SampleFormat sample_format);
} // namespace media
diff --git a/chromium/media/base/silent_sink_suspender_unittest.cc b/chromium/media/base/silent_sink_suspender_unittest.cc
index 50a7c13a2f7..1e0c2c9e783 100644
--- a/chromium/media/base/silent_sink_suspender_unittest.cc
+++ b/chromium/media/base/silent_sink_suspender_unittest.cc
@@ -22,7 +22,6 @@ class SilentSinkSuspenderTest : public testing::Test {
: params_(AudioParameters::AUDIO_FAKE,
CHANNEL_LAYOUT_MONO,
44100,
- 8,
128),
mock_sink_(new testing::StrictMock<MockAudioRendererSink>()),
fake_callback_(0.1, params_.sample_rate()),
diff --git a/chromium/media/base/subsample_entry.cc b/chromium/media/base/subsample_entry.cc
new file mode 100644
index 00000000000..85975ce2005
--- /dev/null
+++ b/chromium/media/base/subsample_entry.cc
@@ -0,0 +1,29 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/subsample_entry.h"
+
+#include "base/logging.h"
+#include "base/numerics/safe_math.h"
+
+namespace media {
+
+bool VerifySubsamplesMatchSize(const std::vector<SubsampleEntry>& subsamples,
+ size_t input_size) {
+ base::CheckedNumeric<size_t> total_size = 0;
+ for (const auto& subsample : subsamples) {
+ // Add each entry separately to avoid the compiler doing the wrong thing.
+ total_size += subsample.clear_bytes;
+ total_size += subsample.cypher_bytes;
+ }
+
+ if (!total_size.IsValid() || total_size.ValueOrDie() != input_size) {
+ DVLOG(1) << "Subsample sizes do not equal input size";
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace media
diff --git a/chromium/media/base/subsample_entry.h b/chromium/media/base/subsample_entry.h
index 07c5eee5ab3..48f8ea70b7f 100644
--- a/chromium/media/base/subsample_entry.h
+++ b/chromium/media/base/subsample_entry.h
@@ -7,6 +7,10 @@
#include <stdint.h>
+#include <vector>
+
+#include "media/base/media_export.h"
+
namespace media {
// The Common Encryption spec provides for subsample encryption, where portions
@@ -26,6 +30,13 @@ struct SubsampleEntry {
uint32_t cypher_bytes;
};
+// Verifies that |subsamples| correctly specifies a buffer of length
+// |input_size|. Returns false if the total of bytes specified in |subsamples|
+// does not match |input_size|.
+MEDIA_EXPORT bool VerifySubsamplesMatchSize(
+ const std::vector<SubsampleEntry>& subsamples,
+ size_t input_size);
+
} // namespace media
#endif // MEDIA_BASE_SUBSAMPLE_ENTRY_H_
diff --git a/chromium/media/base/subsample_entry_unittest.cc b/chromium/media/base/subsample_entry_unittest.cc
new file mode 100644
index 00000000000..84b0d28c6c0
--- /dev/null
+++ b/chromium/media/base/subsample_entry_unittest.cc
@@ -0,0 +1,38 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/subsample_entry.h"
+
+#include <limits>
+
+#include "base/numerics/safe_conversions.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+constexpr uint32_t kMax = std::numeric_limits<uint32_t>::max();
+
+TEST(SubsampleEntryTest, NoEntries) {
+ EXPECT_TRUE(VerifySubsamplesMatchSize({}, 0));
+ EXPECT_FALSE(VerifySubsamplesMatchSize({}, 100));
+}
+
+TEST(SubsampleEntryTest, OneEntry) {
+ EXPECT_TRUE(VerifySubsamplesMatchSize({{0, 50}}, 50));
+ EXPECT_TRUE(VerifySubsamplesMatchSize({{100, 00}}, 100));
+ EXPECT_TRUE(VerifySubsamplesMatchSize({{150, 200}}, 350));
+}
+
+TEST(SubsampleEntryTest, MultipleEntries) {
+ EXPECT_TRUE(VerifySubsamplesMatchSize({{0, 50}, {100, 00}, {150, 200}}, 500));
+}
+
+TEST(SubsampleEntryTest, NoOverflow) {
+ EXPECT_TRUE(
+ VerifySubsamplesMatchSize({{kMax, 0}}, base::strict_cast<size_t>(kMax)));
+ EXPECT_TRUE(
+ VerifySubsamplesMatchSize({{0, kMax}}, base::strict_cast<size_t>(kMax)));
+}
+
+} // namespace media
diff --git a/chromium/media/base/test_data_util.cc b/chromium/media/base/test_data_util.cc
index b62990d721d..6ca9aa9fe3a 100644
--- a/chromium/media/base/test_data_util.cc
+++ b/chromium/media/base/test_data_util.cc
@@ -43,7 +43,7 @@ const base::FilePath::CharType kTestDataPath[] =
base::FilePath GetTestDataFilePath(const std::string& name) {
base::FilePath file_path;
- CHECK(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));
+ CHECK(base::PathService::Get(base::DIR_SOURCE_ROOT, &file_path));
return file_path.Append(GetTestDataPath()).AppendASCII(name);
}
diff --git a/chromium/media/base/test_helpers.cc b/chromium/media/base/test_helpers.cc
index 01599dde247..36e6965ee62 100644
--- a/chromium/media/base/test_helpers.cc
+++ b/chromium/media/base/test_helpers.cc
@@ -126,6 +126,7 @@ void WaitableMessageLoopEvent::OnTimeout() {
}
static VideoDecoderConfig GetTestConfig(VideoCodec codec,
+ VideoCodecProfile config,
VideoRotation rotation,
gfx::Size coded_size,
bool is_encrypted) {
@@ -133,8 +134,8 @@ static VideoDecoderConfig GetTestConfig(VideoCodec codec,
gfx::Size natural_size = coded_size;
return VideoDecoderConfig(
- codec, VIDEO_CODEC_PROFILE_UNKNOWN, PIXEL_FORMAT_I420, COLOR_SPACE_JPEG,
- rotation, coded_size, visible_rect, natural_size, EmptyExtraData(),
+ codec, config, PIXEL_FORMAT_I420, COLOR_SPACE_JPEG, rotation, coded_size,
+ visible_rect, natural_size, EmptyExtraData(),
is_encrypted ? AesCtrEncryptionScheme() : Unencrypted());
}
@@ -143,38 +144,44 @@ static const gfx::Size kLargeSize(640, 480);
// static
VideoDecoderConfig TestVideoConfig::Invalid() {
- return GetTestConfig(kUnknownVideoCodec, VIDEO_ROTATION_0, kNormalSize,
- false);
+ return GetTestConfig(kUnknownVideoCodec, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VIDEO_ROTATION_0, kNormalSize, false);
}
// static
VideoDecoderConfig TestVideoConfig::Normal(VideoCodec codec) {
- return GetTestConfig(codec, VIDEO_ROTATION_0, kNormalSize, false);
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, VIDEO_ROTATION_0,
+ kNormalSize, false);
}
// static
-VideoDecoderConfig TestVideoConfig::NormalH264() {
- return GetTestConfig(kCodecH264, VIDEO_ROTATION_0, kNormalSize, false);
+VideoDecoderConfig TestVideoConfig::NormalH264(VideoCodecProfile config) {
+ return GetTestConfig(kCodecH264, config, VIDEO_ROTATION_0, kNormalSize,
+ false);
}
// static
VideoDecoderConfig TestVideoConfig::NormalEncrypted(VideoCodec codec) {
- return GetTestConfig(codec, VIDEO_ROTATION_0, kNormalSize, true);
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, VIDEO_ROTATION_0,
+ kNormalSize, true);
}
// static
VideoDecoderConfig TestVideoConfig::NormalRotated(VideoRotation rotation) {
- return GetTestConfig(kCodecVP8, rotation, kNormalSize, false);
+ return GetTestConfig(kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN, rotation,
+ kNormalSize, false);
}
// static
VideoDecoderConfig TestVideoConfig::Large(VideoCodec codec) {
- return GetTestConfig(codec, VIDEO_ROTATION_0, kLargeSize, false);
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, VIDEO_ROTATION_0,
+ kLargeSize, false);
}
// static
VideoDecoderConfig TestVideoConfig::LargeEncrypted(VideoCodec codec) {
- return GetTestConfig(codec, VIDEO_ROTATION_0, kLargeSize, true);
+ return GetTestConfig(codec, VIDEO_CODEC_PROFILE_UNKNOWN, VIDEO_ROTATION_0,
+ kLargeSize, true);
}
// static
@@ -196,7 +203,7 @@ AudioDecoderConfig TestAudioConfig::Normal() {
// static
AudioParameters TestAudioParameters::Normal() {
return AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, 48000, 16, 2048);
+ CHANNEL_LAYOUT_STEREO, 48000, 2048);
}
template <class T>
diff --git a/chromium/media/base/test_helpers.h b/chromium/media/base/test_helpers.h
index 10031546fd7..ff34a05abcd 100644
--- a/chromium/media/base/test_helpers.h
+++ b/chromium/media/base/test_helpers.h
@@ -88,7 +88,8 @@ class TestVideoConfig {
static VideoDecoderConfig Invalid();
static VideoDecoderConfig Normal(VideoCodec codec = kCodecVP8);
- static VideoDecoderConfig NormalH264();
+ static VideoDecoderConfig NormalH264(
+ VideoCodecProfile = VIDEO_CODEC_PROFILE_UNKNOWN);
static VideoDecoderConfig NormalEncrypted(VideoCodec codec = kCodecVP8);
static VideoDecoderConfig NormalRotated(VideoRotation rotation);
@@ -330,6 +331,12 @@ MATCHER_P2(SkippingSpliceTooLittleOverlap,
"result in loss of A/V sync.");
}
+// Prefer WebMSimpleBlockDurationEstimated over this matcher, unless the actual
+// estimated duration value is unimportant to the test.
+MATCHER(WebMSimpleBlockDurationEstimatedAny, "") {
+ return CONTAINS_STRING(arg, "Estimating WebM block duration=");
+}
+
MATCHER_P(WebMSimpleBlockDurationEstimated, estimated_duration_ms, "") {
return CONTAINS_STRING(arg, "Estimating WebM block duration=" +
base::IntToString(estimated_duration_ms));
diff --git a/chromium/media/base/text_renderer_unittest.cc b/chromium/media/base/text_renderer_unittest.cc
index 4190174556c..e6348ea884f 100644
--- a/chromium/media/base/text_renderer_unittest.cc
+++ b/chromium/media/base/text_renderer_unittest.cc
@@ -29,20 +29,16 @@ namespace media {
// Local implementation of the TextTrack interface.
class FakeTextTrack : public TextTrack {
public:
- FakeTextTrack(const base::Closure& destroy_cb,
- const TextTrackConfig& config)
- : destroy_cb_(destroy_cb),
- config_(config) {
- }
- virtual ~FakeTextTrack() {
- destroy_cb_.Run();
- }
+ FakeTextTrack(const base::Closure& destroy_cb, const TextTrackConfig& config)
+ : destroy_cb_(destroy_cb), config_(config) {}
+ ~FakeTextTrack() override { destroy_cb_.Run(); }
- MOCK_METHOD5(addWebVTTCue, void(const base::TimeDelta& start,
- const base::TimeDelta& end,
- const std::string& id,
- const std::string& content,
- const std::string& settings));
+ MOCK_METHOD5(addWebVTTCue,
+ void(base::TimeDelta start,
+ base::TimeDelta end,
+ const std::string& id,
+ const std::string& content,
+ const std::string& settings));
const base::Closure destroy_cb_;
const TextTrackConfig config_;
@@ -58,12 +54,11 @@ class TextRendererTest : public testing::Test {
void CreateTextRenderer() {
DCHECK(!text_renderer_);
- text_renderer_.reset(
- new TextRenderer(message_loop_.task_runner(),
- base::Bind(&TextRendererTest::OnAddTextTrack,
- base::Unretained(this))));
- text_renderer_->Initialize(base::Bind(&TextRendererTest::OnEnd,
- base::Unretained(this)));
+ text_renderer_.reset(new TextRenderer(
+ message_loop_.task_runner(),
+ base::Bind(&TextRendererTest::OnAddTextTrack, base::Unretained(this))));
+ text_renderer_->Initialize(
+ base::Bind(&TextRendererTest::OnEnd, base::Unretained(this)));
}
void Destroy() {
@@ -95,8 +90,7 @@ class TextRendererTest : public testing::Test {
const AddTextTrackDoneCB& done_cb) {
base::Closure destroy_cb =
base::Bind(&TextRendererTest::OnDestroyTextTrack,
- base::Unretained(this),
- text_tracks_.size());
+ base::Unretained(this), text_tracks_.size());
// Text track objects are owned by the text renderer, but we cache them
// here so we can inspect them. They get removed from our cache when the
// text renderer deallocates them.
@@ -111,8 +105,8 @@ class TextRendererTest : public testing::Test {
EXPECT_FALSE(text_tracks_[idx]);
}
- void SatisfyPendingReads(const base::TimeDelta& start,
- const base::TimeDelta& duration,
+ void SatisfyPendingReads(base::TimeDelta start,
+ base::TimeDelta duration,
const std::string& id,
const std::string& content,
const std::string& settings) {
@@ -157,11 +151,8 @@ class TextRendererTest : public testing::Test {
if (expect_cue) {
FakeTextTrack* const text_track = text_tracks_[idx];
- EXPECT_CALL(*text_track, addWebVTTCue(start,
- start + duration,
- id,
- content,
- settings));
+ EXPECT_CALL(*text_track,
+ addWebVTTCue(start, start + duration, id, content, settings));
}
text_stream->SatisfyPendingRead(start, duration, id, content, settings);
@@ -174,24 +165,20 @@ class TextRendererTest : public testing::Test {
}
}
- void OnDestroyTextTrack(unsigned idx) {
- text_tracks_[idx] = NULL;
- }
+ void OnDestroyTextTrack(unsigned idx) { text_tracks_[idx] = NULL; }
- void Play() {
- text_renderer_->StartPlaying();
- }
+ void Play() { text_renderer_->StartPlaying(); }
void Pause() {
- text_renderer_->Pause(base::Bind(&TextRendererTest::OnPause,
- base::Unretained(this)));
+ text_renderer_->Pause(
+ base::Bind(&TextRendererTest::OnPause, base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
void Flush() {
EXPECT_CALL(*this, OnFlush());
- text_renderer_->Flush(base::Bind(&TextRendererTest::OnFlush,
- base::Unretained(this)));
+ text_renderer_->Flush(
+ base::Bind(&TextRendererTest::OnFlush, base::Unretained(this)));
}
void ExpectRead(size_t idx) {
@@ -218,10 +205,9 @@ class TextRendererTest : public testing::Test {
};
TEST_F(TextRendererTest, CreateTextRendererNoInit) {
- text_renderer_.reset(
- new TextRenderer(message_loop_.task_runner(),
- base::Bind(&TextRendererTest::OnAddTextTrack,
- base::Unretained(this))));
+ text_renderer_.reset(new TextRenderer(
+ message_loop_.task_runner(),
+ base::Bind(&TextRendererTest::OnAddTextTrack, base::Unretained(this))));
text_renderer_.reset();
}
@@ -744,7 +730,6 @@ TEST_F(TextRendererTest, PlayPauseRemove_SplitCancel) {
EXPECT_FALSE(text_renderer_->HasTracks());
}
-
TEST_F(TextRendererTest, PlayPauseRemove_PauseLast) {
CreateTextRenderer();
AddTextTrack(kTextSubtitles, "1", "", true);
diff --git a/chromium/media/base/text_track.h b/chromium/media/base/text_track.h
index 03e52cf67cb..f7ee5ee36d6 100644
--- a/chromium/media/base/text_track.h
+++ b/chromium/media/base/text_track.h
@@ -18,8 +18,8 @@ class TextTrackConfig;
class TextTrack {
public:
virtual ~TextTrack() {}
- virtual void addWebVTTCue(const base::TimeDelta& start,
- const base::TimeDelta& end,
+ virtual void addWebVTTCue(base::TimeDelta start,
+ base::TimeDelta end,
const std::string& id,
const std::string& content,
const std::string& settings) = 0;
diff --git a/chromium/media/base/unaligned_shared_memory.cc b/chromium/media/base/unaligned_shared_memory.cc
new file mode 100644
index 00000000000..e84bef4273d
--- /dev/null
+++ b/chromium/media/base/unaligned_shared_memory.cc
@@ -0,0 +1,60 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/unaligned_shared_memory.h"
+
+#include <limits>
+
+#include "base/logging.h"
+#include "base/sys_info.h"
+
+namespace media {
+
+UnalignedSharedMemory::UnalignedSharedMemory(
+ const base::SharedMemoryHandle& handle,
+ bool read_only)
+ : shm_(handle, read_only), misalignment_(0) {}
+
+UnalignedSharedMemory::~UnalignedSharedMemory() = default;
+
+bool UnalignedSharedMemory::MapAt(off_t offset, size_t size) {
+ if (offset < 0) {
+ DLOG(ERROR) << "Invalid offset";
+ return false;
+ }
+
+ /* | | | | | | shm pages
+ * | offset (may exceed max size_t)
+ * |-----------| size
+ * |-| misalignment
+ * | adjusted offset
+ * |-------------| requested mapping
+ */
+ // Note: result of % computation may be off_t or size_t, depending on the
+ // relative ranks of those types. In any case we assume that
+ // VMAllocationGranularity() fits in both types, so the final result does too.
+ size_t misalignment = offset % base::SysInfo::VMAllocationGranularity();
+
+ // Above this |size|, |size| + |misalignment| overflows.
+ size_t max_size = std::numeric_limits<size_t>::max() - misalignment;
+ if (size > max_size) {
+ DLOG(ERROR) << "Invalid size";
+ return false;
+ }
+
+ off_t adjusted_offset = offset - static_cast<off_t>(misalignment);
+ if (!shm_.MapAt(adjusted_offset, size + misalignment)) {
+ DLOG(ERROR) << "Failed to map shared memory";
+ return false;
+ }
+
+ misalignment_ = misalignment;
+ return true;
+}
+
+void* UnalignedSharedMemory::memory() const {
+ return static_cast<uint8_t*>(shm_.memory()) + misalignment_;
+}
+
+} // namespace media
diff --git a/chromium/media/base/unaligned_shared_memory.h b/chromium/media/base/unaligned_shared_memory.h
new file mode 100644
index 00000000000..0f2731c9f65
--- /dev/null
+++ b/chromium/media/base/unaligned_shared_memory.h
@@ -0,0 +1,37 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_UNALIGNED_SHARED_MEMORY_H_
+#define MEDIA_BASE_UNALIGNED_SHARED_MEMORY_H_
+
+#include <stdint.h>
+
+#include "base/macros.h"
+#include "base/memory/shared_memory.h"
+#include "media/base/media_export.h"
+
+namespace media {
+
+// Wrapper over base::SharedMemory that can be mapped at unaligned offsets.
+class MEDIA_EXPORT UnalignedSharedMemory {
+ public:
+ UnalignedSharedMemory(const base::SharedMemoryHandle& handle, bool read_only);
+ ~UnalignedSharedMemory();
+
+ bool MapAt(off_t offset, size_t size);
+ void* memory() const;
+
+ private:
+ base::SharedMemory shm_;
+
+ // Offset withing |shm_| memory that data has been mapped; strictly less than
+ // base::SysInfo::VMAllocationGranularity().
+ size_t misalignment_;
+
+ DISALLOW_COPY_AND_ASSIGN(UnalignedSharedMemory);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_UNALIGNED_SHARED_MEMORY_H_
diff --git a/chromium/media/base/unaligned_shared_memory_unittest.cc b/chromium/media/base/unaligned_shared_memory_unittest.cc
new file mode 100644
index 00000000000..fe36edc8d95
--- /dev/null
+++ b/chromium/media/base/unaligned_shared_memory_unittest.cc
@@ -0,0 +1,86 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/unaligned_shared_memory.h"
+
+#include <stdint.h>
+#include <string.h>
+
+#include <limits>
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "base/memory/shared_memory.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+namespace {
+
+const uint8_t kUnalignedData[] = "XXXhello";
+const size_t kUnalignedDataSize = arraysize(kUnalignedData);
+const off_t kUnalignedOffset = 3;
+
+const uint8_t kData[] = "hello";
+const size_t kDataSize = arraysize(kData);
+
+base::SharedMemoryHandle CreateHandle(const uint8_t* data, size_t size) {
+ base::SharedMemory shm;
+ EXPECT_TRUE(shm.CreateAndMapAnonymous(size));
+ memcpy(shm.memory(), data, size);
+ return shm.TakeHandle();
+}
+
+} // namespace
+
+TEST(UnalignedSharedMemoryTest, CreateAndDestroy) {
+ auto handle = CreateHandle(kData, kDataSize);
+ UnalignedSharedMemory shm(handle, true);
+}
+
+TEST(UnalignedSharedMemoryTest, CreateAndDestroy_InvalidHandle) {
+ base::SharedMemoryHandle handle;
+ UnalignedSharedMemory shm(handle, true);
+}
+
+TEST(UnalignedSharedMemoryTest, Map) {
+ auto handle = CreateHandle(kData, kDataSize);
+ UnalignedSharedMemory shm(handle, true);
+ ASSERT_TRUE(shm.MapAt(0, kDataSize));
+ EXPECT_EQ(0, memcmp(shm.memory(), kData, kDataSize));
+}
+
+TEST(UnalignedSharedMemoryTest, Map_Unaligned) {
+ auto handle = CreateHandle(kUnalignedData, kUnalignedDataSize);
+ UnalignedSharedMemory shm(handle, true);
+ ASSERT_TRUE(shm.MapAt(kUnalignedOffset, kDataSize));
+ EXPECT_EQ(0, memcmp(shm.memory(), kData, kDataSize));
+}
+
+TEST(UnalignedSharedMemoryTest, Map_InvalidHandle) {
+ base::SharedMemoryHandle handle;
+ UnalignedSharedMemory shm(handle, true);
+ ASSERT_FALSE(shm.MapAt(1, kDataSize));
+ EXPECT_EQ(shm.memory(), nullptr);
+}
+
+TEST(UnalignedSharedMemoryTest, Map_NegativeOffset) {
+ auto handle = CreateHandle(kData, kDataSize);
+ UnalignedSharedMemory shm(handle, true);
+ ASSERT_FALSE(shm.MapAt(-1, kDataSize));
+}
+
+TEST(UnalignedSharedMemoryTest, Map_SizeOverflow) {
+ auto handle = CreateHandle(kData, kDataSize);
+ UnalignedSharedMemory shm(handle, true);
+ ASSERT_FALSE(shm.MapAt(1, std::numeric_limits<size_t>::max()));
+}
+
+TEST(UnalignedSharedMemoryTest, UnmappedIsNullptr) {
+ auto handle = CreateHandle(kData, kDataSize);
+ UnalignedSharedMemory shm(handle, true);
+ ASSERT_EQ(shm.memory(), nullptr);
+}
+
+} // namespace media
diff --git a/chromium/media/base/user_input_monitor.cc b/chromium/media/base/user_input_monitor.cc
index 40ae87ed94b..ead5dec37be 100644
--- a/chromium/media/base/user_input_monitor.cc
+++ b/chromium/media/base/user_input_monitor.cc
@@ -4,38 +4,93 @@
#include "media/base/user_input_monitor.h"
+#include <utility>
+
+#include "base/atomicops.h"
#include "base/logging.h"
+#include "base/single_thread_task_runner.h"
namespace media {
+uint32_t ReadKeyPressMonitorCount(
+ const base::ReadOnlySharedMemoryMapping& readonly_mapping) {
+ if (!readonly_mapping.IsValid())
+ return 0;
+
+ // No ordering constraints between Load/Store operations, a temporary
+ // inconsistent value is fine.
+ return base::subtle::NoBarrier_Load(
+ reinterpret_cast<const base::subtle::Atomic32*>(
+ readonly_mapping.memory()));
+}
+
+void WriteKeyPressMonitorCount(
+ const base::WritableSharedMemoryMapping& writable_mapping,
+ uint32_t count) {
+ if (!writable_mapping.IsValid())
+ return;
+
+ // No ordering constraints between Load/Store operations, a temporary
+ // inconsistent value is fine.
+ base::subtle::NoBarrier_Store(
+ reinterpret_cast<base::subtle::Atomic32*>(writable_mapping.memory()),
+ count);
+}
+
#ifdef DISABLE_USER_INPUT_MONITOR
+// static
std::unique_ptr<UserInputMonitor> UserInputMonitor::Create(
- const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner,
- const scoped_refptr<base::SingleThreadTaskRunner>& ui_task_runner) {
+ scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
return nullptr;
}
#endif // DISABLE_USER_INPUT_MONITOR
+UserInputMonitor::UserInputMonitor() = default;
-UserInputMonitor::UserInputMonitor() : key_press_counter_references_(0) {}
+UserInputMonitor::~UserInputMonitor() = default;
-UserInputMonitor::~UserInputMonitor() {
- DCHECK_EQ(0u, key_press_counter_references_);
+UserInputMonitorBase::UserInputMonitorBase() {
+ DETACH_FROM_SEQUENCE(owning_sequence_);
}
-void UserInputMonitor::EnableKeyPressMonitoring() {
- base::AutoLock auto_lock(lock_);
- ++key_press_counter_references_;
- if (key_press_counter_references_ == 1) {
+UserInputMonitorBase::~UserInputMonitorBase() {
+ DCHECK_EQ(0u, references_);
+}
+
+void UserInputMonitorBase::EnableKeyPressMonitoring() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(owning_sequence_);
+ if (++references_ == 1) {
StartKeyboardMonitoring();
DVLOG(2) << "Started keyboard monitoring.";
}
}
-void UserInputMonitor::DisableKeyPressMonitoring() {
- base::AutoLock auto_lock(lock_);
- DCHECK_NE(key_press_counter_references_, 0u);
- --key_press_counter_references_;
- if (key_press_counter_references_ == 0) {
+base::ReadOnlySharedMemoryRegion
+UserInputMonitorBase::EnableKeyPressMonitoringWithMapping() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(owning_sequence_);
+ if (++references_ == 1) {
+ base::MappedReadOnlyRegion shmem =
+ base::ReadOnlySharedMemoryRegion::Create(sizeof(uint32_t));
+ if (!shmem.IsValid()) {
+ DVLOG(2) << "Error mapping key press count shmem.";
+ return base::ReadOnlySharedMemoryRegion();
+ }
+
+ key_press_count_region_ =
+ base::ReadOnlySharedMemoryRegion(std::move(shmem.region));
+ WriteKeyPressMonitorCount(shmem.mapping, 0u);
+ StartKeyboardMonitoring(std::move(shmem.mapping));
+ DVLOG(2) << "Started keyboard monitoring.";
+ }
+
+ return key_press_count_region_.Duplicate();
+}
+
+void UserInputMonitorBase::DisableKeyPressMonitoring() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(owning_sequence_);
+ DCHECK_NE(references_, 0u);
+ if (--references_ == 0) {
+ key_press_count_region_ = base::ReadOnlySharedMemoryRegion();
StopKeyboardMonitoring();
DVLOG(2) << "Stopped keyboard monitoring.";
}
diff --git a/chromium/media/base/user_input_monitor.h b/chromium/media/base/user_input_monitor.h
index 4c27021d37a..7b774949d71 100644
--- a/chromium/media/base/user_input_monitor.h
+++ b/chromium/media/base/user_input_monitor.h
@@ -10,6 +10,7 @@
#include <memory>
#include "base/macros.h"
+#include "base/memory/read_only_shared_memory_region.h"
#include "base/memory/ref_counted.h"
#include "base/synchronization/lock.h"
#include "media/base/media_export.h"
@@ -20,24 +21,29 @@ class SingleThreadTaskRunner;
namespace media {
-// Monitors and notifies about keyboard events.
-// Thread safe.
+// Utility functions for correctly and atomically reading from/writing to a
+// shared memory mapping containing key press count.
+uint32_t MEDIA_EXPORT
+ReadKeyPressMonitorCount(const base::ReadOnlySharedMemoryMapping& shmem);
+void MEDIA_EXPORT
+WriteKeyPressMonitorCount(const base::WritableSharedMemoryMapping& shmem,
+ uint32_t count);
+
+// Base class for audio:: and media:: UserInputMonitor implementations.
class MEDIA_EXPORT UserInputMonitor {
public:
UserInputMonitor();
virtual ~UserInputMonitor();
- // Creates a platform-specific instance of UserInputMonitor.
+ // Creates a platform-specific instance of UserInputMonitorBase.
// |io_task_runner| is the task runner for an IO thread.
// |ui_task_runner| is the task runner for a UI thread.
static std::unique_ptr<UserInputMonitor> Create(
- const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner,
- const scoped_refptr<base::SingleThreadTaskRunner>& ui_task_runner);
+ scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
- // A caller must call EnableKeyPressMonitoring and
- // DisableKeyPressMonitoring in pair.
- void EnableKeyPressMonitoring();
- void DisableKeyPressMonitoring();
+ virtual void EnableKeyPressMonitoring() = 0;
+ virtual void DisableKeyPressMonitoring() = 0;
// Returns the number of keypresses. The starting point from when it is
// counted is not guaranteed, but consistent within the pair of calls of
@@ -45,16 +51,41 @@ class MEDIA_EXPORT UserInputMonitor {
// use the difference between the values returned at two times to get the
// number of keypresses happened within that time period, but should not make
// any assumption on the initial value.
- virtual size_t GetKeyPressCount() const = 0;
+ virtual uint32_t GetKeyPressCount() const = 0;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(UserInputMonitor);
+};
+
+// Monitors and notifies about keyboard events.
+class MEDIA_EXPORT UserInputMonitorBase : public UserInputMonitor {
+ public:
+ UserInputMonitorBase();
+ ~UserInputMonitorBase() override;
+
+ // A caller must call EnableKeyPressMonitoring(WithMapping) and
+ // DisableKeyPressMonitoring in pair on the same sequence.
+ void EnableKeyPressMonitoring() override;
+ void DisableKeyPressMonitoring() override;
+
+ // Initializes a MappedReadOnlyRegion storing key press count. Returns a
+ // readonly region to the mapping and passes the writable mapping to platform
+ // specific implementation, to update key press count. If monitoring is
+ // already enabled, it only returns a handle to readonly region.
+ base::ReadOnlySharedMemoryRegion EnableKeyPressMonitoringWithMapping();
private:
virtual void StartKeyboardMonitoring() = 0;
+ virtual void StartKeyboardMonitoring(
+ base::WritableSharedMemoryMapping mapping) = 0;
virtual void StopKeyboardMonitoring() = 0;
- base::Lock lock_;
- size_t key_press_counter_references_;
+ size_t references_ = 0;
+ base::ReadOnlySharedMemoryRegion key_press_count_region_;
- DISALLOW_COPY_AND_ASSIGN(UserInputMonitor);
+ SEQUENCE_CHECKER(owning_sequence_);
+
+ DISALLOW_COPY_AND_ASSIGN(UserInputMonitorBase);
};
} // namespace media
diff --git a/chromium/media/base/user_input_monitor_linux.cc b/chromium/media/base/user_input_monitor_linux.cc
index 662a9f5de10..b52df992d79 100644
--- a/chromium/media/base/user_input_monitor_linux.cc
+++ b/chromium/media/base/user_input_monitor_linux.cc
@@ -16,7 +16,7 @@
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_loop_current.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/lock.h"
#include "media/base/keyboard_event_counter.h"
@@ -32,7 +32,7 @@ namespace {
// UserInputMonitorLinux since it needs to be deleted on the IO thread.
class UserInputMonitorLinuxCore
: public base::SupportsWeakPtr<UserInputMonitorLinuxCore>,
- public base::MessageLoop::DestructionObserver {
+ public base::MessageLoopCurrent::DestructionObserver {
public:
explicit UserInputMonitorLinuxCore(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
@@ -41,8 +41,9 @@ class UserInputMonitorLinuxCore
// DestructionObserver overrides.
void WillDestroyCurrentMessageLoop() override;
- size_t GetKeyPressCount() const;
+ uint32_t GetKeyPressCount() const;
void StartMonitor();
+ void StartMonitorWithMapping(base::WritableSharedMemoryMapping mapping);
void StopMonitor();
private:
@@ -54,6 +55,9 @@ class UserInputMonitorLinuxCore
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
+ // Used for sharing key press count value.
+ std::unique_ptr<base::WritableSharedMemoryMapping> key_press_count_mapping_;
+
//
// The following members should only be accessed on the IO thread.
//
@@ -67,18 +71,20 @@ class UserInputMonitorLinuxCore
DISALLOW_COPY_AND_ASSIGN(UserInputMonitorLinuxCore);
};
-class UserInputMonitorLinux : public UserInputMonitor {
+class UserInputMonitorLinux : public UserInputMonitorBase {
public:
explicit UserInputMonitorLinux(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
~UserInputMonitorLinux() override;
// Public UserInputMonitor overrides.
- size_t GetKeyPressCount() const override;
+ uint32_t GetKeyPressCount() const override;
private:
// Private UserInputMonitor overrides.
void StartKeyboardMonitoring() override;
+ void StartKeyboardMonitoring(
+ base::WritableSharedMemoryMapping mapping) override;
void StopKeyboardMonitoring() override;
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
@@ -107,7 +113,7 @@ void UserInputMonitorLinuxCore::WillDestroyCurrentMessageLoop() {
StopMonitor();
}
-size_t UserInputMonitorLinuxCore::GetKeyPressCount() const {
+uint32_t UserInputMonitorLinuxCore::GetKeyPressCount() const {
return counter_.GetKeyPressCount();
}
@@ -186,12 +192,19 @@ void UserInputMonitorLinuxCore::StartMonitor() {
// Start observing message loop destruction if we start monitoring the first
// event.
- base::MessageLoop::current()->AddDestructionObserver(this);
+ base::MessageLoopCurrent::Get()->AddDestructionObserver(this);
// Fetch pending events if any.
OnXEvent();
}
+void UserInputMonitorLinuxCore::StartMonitorWithMapping(
+ base::WritableSharedMemoryMapping mapping) {
+ StartMonitor();
+ key_press_count_mapping_ =
+ std::make_unique<base::WritableSharedMemoryMapping>(std::move(mapping));
+}
+
void UserInputMonitorLinuxCore::StopMonitor() {
DCHECK(io_task_runner_->BelongsToCurrentThread());
@@ -218,8 +231,11 @@ void UserInputMonitorLinuxCore::StopMonitor() {
XCloseDisplay(x_control_display_);
x_control_display_ = NULL;
}
+
+ key_press_count_mapping_.reset();
+
// Stop observing message loop destruction if no event is being monitored.
- base::MessageLoop::current()->RemoveDestructionObserver(this);
+ base::MessageLoopCurrent::Get()->RemoveDestructionObserver(this);
}
void UserInputMonitorLinuxCore::OnXEvent() {
@@ -242,6 +258,10 @@ void UserInputMonitorLinuxCore::ProcessXEvent(xEvent* event) {
XkbKeycodeToKeysym(x_control_display_, event->u.u.detail, 0, 0);
ui::KeyboardCode key_code = ui::KeyboardCodeFromXKeysym(key_sym);
counter_.OnKeyboardEvent(type, key_code);
+
+ // Update count value in shared memory.
+ if (key_press_count_mapping_)
+ WriteKeyPressMonitorCount(*key_press_count_mapping_, GetKeyPressCount());
}
// static
@@ -268,27 +288,35 @@ UserInputMonitorLinux::~UserInputMonitorLinux() {
delete core_;
}
-size_t UserInputMonitorLinux::GetKeyPressCount() const {
+uint32_t UserInputMonitorLinux::GetKeyPressCount() const {
return core_->GetKeyPressCount();
}
void UserInputMonitorLinux::StartKeyboardMonitoring() {
io_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&UserInputMonitorLinuxCore::StartMonitor,
+ core_->AsWeakPtr()));
+}
+
+void UserInputMonitorLinux::StartKeyboardMonitoring(
+ base::WritableSharedMemoryMapping mapping) {
+ io_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&UserInputMonitorLinuxCore::StartMonitor, core_->AsWeakPtr()));
+ base::BindOnce(&UserInputMonitorLinuxCore::StartMonitorWithMapping,
+ core_->AsWeakPtr(), std::move(mapping)));
}
void UserInputMonitorLinux::StopKeyboardMonitoring() {
io_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&UserInputMonitorLinuxCore::StopMonitor, core_->AsWeakPtr()));
+ FROM_HERE, base::BindOnce(&UserInputMonitorLinuxCore::StopMonitor,
+ core_->AsWeakPtr()));
}
} // namespace
std::unique_ptr<UserInputMonitor> UserInputMonitor::Create(
- const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner,
- const scoped_refptr<base::SingleThreadTaskRunner>& ui_task_runner) {
+ scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
return std::make_unique<UserInputMonitorLinux>(io_task_runner);
}
diff --git a/chromium/media/base/user_input_monitor_mac.cc b/chromium/media/base/user_input_monitor_mac.cc
index 96fd3bb5c19..585cb01b895 100644
--- a/chromium/media/base/user_input_monitor_mac.cc
+++ b/chromium/media/base/user_input_monitor_mac.cc
@@ -9,21 +9,37 @@
#include <memory>
#include "base/macros.h"
+#include "base/timer/timer.h"
namespace media {
namespace {
-class UserInputMonitorMac : public UserInputMonitor {
+// Update key press count in shared memory twice as frequent as
+// AudioInputController::AudioCallback::OnData() callback for WebRTC.
+constexpr base::TimeDelta kUpdateKeyPressCountIntervalMs =
+ base::TimeDelta::FromMilliseconds(5);
+
+class UserInputMonitorMac : public UserInputMonitorBase {
public:
UserInputMonitorMac();
~UserInputMonitorMac() override;
- size_t GetKeyPressCount() const override;
+ uint32_t GetKeyPressCount() const override;
private:
void StartKeyboardMonitoring() override;
+ void StartKeyboardMonitoring(
+ base::WritableSharedMemoryMapping mapping) override;
void StopKeyboardMonitoring() override;
+ void UpdateKeyPressCountShmem();
+
+ // Used for sharing key press count value.
+ std::unique_ptr<base::WritableSharedMemoryMapping> key_press_count_mapping_;
+
+ // Timer for updating key press count in |key_press_count_mapping_|.
+ base::RepeatingTimer key_press_count_timer_;
+
DISALLOW_COPY_AND_ASSIGN(UserInputMonitorMac);
};
@@ -31,7 +47,7 @@ UserInputMonitorMac::UserInputMonitorMac() {}
UserInputMonitorMac::~UserInputMonitorMac() {}
-size_t UserInputMonitorMac::GetKeyPressCount() const {
+uint32_t UserInputMonitorMac::GetKeyPressCount() const {
// Use |kCGEventSourceStateHIDSystemState| since we only want to count
// hardware generated events.
return CGEventSourceCounterForEventType(kCGEventSourceStateHIDSystemState,
@@ -40,13 +56,32 @@ size_t UserInputMonitorMac::GetKeyPressCount() const {
void UserInputMonitorMac::StartKeyboardMonitoring() {}
-void UserInputMonitorMac::StopKeyboardMonitoring() {}
+void UserInputMonitorMac::StartKeyboardMonitoring(
+ base::WritableSharedMemoryMapping mapping) {
+ key_press_count_mapping_ =
+ std::make_unique<base::WritableSharedMemoryMapping>(std::move(mapping));
+ key_press_count_timer_.Start(FROM_HERE, kUpdateKeyPressCountIntervalMs, this,
+ &UserInputMonitorMac::UpdateKeyPressCountShmem);
+}
+
+void UserInputMonitorMac::StopKeyboardMonitoring() {
+ if (!key_press_count_mapping_)
+ return;
+
+ key_press_count_timer_.AbandonAndStop();
+ key_press_count_mapping_.reset();
+}
+
+void UserInputMonitorMac::UpdateKeyPressCountShmem() {
+ DCHECK(key_press_count_mapping_);
+ WriteKeyPressMonitorCount(*key_press_count_mapping_, GetKeyPressCount());
+}
} // namespace
std::unique_ptr<UserInputMonitor> UserInputMonitor::Create(
- const scoped_refptr<base::SingleThreadTaskRunner>& input_task_runner,
- const scoped_refptr<base::SingleThreadTaskRunner>& ui_task_runner) {
+ scoped_refptr<base::SingleThreadTaskRunner> input_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
return std::make_unique<UserInputMonitorMac>();
}
diff --git a/chromium/media/base/user_input_monitor_unittest.cc b/chromium/media/base/user_input_monitor_unittest.cc
index 126665bcf94..e6b7e7b685d 100644
--- a/chromium/media/base/user_input_monitor_unittest.cc
+++ b/chromium/media/base/user_input_monitor_unittest.cc
@@ -5,15 +5,12 @@
#include "media/base/user_input_monitor.h"
#include <memory>
+#include <utility>
-#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "build/build_config.h"
-#include "media/base/keyboard_event_counter.h"
-#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "third_party/skia/include/core/SkPoint.h"
#if defined(OS_LINUX)
#include "base/files/file_descriptor_watcher_posix.h"
@@ -29,22 +26,57 @@ TEST(UserInputMonitorTest, CreatePlatformSpecific) {
base::MessageLoopForUI message_loop;
#endif // defined(OS_LINUX)
- base::RunLoop run_loop;
std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
message_loop.task_runner(), message_loop.task_runner());
if (!monitor)
return;
- MockMouseListener listener;
- // Ignore any callbacks.
- EXPECT_CALL(listener, OnMouseMoved(testing::_)).Times(testing::AnyNumber());
-
monitor->EnableKeyPressMonitoring();
monitor->DisableKeyPressMonitoring();
monitor.reset();
- run_loop.RunUntilIdle();
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST(UserInputMonitorTest, CreatePlatformSpecificWithMapping) {
+#if defined(OS_LINUX)
+ base::MessageLoopForIO message_loop;
+ base::FileDescriptorWatcher file_descriptor_watcher(&message_loop);
+#else
+ base::MessageLoopForUI message_loop;
+#endif // defined(OS_LINUX)
+
+ std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
+ message_loop.task_runner(), message_loop.task_runner());
+
+ if (!monitor)
+ return;
+
+ base::ReadOnlySharedMemoryMapping readonly_mapping =
+ static_cast<UserInputMonitorBase*>(monitor.get())
+ ->EnableKeyPressMonitoringWithMapping()
+ .Map();
+ EXPECT_EQ(0u, ReadKeyPressMonitorCount(readonly_mapping));
+ monitor->DisableKeyPressMonitoring();
+
+ monitor.reset();
+ base::RunLoop().RunUntilIdle();
+
+ // Check that read only region remains valid after disable.
+ EXPECT_EQ(0u, ReadKeyPressMonitorCount(readonly_mapping));
+}
+
+TEST(UserInputMonitorTest, ReadWriteKeyPressMonitorCount) {
+ std::unique_ptr<base::MappedReadOnlyRegion> shmem =
+ std::make_unique<base::MappedReadOnlyRegion>(
+ base::ReadOnlySharedMemoryRegion::Create(sizeof(uint32_t)));
+ ASSERT_TRUE(shmem->IsValid());
+
+ constexpr uint32_t count = 10;
+ WriteKeyPressMonitorCount(shmem->mapping, count);
+ base::ReadOnlySharedMemoryMapping readonly_mapping = shmem->region.Map();
+ EXPECT_EQ(count, ReadKeyPressMonitorCount(readonly_mapping));
}
} // namespace media
diff --git a/chromium/media/base/user_input_monitor_win.cc b/chromium/media/base/user_input_monitor_win.cc
index b8e92317b34..d42f98a2d3f 100644
--- a/chromium/media/base/user_input_monitor_win.cc
+++ b/chromium/media/base/user_input_monitor_win.cc
@@ -12,7 +12,7 @@
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_loop_current.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/stringprintf.h"
#include "base/synchronization/lock.h"
@@ -41,7 +41,7 @@ std::unique_ptr<RAWINPUTDEVICE> GetRawInputDevices(HWND hwnd, DWORD flags) {
// UserInputMonitorWin since it needs to be deleted on the UI thread.
class UserInputMonitorWinCore
: public base::SupportsWeakPtr<UserInputMonitorWinCore>,
- public base::MessageLoop::DestructionObserver {
+ public base::MessageLoopCurrent::DestructionObserver {
public:
enum EventBitMask {
MOUSE_EVENT_MASK = 1,
@@ -55,8 +55,9 @@ class UserInputMonitorWinCore
// DestructionObserver overrides.
void WillDestroyCurrentMessageLoop() override;
- size_t GetKeyPressCount() const;
+ uint32_t GetKeyPressCount() const;
void StartMonitor();
+ void StartMonitorWithMapping(base::WritableSharedMemoryMapping mapping);
void StopMonitor();
private:
@@ -71,6 +72,9 @@ class UserInputMonitorWinCore
// Task runner on which |window_| is created.
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
+ // Used for sharing key press count value.
+ std::unique_ptr<base::WritableSharedMemoryMapping> key_press_count_mapping_;
+
// These members are only accessed on the UI thread.
std::unique_ptr<base::win::MessageWindow> window_;
KeyboardEventCounter counter_;
@@ -78,18 +82,20 @@ class UserInputMonitorWinCore
DISALLOW_COPY_AND_ASSIGN(UserInputMonitorWinCore);
};
-class UserInputMonitorWin : public UserInputMonitor {
+class UserInputMonitorWin : public UserInputMonitorBase {
public:
explicit UserInputMonitorWin(
const scoped_refptr<base::SingleThreadTaskRunner>& ui_task_runner);
~UserInputMonitorWin() override;
// Public UserInputMonitor overrides.
- size_t GetKeyPressCount() const override;
+ uint32_t GetKeyPressCount() const override;
private:
// Private UserInputMonitor overrides.
void StartKeyboardMonitoring() override;
+ void StartKeyboardMonitoring(
+ base::WritableSharedMemoryMapping mapping) override;
void StopKeyboardMonitoring() override;
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
@@ -111,7 +117,7 @@ void UserInputMonitorWinCore::WillDestroyCurrentMessageLoop() {
StopMonitor();
}
-size_t UserInputMonitorWinCore::GetKeyPressCount() const {
+uint32_t UserInputMonitorWinCore::GetKeyPressCount() const {
return counter_.GetKeyPressCount();
}
@@ -123,8 +129,8 @@ void UserInputMonitorWinCore::StartMonitor() {
std::unique_ptr<base::win::MessageWindow> window =
std::make_unique<base::win::MessageWindow>();
- if (!window->Create(base::Bind(&UserInputMonitorWinCore::HandleMessage,
- base::Unretained(this)))) {
+ if (!window->Create(base::BindRepeating(
+ &UserInputMonitorWinCore::HandleMessage, base::Unretained(this)))) {
PLOG(ERROR) << "Failed to create the raw input window";
return;
}
@@ -140,7 +146,14 @@ void UserInputMonitorWinCore::StartMonitor() {
window_ = std::move(window);
// Start observing message loop destruction if we start monitoring the first
// event.
- base::MessageLoop::current()->AddDestructionObserver(this);
+ base::MessageLoopCurrent::Get()->AddDestructionObserver(this);
+}
+
+void UserInputMonitorWinCore::StartMonitorWithMapping(
+ base::WritableSharedMemoryMapping mapping) {
+ StartMonitor();
+ key_press_count_mapping_ =
+ std::make_unique<base::WritableSharedMemoryMapping>(std::move(mapping));
}
void UserInputMonitorWinCore::StopMonitor() {
@@ -159,8 +172,10 @@ void UserInputMonitorWinCore::StopMonitor() {
window_ = nullptr;
+ key_press_count_mapping_.reset();
+
// Stop observing message loop destruction if no event is being monitored.
- base::MessageLoop::current()->RemoveDestructionObserver(this);
+ base::MessageLoopCurrent::Get()->RemoveDestructionObserver(this);
}
LRESULT UserInputMonitorWinCore::OnInput(HRAWINPUT input_handle) {
@@ -196,6 +211,10 @@ LRESULT UserInputMonitorWinCore::OnInput(HRAWINPUT input_handle) {
ui::KeyboardCode key_code =
ui::KeyboardCodeForWindowsKeyCode(input->data.keyboard.VKey);
counter_.OnKeyboardEvent(event, key_code);
+
+ // Update count value in shared memory.
+ if (key_press_count_mapping_)
+ WriteKeyPressMonitorCount(*key_press_count_mapping_, GetKeyPressCount());
}
return DefRawInputProc(&input, 1, sizeof(RAWINPUTHEADER));
@@ -231,27 +250,35 @@ UserInputMonitorWin::~UserInputMonitorWin() {
delete core_;
}
-size_t UserInputMonitorWin::GetKeyPressCount() const {
+uint32_t UserInputMonitorWin::GetKeyPressCount() const {
return core_->GetKeyPressCount();
}
void UserInputMonitorWin::StartKeyboardMonitoring() {
ui_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&UserInputMonitorWinCore::StartMonitor,
+ core_->AsWeakPtr()));
+}
+
+void UserInputMonitorWin::StartKeyboardMonitoring(
+ base::WritableSharedMemoryMapping mapping) {
+ ui_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&UserInputMonitorWinCore::StartMonitor, core_->AsWeakPtr()));
+ base::BindOnce(&UserInputMonitorWinCore::StartMonitorWithMapping,
+ core_->AsWeakPtr(), std::move(mapping)));
}
void UserInputMonitorWin::StopKeyboardMonitoring() {
ui_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&UserInputMonitorWinCore::StopMonitor, core_->AsWeakPtr()));
+ FROM_HERE, base::BindOnce(&UserInputMonitorWinCore::StopMonitor,
+ core_->AsWeakPtr()));
}
} // namespace
std::unique_ptr<UserInputMonitor> UserInputMonitor::Create(
- const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner,
- const scoped_refptr<base::SingleThreadTaskRunner>& ui_task_runner) {
+ scoped_refptr<base::SingleThreadTaskRunner> io_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
return std::make_unique<UserInputMonitorWin>(ui_task_runner);
}
diff --git a/chromium/media/base/video_bitrate_allocation.cc b/chromium/media/base/video_bitrate_allocation.cc
new file mode 100644
index 00000000000..629092eedf9
--- /dev/null
+++ b/chromium/media/base/video_bitrate_allocation.cc
@@ -0,0 +1,54 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "video_bitrate_allocation.h"
+
+#include <limits>
+#include <numeric>
+
+#include "base/logging.h"
+
+namespace media {
+
+constexpr size_t VideoBitrateAllocation::kMaxSpatialLayers;
+constexpr size_t VideoBitrateAllocation::kMaxTemporalLayers;
+
+VideoBitrateAllocation::VideoBitrateAllocation() : bitrates_{} {}
+
+bool VideoBitrateAllocation::SetBitrate(size_t spatial_index,
+ size_t temporal_index,
+ int bitrate_bps) {
+ CHECK_LT(spatial_index, kMaxSpatialLayers);
+ CHECK_LT(temporal_index, kMaxTemporalLayers);
+ CHECK_GE(bitrate_bps, 0);
+
+ if (GetSumBps() - bitrates_[spatial_index][temporal_index] >
+ std::numeric_limits<int>::max() - bitrate_bps) {
+ return false; // Would cause overflow of the sum.
+ }
+
+ bitrates_[spatial_index][temporal_index] = bitrate_bps;
+ return true;
+}
+
+int VideoBitrateAllocation::GetBitrateBps(size_t spatial_index,
+ size_t temporal_index) const {
+ CHECK_LT(spatial_index, kMaxSpatialLayers);
+ CHECK_LT(temporal_index, kMaxTemporalLayers);
+ return bitrates_[spatial_index][temporal_index];
+}
+
+int VideoBitrateAllocation::GetSumBps() const {
+ int sum = 0;
+ for (size_t spatial_index = 0; spatial_index < kMaxSpatialLayers;
+ ++spatial_index) {
+ for (size_t temporal_index = 0; temporal_index < kMaxTemporalLayers;
+ ++temporal_index) {
+ sum += bitrates_[spatial_index][temporal_index];
+ }
+ }
+ return sum;
+}
+
+} // namespace media
diff --git a/chromium/media/base/video_bitrate_allocation.h b/chromium/media/base/video_bitrate_allocation.h
new file mode 100644
index 00000000000..6341ce3f751
--- /dev/null
+++ b/chromium/media/base/video_bitrate_allocation.h
@@ -0,0 +1,43 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_VIDEO_BITRATE_ALLOCATION_H_
+#define MEDIA_BASE_VIDEO_BITRATE_ALLOCATION_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "base/macros.h"
+#include "media/base/media_export.h"
+
+namespace media {
+
+// Class that describes how video bitrate, in bps, is allocated across temporal
+// and spatial layers. Note that bitrates are NOT cumulative. Depending on
+// whether layers are dependent or not, it is up to the user to aggregate.
+class MEDIA_EXPORT VideoBitrateAllocation {
+ public:
+ static constexpr size_t kMaxSpatialLayers = 5;
+ static constexpr size_t kMaxTemporalLayers = 4;
+
+ VideoBitrateAllocation();
+ ~VideoBitrateAllocation() = default;
+
+  // Returns false if this bitrate can't be set (the sum would overflow int).
+ bool SetBitrate(size_t spatial_index, size_t temporal_index, int bitrate_bps);
+
+  // Returns the bitrate of the given spatial/temporal index, or 0 if not set.
+ int GetBitrateBps(size_t spatial_index, size_t temporal_index) const;
+
+ // Sum of all bitrates.
+ int32_t GetSumBps() const;
+
+ private:
+ int bitrates_[kMaxSpatialLayers][kMaxTemporalLayers];
+ DISALLOW_COPY_AND_ASSIGN(VideoBitrateAllocation);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_VIDEO_BITRATE_ALLOCATION_H_
diff --git a/chromium/media/base/video_bitrate_allocation_unittest.cc b/chromium/media/base/video_bitrate_allocation_unittest.cc
new file mode 100644
index 00000000000..ce9e8acaaf9
--- /dev/null
+++ b/chromium/media/base/video_bitrate_allocation_unittest.cc
@@ -0,0 +1,70 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <set>
+
+#include "base/logging.h"
+#include "media/base/video_bitrate_allocation.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+TEST(VideoBitrateAllocationTest, SetAndGet) {
+ int sum = 0;
+ int layer_rate = 0;
+ VideoBitrateAllocation allocation;
+ for (size_t spatial_index = 0;
+ spatial_index < VideoBitrateAllocation::kMaxSpatialLayers;
+ ++spatial_index) {
+ for (size_t temporal_index = 0;
+ temporal_index < VideoBitrateAllocation::kMaxTemporalLayers;
+ ++temporal_index) {
+ sum += layer_rate;
+ EXPECT_TRUE(
+ allocation.SetBitrate(spatial_index, temporal_index, layer_rate++));
+ }
+ }
+ EXPECT_EQ(sum, allocation.GetSumBps());
+
+ layer_rate = 0;
+ for (size_t spatial_index = 0;
+ spatial_index < VideoBitrateAllocation::kMaxSpatialLayers;
+ ++spatial_index) {
+ for (size_t temporal_index = 0;
+ temporal_index < VideoBitrateAllocation::kMaxTemporalLayers;
+ ++temporal_index) {
+ EXPECT_EQ(allocation.GetBitrateBps(spatial_index, temporal_index),
+ layer_rate++);
+ }
+ }
+}
+
+TEST(VideoBitrateAllocationTest, CanSetMaxValue) {
+ VideoBitrateAllocation allocation;
+ // Single cell containing max value.
+ EXPECT_TRUE(allocation.SetBitrate(0, 0, std::numeric_limits<int>::max()));
+ // Setting to 0 is OK. Doesn't increase sum.
+ EXPECT_TRUE(allocation.SetBitrate(0, 1, 0));
+ // Adding 1 will overflow.
+ EXPECT_FALSE(allocation.SetBitrate(0, 2, 1));
+
+ EXPECT_EQ(std::numeric_limits<int>::max(), allocation.GetSumBps());
+}
+
+TEST(VideoBitrateAllocationTest, ValidatesSumWhenOverwriting) {
+ VideoBitrateAllocation allocation;
+ // Fill up to max sum.
+ EXPECT_TRUE(allocation.SetBitrate(0, 0, std::numeric_limits<int>::max() - 2));
+ EXPECT_TRUE(allocation.SetBitrate(0, 1, 2));
+ // This will overflow, old value kept.
+ EXPECT_FALSE(allocation.SetBitrate(0, 1, 3));
+ EXPECT_EQ(allocation.GetBitrateBps(0, 1), 2);
+ // OK only since we subtract the previous 2.
+ EXPECT_TRUE(allocation.SetBitrate(0, 1, 1));
+ EXPECT_EQ(allocation.GetBitrateBps(0, 1), 1);
+
+ EXPECT_EQ(std::numeric_limits<int>::max() - 1, allocation.GetSumBps());
+}
+
+} // namespace media
diff --git a/chromium/media/base/video_decoder_config.cc b/chromium/media/base/video_decoder_config.cc
index e763506511b..8d879d7b549 100644
--- a/chromium/media/base/video_decoder_config.cc
+++ b/chromium/media/base/video_decoder_config.cc
@@ -10,6 +10,7 @@
#include "media/base/media_util.h"
#include "media/base/video_frame.h"
#include "media/base/video_types.h"
+#include "media/base/video_util.h"
namespace media {
@@ -136,11 +137,10 @@ void VideoDecoderConfig::Initialize(VideoCodec codec,
}
bool VideoDecoderConfig::IsValidConfig() const {
- return codec_ != kUnknownVideoCodec &&
- natural_size_.width() > 0 &&
- natural_size_.height() > 0 &&
- VideoFrame::IsValidConfig(format_, VideoFrame::STORAGE_UNOWNED_MEMORY,
- coded_size_, visible_rect_, natural_size_);
+ return codec_ != kUnknownVideoCodec && natural_size_.width() > 0 &&
+ natural_size_.height() > 0 &&
+ VideoFrame::IsValidConfig(format_, VideoFrame::STORAGE_UNOWNED_MEMORY,
+ coded_size_, visible_rect_, natural_size_);
}
bool VideoDecoderConfig::Matches(const VideoDecoderConfig& config) const {
@@ -166,11 +166,15 @@ std::string VideoDecoderConfig::AsHumanReadableString() const {
<< " natural size: [" << natural_size().width() << ","
<< natural_size().height() << "]"
<< " has extra data? " << (extra_data().empty() ? "false" : "true")
- << " encrypted? " << (is_encrypted() ? "true" : "false")
+ << " encryption scheme: " << encryption_scheme()
<< " rotation: " << VideoRotationToString(video_rotation());
return s.str();
}
+double VideoDecoderConfig::GetPixelAspectRatio() const {
+ return ::media::GetPixelAspectRatio(visible_rect_, natural_size_);
+}
+
void VideoDecoderConfig::SetExtraData(const std::vector<uint8_t>& extra_data) {
extra_data_ = extra_data;
}
diff --git a/chromium/media/base/video_decoder_config.h b/chromium/media/base/video_decoder_config.h
index 6811d261913..26f6f7864f6 100644
--- a/chromium/media/base/video_decoder_config.h
+++ b/chromium/media/base/video_decoder_config.h
@@ -103,6 +103,11 @@ class MEDIA_EXPORT VideoDecoderConfig {
// into account.
const gfx::Size& natural_size() const { return natural_size_; }
+ // TODO(crbug.com/837337): This should be explicitly set (replacing
+ // |natural_size|). It should also be possible to determine whether it was set
+ // at all, since in-stream information may override it if it was not.
+ double GetPixelAspectRatio() const;
+
// Optional byte data required to initialize video decoders, such as H.264
// AVCC data.
void SetExtraData(const std::vector<uint8_t>& extra_data);
diff --git a/chromium/media/base/video_facing.h b/chromium/media/base/video_facing.h
index bf354c2a70c..ecd84835002 100644
--- a/chromium/media/base/video_facing.h
+++ b/chromium/media/base/video_facing.h
@@ -8,6 +8,8 @@
namespace media {
// Facing mode for video capture.
+// A Java counterpart will be generated for this enum.
+// GENERATED_JAVA_ENUM_PACKAGE: org.chromium.media
enum VideoFacingMode {
MEDIA_VIDEO_FACING_NONE = 0,
MEDIA_VIDEO_FACING_USER,
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index 5d840144770..b68ba405ce2 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -212,7 +212,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapNativeTextures(
if (format != PIXEL_FORMAT_ARGB && format != PIXEL_FORMAT_XRGB &&
format != PIXEL_FORMAT_RGB32 && format != PIXEL_FORMAT_UYVY &&
format != PIXEL_FORMAT_NV12 && format != PIXEL_FORMAT_I420) {
- LOG(DFATAL) << "Unsupported pixel format supported, got "
+ LOG(DFATAL) << "Unsupported pixel format: "
<< VideoPixelFormatToString(format);
return nullptr;
}
diff --git a/chromium/media/base/video_frame_metadata.h b/chromium/media/base/video_frame_metadata.h
index c25fd3d7ec1..cabe173609a 100644
--- a/chromium/media/base/video_frame_metadata.h
+++ b/chromium/media/base/video_frame_metadata.h
@@ -109,7 +109,7 @@ class MEDIA_EXPORT VideoFrameMetadata {
// if ALLOW_OVERLAY is set. However, it allows us to process the overlay
// to see if it would have been promoted, if it were backed by a SurfaceView
// instead. This lets us figure out when SurfaceViews are appropriate.
- SURFACE_TEXTURE,
+ TEXTURE_OWNER,
// Android only: if set, then this frame's resource would like to be
// notified about its promotability to an overlay.
@@ -119,9 +119,22 @@ class MEDIA_EXPORT VideoFrameMetadata {
// rather than being composited into the framebuffer.
REQUIRE_OVERLAY,
+ // Windows only: this video has protected content.
+ PROTECTED_VIDEO,
+
// Whether this frame was decoded in a power efficient way.
POWER_EFFICIENT,
+ // CompositorFrameMetadata variables associated with this frame. Used for
+ // remote debugging.
+ // Use Get/SetDouble() for these keys.
+ // TODO(crbug.com/832220): Use a customized dictionary value instead of
+ // using these keys directly.
+ DEVICE_SCALE_FACTOR,
+ PAGE_SCALE_FACTOR,
+ ROOT_SCROLL_OFFSET_X,
+ ROOT_SCROLL_OFFSET_Y,
+
NUM_KEYS
};
diff --git a/chromium/media/base/video_util.cc b/chromium/media/base/video_util.cc
index 425a0ba0ff4..cb29dcb1128 100644
--- a/chromium/media/base/video_util.cc
+++ b/chromium/media/base/video_util.cc
@@ -51,26 +51,43 @@ void FillRegionOutsideVisibleRect(uint8_t* data,
} // namespace
-gfx::Size GetNaturalSize(const gfx::Size& visible_size,
- int aspect_ratio_numerator,
- int aspect_ratio_denominator) {
- if (aspect_ratio_denominator <= 0 || aspect_ratio_numerator <= 0) {
- return gfx::Size();
- }
+double GetPixelAspectRatio(const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size) {
+ double visible_width = visible_rect.width();
+ double visible_height = visible_rect.height();
+ double natural_width = natural_size.width();
+ double natural_height = natural_size.height();
+ return (visible_height * natural_width) / (visible_width * natural_height);
+}
- double aspect_ratio = aspect_ratio_numerator /
- static_cast<double>(aspect_ratio_denominator);
+gfx::Size GetNaturalSize(const gfx::Rect& visible_rect,
+ double pixel_aspect_ratio) {
+ // TODO(sandersd): Also handle conversion back to integers overflowing.
+ if (!std::isfinite(pixel_aspect_ratio) || pixel_aspect_ratio <= 0.0)
+ return gfx::Size();
// The HTML spec requires that we always grow a dimension to match aspect
// ratio, rather than modify just the width:
// github.com/whatwg/html/commit/2e94aa64fcf9adbd2f70d8c2aecd192c8678e298
- if (aspect_ratio_numerator > aspect_ratio_denominator) {
- return gfx::Size(round(visible_size.width() * aspect_ratio),
- visible_size.height());
+ if (pixel_aspect_ratio >= 1.0) {
+ return gfx::Size(std::round(visible_rect.width() * pixel_aspect_ratio),
+ visible_rect.height());
}
- return gfx::Size(visible_size.width(),
- round(visible_size.height() / aspect_ratio));
+ return gfx::Size(visible_rect.width(),
+ std::round(visible_rect.height() / pixel_aspect_ratio));
+}
+
+gfx::Size GetNaturalSize(const gfx::Size& visible_size,
+ int aspect_ratio_numerator,
+ int aspect_ratio_denominator) {
+ if (aspect_ratio_denominator <= 0 || aspect_ratio_numerator <= 0)
+ return gfx::Size();
+
+ double pixel_aspect_ratio =
+ aspect_ratio_numerator / static_cast<double>(aspect_ratio_denominator);
+
+ return GetNaturalSize(gfx::Rect(visible_size), pixel_aspect_ratio);
}
void FillYUV(VideoFrame* frame, uint8_t y, uint8_t u, uint8_t v) {
diff --git a/chromium/media/base/video_util.h b/chromium/media/base/video_util.h
index 6db9f79cd87..2d5d47b2ac5 100644
--- a/chromium/media/base/video_util.h
+++ b/chromium/media/base/video_util.h
@@ -16,7 +16,34 @@ namespace media {
class VideoFrame;
-// Computes the size of |visible_size| for a given aspect ratio.
+// Computes the pixel aspect ratio of a given |visible_rect| from its
+// |natural_size|. This describes the shape of a coded pixel as the ratio
+// of its width to its height.
+//
+// See https://en.wikipedia.org/wiki/Pixel_aspect_ratio for a detailed
+// definition.
+//
+// Returns NaN or Infinity if |visible_rect| or |natural_size| are empty.
+//
+// Note: Something has probably gone wrong if you need to call this function;
+// pixel aspect ratios should be the source of truth.
+//
+// TODO(crbug.com/837337): Decide how to encode 'not provided' for pixel aspect
+// ratios, and return that if one of the inputs is empty.
+MEDIA_EXPORT double GetPixelAspectRatio(const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size);
+
+// Increases (at most) one of the dimensions of |visible_rect| to produce
+// a |natural_size| with the given pixel aspect ratio.
+//
+// Returns gfx::Size() if |pixel_aspect_ratio| is not finite and positive.
+MEDIA_EXPORT gfx::Size GetNaturalSize(const gfx::Rect& visible_rect,
+ double pixel_aspect_ratio);
+
+// Overload that takes the pixel aspect ratio as an integer fraction (and
+// |visible_size| instead of |visible_rect|).
+//
+// Returns gfx::Size() if numerator or denominator are not positive.
MEDIA_EXPORT gfx::Size GetNaturalSize(const gfx::Size& visible_size,
int aspect_ratio_numerator,
int aspect_ratio_denominator);
diff --git a/chromium/media/base/video_util_unittest.cc b/chromium/media/base/video_util_unittest.cc
index f11828c05fe..7433ad162c5 100644
--- a/chromium/media/base/video_util_unittest.cc
+++ b/chromium/media/base/video_util_unittest.cc
@@ -6,6 +6,7 @@
#include <stdint.h>
+#include <cmath>
#include <memory>
#include "base/macros.h"
@@ -190,7 +191,56 @@ class VideoUtilTest : public testing::Test {
DISALLOW_COPY_AND_ASSIGN(VideoUtilTest);
};
-TEST_F(VideoUtilTest, GetNaturalSize) {
+TEST_F(VideoUtilTest, GetPixelAspectRatio) {
+ gfx::Rect visible_rect(320, 240);
+
+ // Test empty or invalid combinations.
+ EXPECT_TRUE(std::isnan(GetPixelAspectRatio(gfx::Rect(), gfx::Size())));
+ EXPECT_TRUE(std::isnan(GetPixelAspectRatio(gfx::Rect(1, 1), gfx::Size())));
+ EXPECT_TRUE(std::isnan(GetPixelAspectRatio(gfx::Rect(), gfx::Size(1, 1))));
+ EXPECT_TRUE(
+ std::isinf(GetPixelAspectRatio(gfx::Rect(1, 1), gfx::Size(1, 0))));
+ EXPECT_EQ(0.0, GetPixelAspectRatio(gfx::Rect(1, 1), gfx::Size(0, 1)));
+ EXPECT_EQ(0.0, GetPixelAspectRatio(gfx::Rect(1, 0), gfx::Size(1, 1)));
+ EXPECT_TRUE(
+ std::isinf(GetPixelAspectRatio(gfx::Rect(0, 1), gfx::Size(1, 1))));
+
+ // Some normal ratios.
+ EXPECT_DOUBLE_EQ(1.0, GetPixelAspectRatio(visible_rect, gfx::Size(320, 240)));
+ EXPECT_DOUBLE_EQ(2.0, GetPixelAspectRatio(visible_rect, gfx::Size(640, 240)));
+ EXPECT_DOUBLE_EQ(0.5, GetPixelAspectRatio(visible_rect, gfx::Size(320, 480)));
+}
+
+TEST_F(VideoUtilTest, GetNaturalSize_Double) {
+ gfx::Rect visible_rect(320, 240);
+
+ // Test 0 sizes.
+ EXPECT_EQ(gfx::Size(0, 0), GetNaturalSize(gfx::Rect(0, 0), 1.0));
+ EXPECT_EQ(gfx::Size(0, 1), GetNaturalSize(gfx::Rect(0, 1), 1.0));
+ EXPECT_EQ(gfx::Size(1, 0), GetNaturalSize(gfx::Rect(1, 0), 1.0));
+
+ // Test abnormal ratios.
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_rect, NAN));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_rect, 0.0));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_rect, INFINITY));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_rect, -INFINITY));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_rect, -1.0));
+
+ // Test normal sizes and ratios.
+ EXPECT_EQ(gfx::Size(320, 240), GetNaturalSize(visible_rect, 1.0 / 1.0));
+ EXPECT_EQ(gfx::Size(640, 240), GetNaturalSize(visible_rect, 2.0 / 1.0));
+ EXPECT_EQ(gfx::Size(320, 480), GetNaturalSize(visible_rect, 1.0 / 2.0));
+ EXPECT_EQ(gfx::Size(427, 240), GetNaturalSize(visible_rect, 4.0 / 3.0));
+ EXPECT_EQ(gfx::Size(320, 320), GetNaturalSize(visible_rect, 3.0 / 4.0));
+ EXPECT_EQ(gfx::Size(569, 240), GetNaturalSize(visible_rect, 16.0 / 9.0));
+ EXPECT_EQ(gfx::Size(320, 427), GetNaturalSize(visible_rect, 9.0 / 16.0));
+
+ // Test some random ratios.
+ EXPECT_EQ(gfx::Size(495, 240), GetNaturalSize(visible_rect, 17.0 / 11.0));
+ EXPECT_EQ(gfx::Size(320, 371), GetNaturalSize(visible_rect, 11.0 / 17.0));
+}
+
+TEST_F(VideoUtilTest, GetNaturalSize_Fraction) {
gfx::Size visible_size(320, 240);
// Test 0 sizes.
@@ -199,11 +249,11 @@ TEST_F(VideoUtilTest, GetNaturalSize) {
EXPECT_EQ(gfx::Size(1, 0), GetNaturalSize(gfx::Size(1, 0), 1, 1));
// Test abnormal ratios.
- EXPECT_EQ(gfx::Size(0, 0), GetNaturalSize(visible_size, 0, 0));
- EXPECT_EQ(gfx::Size(0, 0), GetNaturalSize(visible_size, 0, 1));
- EXPECT_EQ(gfx::Size(0, 0), GetNaturalSize(visible_size, 1, 0));
- EXPECT_EQ(gfx::Size(0, 0), GetNaturalSize(visible_size, 1, -1));
- EXPECT_EQ(gfx::Size(0, 0), GetNaturalSize(visible_size, -1, 1));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_size, 0, 0));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_size, 0, 1));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_size, 1, 0));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_size, 1, -1));
+ EXPECT_EQ(gfx::Size(), GetNaturalSize(visible_size, -1, 1));
// Test normal sizes and ratios.
EXPECT_EQ(gfx::Size(320, 240), GetNaturalSize(visible_size, 1, 1));
diff --git a/chromium/media/base/watch_time_keys.cc b/chromium/media/base/watch_time_keys.cc
index 824388333a8..90f301cb55e 100644
--- a/chromium/media/base/watch_time_keys.cc
+++ b/chromium/media/base/watch_time_keys.cc
@@ -75,38 +75,15 @@ static const char kWatchTimeAudioVideoBackgroundAc[] =
static const char kWatchTimeAudioVideoBackgroundEmbeddedExperience[] =
"Media.WatchTime.AudioVideo.Background.EmbeddedExperience";
-static const char kWatchTimeVideoAll[] = "Media.WatchTime.Video.All";
-static const char kWatchTimeVideoMse[] = "Media.WatchTime.Video.MSE";
-static const char kWatchTimeVideoEme[] = "Media.WatchTime.Video.EME";
-static const char kWatchTimeVideoSrc[] = "Media.WatchTime.Video.SRC";
-static const char kWatchTimeVideoBattery[] = "Media.WatchTime.Video.Battery";
-static const char kWatchTimeVideoAc[] = "Media.WatchTime.Video.AC";
-static const char kWatchTimeVideoDisplayFullscreen[] =
- "Media.WatchTime.Video.DisplayFullscreen";
-static const char kWatchTimeVideoDisplayInline[] =
- "Media.WatchTime.Video.DisplayInline";
-static const char kWatchTimeVideoDisplayPictureInPicture[] =
- "Media.WatchTime.Video.DisplayPictureInPicture";
-static const char kWatchTimeVideoEmbeddedExperience[] =
- "Media.WatchTime.Video.EmbeddedExperience";
-static const char kWatchTimeVideoNativeControlsOn[] =
- "Media.WatchTime.Video.NativeControlsOn";
-static const char kWatchTimeVideoNativeControlsOff[] =
- "Media.WatchTime.Video.NativeControlsOff";
-static const char kWatchTimeVideoBackgroundAll[] =
- "Media.WatchTime.Video.Background.All";
-static const char kWatchTimeVideoBackgroundMse[] =
- "Media.WatchTime.Video.Background.MSE";
-static const char kWatchTimeVideoBackgroundEme[] =
- "Media.WatchTime.Video.Background.EME";
-static const char kWatchTimeVideoBackgroundSrc[] =
- "Media.WatchTime.Video.Background.SRC";
-static const char kWatchTimeVideoBackgroundBattery[] =
- "Media.WatchTime.Video.Background.Battery";
-static const char kWatchTimeVideoBackgroundAc[] =
- "Media.WatchTime.Video.Background.AC";
-static const char kWatchTimeVideoBackgroundEmbeddedExperience[] =
- "Media.WatchTime.Video.Background.EmbeddedExperience";
+// Audio+video muted watch time metrics.
+static const char kWatchTimeAudioVideoMutedAll[] =
+ "Media.WatchTime.AudioVideo.Muted.All";
+static const char kWatchTimeAudioVideoMutedMse[] =
+ "Media.WatchTime.AudioVideo.Muted.MSE";
+static const char kWatchTimeAudioVideoMutedEme[] =
+ "Media.WatchTime.AudioVideo.Muted.EME";
+static const char kWatchTimeAudioVideoMutedSrc[] =
+ "Media.WatchTime.AudioVideo.Muted.SRC";
const char kWatchTimeUnderflowCount[] = "UnderflowCount";
@@ -146,7 +123,8 @@ const char kDiscardedWatchTimeAudioVideoMse[] =
const char kDiscardedWatchTimeAudioVideoEme[] =
"Media.WatchTime.AudioVideo.Discarded.EME";
-base::StringPiece WatchTimeKeyToString(WatchTimeKey key) {
+base::StringPiece ConvertWatchTimeKeyToStringForUma(WatchTimeKey key) {
+ // WARNING: Returning a non-empty value will log the key to UMA.
switch (key) {
case WatchTimeKey::kAudioAll:
return kWatchTimeAudioAll;
@@ -218,44 +196,47 @@ base::StringPiece WatchTimeKeyToString(WatchTimeKey key) {
return kWatchTimeAudioVideoBackgroundAc;
case WatchTimeKey::kAudioVideoBackgroundEmbeddedExperience:
return kWatchTimeAudioVideoBackgroundEmbeddedExperience;
+ case WatchTimeKey::kAudioVideoMutedAll:
+ return kWatchTimeAudioVideoMutedAll;
+ case WatchTimeKey::kAudioVideoMutedMse:
+ return kWatchTimeAudioVideoMutedMse;
+ case WatchTimeKey::kAudioVideoMutedEme:
+ return kWatchTimeAudioVideoMutedEme;
+ case WatchTimeKey::kAudioVideoMutedSrc:
+ return kWatchTimeAudioVideoMutedSrc;
+ // WARNING: Returning a non-empty value will log the key to UMA.
+
+ // The following keys are not reported to UMA and thus have no conversion.
+ // We don't report keys to UMA that we don't have a strong use case for
+ // since UMA requires us to break out each state manually (ac, inline, etc).
+ case WatchTimeKey::kAudioVideoMutedBattery:
+ case WatchTimeKey::kAudioVideoMutedAc:
+ case WatchTimeKey::kAudioVideoMutedEmbeddedExperience:
+ case WatchTimeKey::kAudioVideoMutedDisplayFullscreen:
+ case WatchTimeKey::kAudioVideoMutedDisplayInline:
+ case WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture:
+ case WatchTimeKey::kAudioVideoMutedNativeControlsOn:
+ case WatchTimeKey::kAudioVideoMutedNativeControlsOff:
case WatchTimeKey::kVideoAll:
- return kWatchTimeVideoAll;
case WatchTimeKey::kVideoMse:
- return kWatchTimeVideoMse;
case WatchTimeKey::kVideoEme:
- return kWatchTimeVideoEme;
case WatchTimeKey::kVideoSrc:
- return kWatchTimeVideoSrc;
case WatchTimeKey::kVideoBattery:
- return kWatchTimeVideoBattery;
case WatchTimeKey::kVideoAc:
- return kWatchTimeVideoAc;
case WatchTimeKey::kVideoDisplayFullscreen:
- return kWatchTimeVideoDisplayFullscreen;
case WatchTimeKey::kVideoDisplayInline:
- return kWatchTimeVideoDisplayInline;
case WatchTimeKey::kVideoDisplayPictureInPicture:
- return kWatchTimeVideoDisplayPictureInPicture;
case WatchTimeKey::kVideoEmbeddedExperience:
- return kWatchTimeVideoEmbeddedExperience;
case WatchTimeKey::kVideoNativeControlsOn:
- return kWatchTimeVideoNativeControlsOn;
case WatchTimeKey::kVideoNativeControlsOff:
- return kWatchTimeVideoNativeControlsOff;
case WatchTimeKey::kVideoBackgroundAll:
- return kWatchTimeVideoBackgroundAll;
case WatchTimeKey::kVideoBackgroundMse:
- return kWatchTimeVideoBackgroundMse;
case WatchTimeKey::kVideoBackgroundEme:
- return kWatchTimeVideoBackgroundEme;
case WatchTimeKey::kVideoBackgroundSrc:
- return kWatchTimeVideoBackgroundSrc;
case WatchTimeKey::kVideoBackgroundBattery:
- return kWatchTimeVideoBackgroundBattery;
case WatchTimeKey::kVideoBackgroundAc:
- return kWatchTimeVideoBackgroundAc;
case WatchTimeKey::kVideoBackgroundEmbeddedExperience:
- return kWatchTimeVideoBackgroundEmbeddedExperience;
+ return base::StringPiece();
};
NOTREACHED();
diff --git a/chromium/media/base/watch_time_keys.h b/chromium/media/base/watch_time_keys.h
index 655fca20509..4007bcc9c15 100644
--- a/chromium/media/base/watch_time_keys.h
+++ b/chromium/media/base/watch_time_keys.h
@@ -47,6 +47,18 @@ enum class WatchTimeKey : int {
kAudioVideoBackgroundBattery,
kAudioVideoBackgroundAc,
kAudioVideoBackgroundEmbeddedExperience,
+ kAudioVideoMutedAll,
+ kAudioVideoMutedMse,
+ kAudioVideoMutedEme,
+ kAudioVideoMutedSrc,
+ kAudioVideoMutedBattery,
+ kAudioVideoMutedAc,
+ kAudioVideoMutedEmbeddedExperience,
+ kAudioVideoMutedDisplayFullscreen,
+ kAudioVideoMutedDisplayInline,
+ kAudioVideoMutedDisplayPictureInPicture,
+ kAudioVideoMutedNativeControlsOn,
+ kAudioVideoMutedNativeControlsOff,
kVideoAll,
kVideoMse,
kVideoEme,
@@ -98,7 +110,10 @@ MEDIA_EXPORT extern const char kDiscardedWatchTimeAudioVideoSrc[];
MEDIA_EXPORT extern const char kDiscardedWatchTimeAudioVideoMse[];
MEDIA_EXPORT extern const char kDiscardedWatchTimeAudioVideoEme[];
-MEDIA_EXPORT base::StringPiece WatchTimeKeyToString(WatchTimeKey key);
+// Returns the UMA key name associated with a given WatchTimeKey or an empty
+// string if the key should not be logged to UMA.
+MEDIA_EXPORT base::StringPiece ConvertWatchTimeKeyToStringForUma(
+ WatchTimeKey key);
} // namespace media
diff --git a/chromium/media/blink/BUILD.gn b/chromium/media/blink/BUILD.gn
index 3d215fd1d84..ce6a5393312 100644
--- a/chromium/media/blink/BUILD.gn
+++ b/chromium/media/blink/BUILD.gn
@@ -32,6 +32,8 @@ component("blink") {
"multibuffer_reader.h",
"new_session_cdm_result_promise.cc",
"new_session_cdm_result_promise.h",
+ "remote_playback_client_wrapper_impl.cc",
+ "remote_playback_client_wrapper_impl.h",
"resource_fetch_context.h",
"resource_multibuffer_data_provider.cc",
"resource_multibuffer_data_provider.h",
@@ -75,7 +77,6 @@ component("blink") {
deps = [
"//base",
"//cc",
- "//cc/blink",
"//gpu",
"//media",
"//media:shared_memory_support",
@@ -110,7 +111,6 @@ test("media_blink_unittests") {
"//base",
"//base/test:test_support",
"//cc",
- "//cc/blink",
"//gin",
"//media:test_support",
"//media/mojo/interfaces",
@@ -168,5 +168,8 @@ test("media_blink_unittests") {
} else {
deps += [ "//v8:v8_external_startup_data_assets" ]
}
+
+ # KeySystemConfigSelectorTest fails on Android (crbug.com/608541).
+ sources -= [ "key_system_config_selector_unittest.cc" ]
}
}
diff --git a/chromium/media/blink/DEPS b/chromium/media/blink/DEPS
index 0a96ffcf497..ac48ebad4be 100644
--- a/chromium/media/blink/DEPS
+++ b/chromium/media/blink/DEPS
@@ -1,5 +1,5 @@
include_rules = [
- "+cc/blink/web_layer_impl.h",
+ "+cc/layers/layer.h",
"+cc/layers/video_frame_provider.h",
"+cc/layers/video_layer.h",
"+components/scheduler", # Only allowed in tests.
diff --git a/chromium/media/blink/key_system_config_selector.cc b/chromium/media/blink/key_system_config_selector.cc
index 33bcbb3228c..127c1bb714f 100644
--- a/chromium/media/blink/key_system_config_selector.cc
+++ b/chromium/media/blink/key_system_config_selector.cc
@@ -20,7 +20,6 @@
#include "media/blink/webmediaplayer_util.h"
#include "third_party/blink/public/platform/url_conversion.h"
#include "third_party/blink/public/platform/web_media_key_system_configuration.h"
-#include "third_party/blink/public/platform/web_security_origin.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/public/platform/web_vector.h"
#include "url/gurl.h"
@@ -155,7 +154,6 @@ struct KeySystemConfigSelector::SelectionRequest {
std::string key_system;
blink::WebVector<blink::WebMediaKeySystemConfiguration>
candidate_configurations;
- blink::WebSecurityOrigin security_origin;
base::Callback<void(const blink::WebMediaKeySystemConfiguration&,
const CdmConfig&)> succeeded_cb;
base::Closure not_supported_cb;
@@ -352,8 +350,10 @@ bool KeySystemConfigSelector::IsSupportedContentType(
SplitCodecsToVector(codecs, &stripped_codec_vector, true);
EmeConfigRule codecs_rule = key_systems_->GetContentTypeConfigRule(
key_system, media_type, container_lower, stripped_codec_vector);
- if (!config_state->IsRuleSupported(codecs_rule))
+ if (!config_state->IsRuleSupported(codecs_rule)) {
+ DVLOG(3) << "Container mime type and codecs are not supported by CDM";
return false;
+ }
config_state->AddRule(codecs_rule);
return true;
@@ -849,7 +849,6 @@ void KeySystemConfigSelector::SelectConfig(
const blink::WebString& key_system,
const blink::WebVector<blink::WebMediaKeySystemConfiguration>&
candidate_configurations,
- const blink::WebSecurityOrigin& security_origin,
base::Callback<void(const blink::WebMediaKeySystemConfiguration&,
const CdmConfig&)> succeeded_cb,
base::Closure not_supported_cb) {
@@ -897,7 +896,6 @@ void KeySystemConfigSelector::SelectConfig(
std::unique_ptr<SelectionRequest> request(new SelectionRequest());
request->key_system = key_system_ascii;
request->candidate_configurations = candidate_configurations;
- request->security_origin = security_origin;
request->succeeded_cb = succeeded_cb;
request->not_supported_cb = not_supported_cb;
SelectConfigInternal(std::move(request));
@@ -936,16 +934,11 @@ void KeySystemConfigSelector::SelectConfigInternal(
<< "permission was denied.";
continue;
}
- {
- // Note: the GURL must not be constructed inline because
- // base::Passed(&request) sets |request| to null.
- GURL security_origin(url::Origin(request->security_origin).GetURL());
- DVLOG(3) << "Request permission.";
- media_permission_->RequestPermission(
- MediaPermission::PROTECTED_MEDIA_IDENTIFIER,
- base::Bind(&KeySystemConfigSelector::OnPermissionResult,
- weak_factory_.GetWeakPtr(), base::Passed(&request)));
- }
+ DVLOG(3) << "Request permission.";
+ media_permission_->RequestPermission(
+ MediaPermission::PROTECTED_MEDIA_IDENTIFIER,
+ base::Bind(&KeySystemConfigSelector::OnPermissionResult,
+ weak_factory_.GetWeakPtr(), base::Passed(&request)));
return;
case CONFIGURATION_SUPPORTED:
cdm_config.allow_distinctive_identifier =
diff --git a/chromium/media/blink/key_system_config_selector.h b/chromium/media/blink/key_system_config_selector.h
index e305412fa13..83bc6680635 100644
--- a/chromium/media/blink/key_system_config_selector.h
+++ b/chromium/media/blink/key_system_config_selector.h
@@ -21,7 +21,6 @@ namespace blink {
struct WebMediaKeySystemConfiguration;
struct WebMediaKeySystemMediaCapability;
-class WebSecurityOrigin;
class WebString;
} // namespace blink
@@ -43,7 +42,6 @@ class MEDIA_BLINK_EXPORT KeySystemConfigSelector {
const blink::WebString& key_system,
const blink::WebVector<blink::WebMediaKeySystemConfiguration>&
candidate_configurations,
- const blink::WebSecurityOrigin& security_origin,
base::Callback<void(const blink::WebMediaKeySystemConfiguration&,
const CdmConfig&)> succeeded_cb,
base::Closure not_supported_cb);
diff --git a/chromium/media/blink/key_system_config_selector_unittest.cc b/chromium/media/blink/key_system_config_selector_unittest.cc
index d99fb89bdca..25d2be09bf9 100644
--- a/chromium/media/blink/key_system_config_selector_unittest.cc
+++ b/chromium/media/blink/key_system_config_selector_unittest.cc
@@ -7,7 +7,6 @@
#include "base/bind.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "media/base/eme_constants.h"
#include "media/base/key_systems.h"
@@ -16,7 +15,6 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/web_encrypted_media_types.h"
#include "third_party/blink/public/platform/web_media_key_system_configuration.h"
-#include "third_party/blink/public/platform/web_security_origin.h"
#include "third_party/blink/public/platform/web_string.h"
#include "url/gurl.h"
@@ -43,8 +41,6 @@ const char kUnsupportedCodec[] = "foo";
const char kUnsupportedCodecs[] = "vp8,foo";
const char kSupportedVideoCodecs[] = "vp8,vp8";
-const char kDefaultSecurityOrigin[] = "https://example.com/";
-
const char kClearKey[] = "org.w3.clearkey";
// The IDL for MediaKeySystemConfiguration specifies some defaults, so
@@ -107,6 +103,14 @@ class FakeKeySystems : public KeySystems {
return false;
}
+ bool IsEncryptionSchemeSupported(
+ const std::string& key_system,
+ EncryptionMode encryption_scheme) const override {
+ // TODO(crbug.com/658026): Implement this once value passed from blink.
+ NOTREACHED();
+ return false;
+ }
+
// TODO(sandersd): Secure codec simulation.
EmeConfigRule GetContentTypeConfigRule(
const std::string& key_system,
@@ -228,7 +232,7 @@ class KeySystemConfigSelectorTest : public testing::Test {
succeeded_count_ = 0;
not_supported_count_ = 0;
KeySystemConfigSelector(key_systems_.get(), media_permission_.get())
- .SelectConfig(key_system_, configs_, security_origin_,
+ .SelectConfig(key_system_, configs_,
base::Bind(&KeySystemConfigSelectorTest::OnSucceeded,
base::Unretained(this)),
base::Bind(&KeySystemConfigSelectorTest::OnNotSupported,
@@ -281,8 +285,6 @@ class KeySystemConfigSelectorTest : public testing::Test {
// Held values for the call to SelectConfig().
blink::WebString key_system_ = blink::WebString::FromUTF8(kSupported);
std::vector<blink::WebMediaKeySystemConfiguration> configs_;
- blink::WebSecurityOrigin security_origin_ =
- blink::WebSecurityOrigin::CreateFromString(kDefaultSecurityOrigin);
// Holds the last successful accumulated configuration.
blink::WebMediaKeySystemConfiguration config_;
diff --git a/chromium/media/blink/mock_webassociatedurlloader.h b/chromium/media/blink/mock_webassociatedurlloader.h
index fccb7b3006f..6b7c320eb83 100644
--- a/chromium/media/blink/mock_webassociatedurlloader.h
+++ b/chromium/media/blink/mock_webassociatedurlloader.h
@@ -15,7 +15,7 @@ namespace media {
class MockWebAssociatedURLLoader : public blink::WebAssociatedURLLoader {
public:
MockWebAssociatedURLLoader();
- virtual ~MockWebAssociatedURLLoader();
+ ~MockWebAssociatedURLLoader() override;
MOCK_METHOD2(LoadAsynchronously,
void(const blink::WebURLRequest& request,
diff --git a/chromium/media/blink/multibuffer_data_source.cc b/chromium/media/blink/multibuffer_data_source.cc
index bb36c6cd1ab..6932acd8349 100644
--- a/chromium/media/blink/multibuffer_data_source.cc
+++ b/chromium/media/blink/multibuffer_data_source.cc
@@ -357,8 +357,17 @@ void MultibufferDataSource::OnBufferingHaveEnough(bool always_cancel) {
if (reader_ && (always_cancel || (preload_ == METADATA &&
!media_has_played_ && !IsStreaming()))) {
cancel_on_defer_ = true;
- if (!loading_)
- SetReader(nullptr);
+ if (!loading_) {
+ base::AutoLock auto_lock(lock_);
+ if (read_op_) {
+ // We can't destroy the reader if a read operation is pending.
+ // UpdateLoadingState_Locked will take care of it after the
+ // operation is done.
+ return;
+ }
+ // Already locked, no need to use SetReader().
+ reader_.reset(nullptr);
+ }
}
}
diff --git a/chromium/media/blink/multibuffer_data_source_unittest.cc b/chromium/media/blink/multibuffer_data_source_unittest.cc
index d8975fa4b2a..89793c796b4 100644
--- a/chromium/media/blink/multibuffer_data_source_unittest.cc
+++ b/chromium/media/blink/multibuffer_data_source_unittest.cc
@@ -7,7 +7,6 @@
#include "base/bind.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "media/base/media_log.h"
@@ -161,7 +160,7 @@ class TestUrlIndex : public UrlIndex {
class MockBufferedDataSourceHost : public BufferedDataSourceHost {
public:
MockBufferedDataSourceHost() = default;
- virtual ~MockBufferedDataSourceHost() = default;
+ ~MockBufferedDataSourceHost() override = default;
MOCK_METHOD1(SetTotalBytes, void(int64_t total_bytes));
MOCK_METHOD2(AddBufferedByteRange, void(int64_t start, int64_t end));
@@ -226,8 +225,8 @@ class MultibufferDataSourceTest : public testing::Test {
size_t file_size = kFileSize) {
GURL gurl(url);
data_source_.reset(new MockMultibufferDataSource(
- message_loop_.task_runner(), url_index_->GetByUrl(gurl, cors_mode),
- &host_));
+ base::ThreadTaskRunnerHandle::Get(),
+ url_index_->GetByUrl(gurl, cors_mode), &host_));
data_source_->SetPreload(preload_);
response_generator_.reset(new TestResponseGenerator(gurl, file_size));
@@ -320,7 +319,7 @@ class MultibufferDataSourceTest : public testing::Test {
void FinishLoading() {
EXPECT_TRUE(active_loader());
- data_provider()->DidFinishLoading(0);
+ data_provider()->DidFinishLoading();
base::RunLoop().RunUntilIdle();
}
@@ -449,7 +448,6 @@ class MultibufferDataSourceTest : public testing::Test {
}
protected:
- base::MessageLoop message_loop_;
MultibufferDataSource::Preload preload_;
NiceMock<MockResourceFetchContext> fetch_context_;
std::unique_ptr<TestUrlIndex> url_index_;
@@ -989,7 +987,7 @@ TEST_F(MultibufferDataSourceTest, Http_ShareData) {
StrictMock<MockBufferedDataSourceHost> host2;
MockMultibufferDataSource source2(
- message_loop_.task_runner(),
+ base::ThreadTaskRunnerHandle::Get(),
url_index_->GetByUrl(GURL(kHttpUrl), UrlData::CORS_UNSPECIFIED), &host2);
source2.SetPreload(preload_);
@@ -1253,6 +1251,41 @@ TEST_F(MultibufferDataSourceTest,
EXPECT_FALSE(active_loader_allownull());
}
+// This test tries to trigger an edge case where the read callback
+// never happens because the reader is deleted before that happens.
+TEST_F(MultibufferDataSourceTest,
+ ExternalResource_Response206_CancelAfterDefer3) {
+ set_preload(MultibufferDataSource::METADATA);
+ InitializeWith206Response();
+
+ EXPECT_EQ(MultibufferDataSource::METADATA, preload());
+ EXPECT_FALSE(is_local_source());
+
+ EXPECT_TRUE(data_source_->range_supported());
+ CheckReadThenDefer();
+
+ ReadAt(kDataSize);
+ ASSERT_TRUE(active_loader());
+
+ EXPECT_CALL(*this, ReadCallback(kDataSize));
+ EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 2));
+ ReceiveData(kDataSize);
+
+ EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 3));
+ ReceiveData(kDataSize);
+
+ EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 4));
+ ReceiveData(kDataSize);
+ EXPECT_EQ(data_source_->downloading(), false);
+ data_source_->Read(kDataSize * 10, kDataSize, buffer_,
+ base::Bind(&MultibufferDataSourceTest::ReadCallback,
+ base::Unretained(this)));
+ data_source_->OnBufferingHaveEnough(false);
+ EXPECT_TRUE(active_loader_allownull());
+ EXPECT_CALL(*this, ReadCallback(-1));
+ Stop();
+}
+
TEST_F(MultibufferDataSourceTest,
ExternalResource_Response206_CancelAfterPlay) {
set_preload(MultibufferDataSource::METADATA);
@@ -1297,7 +1330,7 @@ TEST_F(MultibufferDataSourceTest,
TEST_F(MultibufferDataSourceTest, SeekPastEOF) {
GURL gurl(kHttpUrl);
data_source_.reset(new MockMultibufferDataSource(
- message_loop_.task_runner(),
+ base::ThreadTaskRunnerHandle::Get(),
url_index_->GetByUrl(gurl, UrlData::CORS_UNSPECIFIED), &host_));
data_source_->SetPreload(preload_);
@@ -1604,7 +1637,7 @@ TEST_F(MultibufferDataSourceTest, CheckBufferSizeAfterReadingALot) {
TEST_F(MultibufferDataSourceTest, Http_CheckLoadingTransition) {
GURL gurl(kHttpUrl);
data_source_.reset(new MockMultibufferDataSource(
- message_loop_.task_runner(),
+ base::ThreadTaskRunnerHandle::Get(),
url_index_->GetByUrl(gurl, UrlData::CORS_UNSPECIFIED), &host_));
data_source_->SetPreload(preload_);
@@ -1626,7 +1659,7 @@ TEST_F(MultibufferDataSourceTest, Http_CheckLoadingTransition) {
EXPECT_CALL(host_, AddBufferedByteRange(kDataSize, kDataSize + 1));
ReceiveDataLow(1);
EXPECT_CALL(host_, AddBufferedByteRange(0, kDataSize * 3));
- data_provider()->DidFinishLoading(0);
+ data_provider()->DidFinishLoading();
EXPECT_CALL(*this, ReadCallback(1));
data_source_->Read(kDataSize, 2, buffer_,
diff --git a/chromium/media/blink/multibuffer_unittest.cc b/chromium/media/blink/multibuffer_unittest.cc
index 9ecce0cffcc..715adf11f3a 100644
--- a/chromium/media/blink/multibuffer_unittest.cc
+++ b/chromium/media/blink/multibuffer_unittest.cc
@@ -11,7 +11,6 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/containers/circular_deque.h"
-#include "base/message_loop/message_loop.h"
#include "base/test/simple_test_tick_clock.h"
#include "media/base/fake_single_thread_task_runner.h"
#include "media/base/test_random.h"
@@ -254,9 +253,6 @@ class MultiBufferTest : public testing::Test {
scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
scoped_refptr<MultiBuffer::GlobalLRU> lru_;
TestMultiBuffer multibuffer_;
-
- // TODO(hubbe): Make MultiBufferReader take a task_runner_
- base::MessageLoop message_loop_;
};
TEST_F(MultiBufferTest, ReadAll) {
diff --git a/chromium/media/blink/remote_playback_client_wrapper_impl.cc b/chromium/media/blink/remote_playback_client_wrapper_impl.cc
new file mode 100644
index 00000000000..66d9950553e
--- /dev/null
+++ b/chromium/media/blink/remote_playback_client_wrapper_impl.cc
@@ -0,0 +1,30 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/blink/remote_playback_client_wrapper_impl.h"
+
+#include "third_party/blink/public/platform/modules/remoteplayback/web_remote_playback_client.h"
+#include "third_party/blink/public/platform/web_media_player_client.h"
+#include "third_party/blink/public/platform/web_string.h"
+
+namespace media {
+
+RemotePlaybackClientWrapperImpl::RemotePlaybackClientWrapperImpl(
+ blink::WebMediaPlayerClient* client)
+ : remote_playback_client_(client->RemotePlaybackClient()) {}
+
+RemotePlaybackClientWrapperImpl::~RemotePlaybackClientWrapperImpl() = default;
+
+std::string RemotePlaybackClientWrapperImpl::GetActivePresentationId() {
+ if (!remote_playback_client_)
+ return std::string();
+
+  // The presentation ID is essentially a GUID preceded by the "mr_" prefix,
+  // which makes it ASCII compatible.
+ // If MediaRouterBase::CreatePresentationId() were changed, this line might
+ // need to be updated.
+ return remote_playback_client_->GetPresentationId().Ascii();
+}
+
+} // namespace media
diff --git a/chromium/media/blink/remote_playback_client_wrapper_impl.h b/chromium/media/blink/remote_playback_client_wrapper_impl.h
new file mode 100644
index 00000000000..ef453aba3f8
--- /dev/null
+++ b/chromium/media/blink/remote_playback_client_wrapper_impl.h
@@ -0,0 +1,36 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BLINK_REMOTE_PLAYBACK_CLIENT_WRAPPER_IMPL_H_
+#define MEDIA_BLINK_REMOTE_PLAYBACK_CLIENT_WRAPPER_IMPL_H_
+
+#include <string>
+
+#include "media/blink/media_blink_export.h"
+#include "media/renderers/remote_playback_client_wrapper.h"
+
+namespace blink {
+class WebMediaPlayerClient;
+class WebRemotePlaybackClient;
+} // namespace blink
+
+namespace media {
+
+// Wraps a WebRemotePlaybackClient to expose only the methods used by the
+// FlingingRendererClientFactory. This avoids dependencies on the blink layer.
+class MEDIA_BLINK_EXPORT RemotePlaybackClientWrapperImpl
+ : public RemotePlaybackClientWrapper {
+ public:
+ explicit RemotePlaybackClientWrapperImpl(blink::WebMediaPlayerClient* client);
+ ~RemotePlaybackClientWrapperImpl() override;
+
+ std::string GetActivePresentationId() override;
+
+ private:
+ blink::WebRemotePlaybackClient* remote_playback_client_;
+};
+
+} // namespace media
+
+#endif // MEDIA_BLINK_REMOTE_PLAYBACK_CLIENT_WRAPPER_IMPL_H_
diff --git a/chromium/media/blink/resource_multibuffer_data_provider.cc b/chromium/media/blink/resource_multibuffer_data_provider.cc
index 244a5fd6401..98ca8d3c347 100644
--- a/chromium/media/blink/resource_multibuffer_data_provider.cc
+++ b/chromium/media/blink/resource_multibuffer_data_provider.cc
@@ -439,7 +439,7 @@ void ResourceMultiBufferDataProvider::DidReceiveCachedMetadata(
NOTIMPLEMENTED();
}
-void ResourceMultiBufferDataProvider::DidFinishLoading(double finishTime) {
+void ResourceMultiBufferDataProvider::DidFinishLoading() {
DVLOG(1) << "didFinishLoading";
DCHECK(active_loader_.get());
DCHECK(!Available());
diff --git a/chromium/media/blink/resource_multibuffer_data_provider.h b/chromium/media/blink/resource_multibuffer_data_provider.h
index 03eeef76ae0..1260e70e24d 100644
--- a/chromium/media/blink/resource_multibuffer_data_provider.h
+++ b/chromium/media/blink/resource_multibuffer_data_provider.h
@@ -58,7 +58,7 @@ class MEDIA_BLINK_EXPORT ResourceMultiBufferDataProvider
void DidDownloadData(int data_length) override;
void DidReceiveData(const char* data, int data_length) override;
void DidReceiveCachedMetadata(const char* data, int dataLength) override;
- void DidFinishLoading(double finishTime) override;
+ void DidFinishLoading() override;
void DidFail(const blink::WebURLError&) override;
// Use protected instead of private for testing purposes.
diff --git a/chromium/media/blink/resource_multibuffer_data_provider_unittest.cc b/chromium/media/blink/resource_multibuffer_data_provider_unittest.cc
index 98c3ca90110..ac2a64f0412 100644
--- a/chromium/media/blink/resource_multibuffer_data_provider_unittest.cc
+++ b/chromium/media/blink/resource_multibuffer_data_provider_unittest.cc
@@ -12,7 +12,6 @@
#include "base/bind.h"
#include "base/format_macros.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/strings/stringprintf.h"
#include "media/base/media_log.h"
@@ -215,7 +214,6 @@ class ResourceMultiBufferDataProviderTest : public testing::Test {
return url_loader;
}
- base::MessageLoop message_loop_;
GURL gurl_;
int64_t first_position_;
diff --git a/chromium/media/blink/run_all_unittests.cc b/chromium/media/blink/run_all_unittests.cc
index 99e0fb89781..fbdf9140204 100644
--- a/chromium/media/blink/run_all_unittests.cc
+++ b/chromium/media/blink/run_all_unittests.cc
@@ -2,16 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include <stddef.h>
-
#include "base/bind.h"
-#include "base/message_loop/message_loop.h"
+#include "base/macros.h"
#include "base/test/launcher/unit_test_launcher.h"
+#include "base/test/scoped_task_environment.h"
#include "base/test/test_suite.h"
#include "build/build_config.h"
#include "media/base/media.h"
#include "services/service_manager/public/cpp/binder_registry.h"
-#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/scheduler/test/renderer_scheduler_test_support.h"
#include "third_party/blink/public/platform/scheduler/web_main_thread_scheduler.h"
#include "third_party/blink/public/platform/web_thread.h"
@@ -25,68 +23,46 @@
#include "mojo/edk/embedder/embedder.h"
#endif
-#ifdef V8_USE_EXTERNAL_STARTUP_DATA
+#if defined(V8_USE_EXTERNAL_STARTUP_DATA)
#include "gin/v8_initializer.h"
-#endif
-namespace {
-#if defined(V8_USE_EXTERNAL_STARTUP_DATA)
-#if defined(USE_V8_CONTEXT_SNAPSHOT)
constexpr gin::V8Initializer::V8SnapshotFileType kSnapshotType =
+#if defined(USE_V8_CONTEXT_SNAPSHOT)
gin::V8Initializer::V8SnapshotFileType::kWithAdditionalContext;
#else
-constexpr gin::V8Initializer::V8SnapshotFileType kSnapshotType =
gin::V8Initializer::V8SnapshotFileType::kDefault;
-#endif
-#endif
-}
+#endif // defined(USE_V8_CONTEXT_SNAPSHOT)
+#endif // defined(V8_USE_EXTERNAL_STARTUP_DATA)
-class TestBlinkPlatformSupport : public blink::Platform {
+// We must use a custom blink::Platform that ensures the main thread scheduler
+// knows about the ScopedTaskEnvironment.
+class BlinkPlatformWithTaskEnvironment : public blink::Platform {
public:
- TestBlinkPlatformSupport()
+ BlinkPlatformWithTaskEnvironment()
: main_thread_scheduler_(
blink::scheduler::CreateWebMainThreadSchedulerForTests()),
main_thread_(main_thread_scheduler_->CreateMainThread()) {}
- ~TestBlinkPlatformSupport() override;
+ ~BlinkPlatformWithTaskEnvironment() override {
+ main_thread_scheduler_->Shutdown();
+ }
+
+ protected:
blink::WebThread* CurrentThread() override {
- EXPECT_TRUE(main_thread_->IsCurrentThread());
+ CHECK(main_thread_->IsCurrentThread());
return main_thread_.get();
}
private:
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
std::unique_ptr<blink::scheduler::WebMainThreadScheduler>
main_thread_scheduler_;
std::unique_ptr<blink::WebThread> main_thread_;
-};
-TestBlinkPlatformSupport::~TestBlinkPlatformSupport() {
- main_thread_scheduler_->Shutdown();
-}
-
-class BlinkMediaTestSuite : public base::TestSuite {
- public:
- BlinkMediaTestSuite(int argc, char** argv);
- ~BlinkMediaTestSuite() override;
-
- protected:
- void Initialize() override;
-
- private:
- std::unique_ptr<TestBlinkPlatformSupport> blink_platform_support_;
+ DISALLOW_COPY_AND_ASSIGN(BlinkPlatformWithTaskEnvironment);
};
-BlinkMediaTestSuite::BlinkMediaTestSuite(int argc, char** argv)
- : TestSuite(argc, argv),
- blink_platform_support_(new TestBlinkPlatformSupport()) {
-}
-
-BlinkMediaTestSuite::~BlinkMediaTestSuite() = default;
-
-void BlinkMediaTestSuite::Initialize() {
- // Run TestSuite::Initialize first so that logging is initialized.
- base::TestSuite::Initialize();
-
+static int RunTests(base::TestSuite* test_suite) {
#if defined(OS_ANDROID)
if (media::MediaCodecUtil::IsMediaCodecAvailable())
media::EnablePlatformDecoderSupport();
@@ -96,29 +72,26 @@ void BlinkMediaTestSuite::Initialize() {
// present.
media::InitializeMediaLibrary();
-#ifdef V8_USE_EXTERNAL_STARTUP_DATA
+#if defined(V8_USE_EXTERNAL_STARTUP_DATA)
gin::V8Initializer::LoadV8Snapshot(kSnapshotType);
gin::V8Initializer::LoadV8Natives();
#endif
-// Initialize mojo firstly to enable Blink initialization to use it.
#if !defined(OS_IOS)
+ // Initialize mojo firstly to enable Blink initialization to use it.
mojo::edk::Init();
#endif
- // Dummy task runner is initialized here because the blink::initialize creates
- // IsolateHolder which needs the current task runner handle. There should be
- // no task posted to this task runner.
- std::unique_ptr<base::MessageLoop> message_loop;
- if (!base::MessageLoop::current())
- message_loop.reset(new base::MessageLoop());
+
+ BlinkPlatformWithTaskEnvironment platform_;
service_manager::BinderRegistry empty_registry;
- blink::Initialize(blink_platform_support_.get(), &empty_registry);
+ blink::Initialize(&platform_, &empty_registry);
+
+ return test_suite->Run();
}
int main(int argc, char** argv) {
- BlinkMediaTestSuite test_suite(argc, argv);
-
+ base::TestSuite test_suite(argc, argv);
return base::LaunchUnitTests(
- argc, argv, base::Bind(&BlinkMediaTestSuite::Run,
- base::Unretained(&test_suite)));
+ argc, argv,
+ base::BindRepeating(&RunTests, base::Unretained(&test_suite)));
}
diff --git a/chromium/media/blink/texttrack_impl.cc b/chromium/media/blink/texttrack_impl.cc
index bad911b7177..e976c247968 100644
--- a/chromium/media/blink/texttrack_impl.cc
+++ b/chromium/media/blink/texttrack_impl.cc
@@ -27,29 +27,24 @@ TextTrackImpl::TextTrackImpl(
}
TextTrackImpl::~TextTrackImpl() {
- task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&TextTrackImpl::OnRemoveTrack,
- client_,
- base::Passed(&text_track_)));
+ task_runner_->PostTask(FROM_HERE,
+ base::Bind(&TextTrackImpl::OnRemoveTrack, client_,
+ base::Passed(&text_track_)));
}
-void TextTrackImpl::addWebVTTCue(const base::TimeDelta& start,
- const base::TimeDelta& end,
+void TextTrackImpl::addWebVTTCue(base::TimeDelta start,
+ base::TimeDelta end,
const std::string& id,
const std::string& content,
const std::string& settings) {
task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&TextTrackImpl::OnAddCue,
- text_track_.get(),
- start, end,
- id, content, settings));
+ FROM_HERE, base::Bind(&TextTrackImpl::OnAddCue, text_track_.get(), start,
+ end, id, content, settings));
}
void TextTrackImpl::OnAddCue(WebInbandTextTrackImpl* text_track,
- const base::TimeDelta& start,
- const base::TimeDelta& end,
+ base::TimeDelta start,
+ base::TimeDelta end,
const std::string& id,
const std::string& content,
const std::string& settings) {
diff --git a/chromium/media/blink/texttrack_impl.h b/chromium/media/blink/texttrack_impl.h
index cd90394b3bd..f83c13edeac 100644
--- a/chromium/media/blink/texttrack_impl.h
+++ b/chromium/media/blink/texttrack_impl.h
@@ -32,16 +32,16 @@ class TextTrackImpl : public TextTrack {
~TextTrackImpl() override;
- void addWebVTTCue(const base::TimeDelta& start,
- const base::TimeDelta& end,
+ void addWebVTTCue(base::TimeDelta start,
+ base::TimeDelta end,
const std::string& id,
const std::string& content,
const std::string& settings) override;
private:
static void OnAddCue(WebInbandTextTrackImpl* text_track,
- const base::TimeDelta& start,
- const base::TimeDelta& end,
+ base::TimeDelta start,
+ base::TimeDelta end,
const std::string& id,
const std::string& content,
const std::string& settings);
diff --git a/chromium/media/blink/url_index_unittest.cc b/chromium/media/blink/url_index_unittest.cc
index b8b23bfdb4e..9a309cb8d39 100644
--- a/chromium/media/blink/url_index_unittest.cc
+++ b/chromium/media/blink/url_index_unittest.cc
@@ -8,7 +8,6 @@
#include <string>
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/strings/stringprintf.h"
#include "media/blink/url_index.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -27,8 +26,6 @@ class UrlIndexTest : public testing::Test {
return ret;
}
- // TODO(hubbe): Make UrlIndex take a task_runner_
- base::MessageLoop message_loop_;
UrlIndex url_index_;
};
diff --git a/chromium/media/blink/video_decode_stats_reporter.h b/chromium/media/blink/video_decode_stats_reporter.h
index e8fd8937dd9..6ce269076a7 100644
--- a/chromium/media/blink/video_decode_stats_reporter.h
+++ b/chromium/media/blink/video_decode_stats_reporter.h
@@ -7,6 +7,7 @@
#include "base/macros.h"
#include "base/memory/ptr_util.h"
+#include "base/single_thread_task_runner.h"
#include "base/time/default_tick_clock.h"
#include "base/time/tick_clock.h"
#include "base/time/time.h"
diff --git a/chromium/media/blink/video_decode_stats_reporter_unittest.cc b/chromium/media/blink/video_decode_stats_reporter_unittest.cc
index 313e0ebe472..9b8d7d39161 100644
--- a/chromium/media/blink/video_decode_stats_reporter_unittest.cc
+++ b/chromium/media/blink/video_decode_stats_reporter_unittest.cc
@@ -6,7 +6,8 @@
#include "base/memory/ptr_util.h"
#include "base/memory/ref_counted.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_loop_current.h"
+#include "base/single_thread_task_runner.h"
#include "base/test/test_mock_time_task_runner.h"
#include "base/time/time.h"
#include "media/base/media_util.h"
@@ -71,7 +72,7 @@ class RecordInterceptor : public mojom::VideoDecodeStatsRecorder {
~RecordInterceptor() override = default;
// Until move-only types work.
- void StartNewRecord(mojom::PredictionFeaturesPtr features) {
+ void StartNewRecord(mojom::PredictionFeaturesPtr features) override {
MockStartNewRecord(features->profile, features->video_size,
features->frames_per_sec);
}
@@ -81,7 +82,7 @@ class RecordInterceptor : public mojom::VideoDecodeStatsRecorder {
const gfx::Size& natural_size,
int frames_per_sec));
- void UpdateRecord(mojom::PredictionTargetsPtr targets) {
+ void UpdateRecord(mojom::PredictionTargetsPtr targets) override {
MockUpdateRecord(targets->frames_decoded, targets->frames_dropped,
targets->frames_decoded_power_efficient);
}
@@ -102,8 +103,10 @@ class VideoDecodeStatsReporterTest : public ::testing::Test {
void SetUp() override {
// Do this first. Lots of pieces depend on the task runner.
+ auto message_loop = base::MessageLoopCurrent::Get();
+ original_task_runner_ = message_loop.task_runner();
task_runner_ = new base::TestMockTimeTaskRunner();
- message_loop_.SetTaskRunner(task_runner_);
+ message_loop.SetTaskRunner(task_runner_);
// Make reporter with default configuration. Connects RecordInterceptor as
// remote mojo VideoDecodeStatsRecorder.
@@ -118,12 +121,11 @@ class VideoDecodeStatsReporterTest : public ::testing::Test {
void TearDown() override {
// Break the IPC connection if reporter still around.
- if (reporter_.get()) {
- reporter_.reset();
- }
+ reporter_.reset();
// Run task runner to have Mojo cleanup interceptor_.
task_runner_->RunUntilIdle();
+ base::MessageLoopCurrent::Get().SetTaskRunner(original_task_runner_);
}
PipelineStatistics MakeAdvancingDecodeStats() {
@@ -344,13 +346,11 @@ class VideoDecodeStatsReporterTest : public ::testing::Test {
// Placed as a class member to avoid static initialization costs.
const gfx::Size kDefaultSize_;
- // Put first so it will be destructed *last*. We must let users of the
- // message loop (e.g. reporter_) destruct before destructing the loop itself.
- base::MessageLoop message_loop_;
-
- // Task runner that allows for manual advancing of time. Instantiated and
- // used by message_loop_ in Setup().
+ // Task runner that allows for manual advancing of time. Instantiated during
+ // Setup(). |original_task_runner_| is a copy of the TaskRunner in place prior
+ // to the start of this test. It's restored after the test completes.
scoped_refptr<base::TestMockTimeTaskRunner> task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> original_task_runner_;
// Points to the interceptor that acts as a VideoDecodeStatsRecorder. The
// object is owned by VideoDecodeStatsRecorderPtr, which is itself owned by
diff --git a/chromium/media/blink/video_frame_compositor.cc b/chromium/media/blink/video_frame_compositor.cc
index 5cd81acb392..b71fbc5ec67 100644
--- a/chromium/media/blink/video_frame_compositor.cc
+++ b/chromium/media/blink/video_frame_compositor.cc
@@ -6,7 +6,6 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
-#include "base/message_loop/message_loop.h"
#include "base/time/default_tick_clock.h"
#include "base/trace_event/auto_open_close_event.h"
#include "base/trace_event/trace_event.h"
@@ -65,11 +64,13 @@ VideoFrameCompositor::~VideoFrameCompositor() {
client_->StopUsingProvider();
}
-void VideoFrameCompositor::EnableSubmission(const viz::FrameSinkId& id,
- media::VideoRotation rotation) {
+void VideoFrameCompositor::EnableSubmission(
+ const viz::FrameSinkId& id,
+ media::VideoRotation rotation,
+ blink::WebFrameSinkDestroyedCallback frame_sink_destroyed_callback) {
DCHECK(task_runner_->BelongsToCurrentThread());
submitter_->SetRotation(rotation);
- submitter_->StartSubmitting(id);
+ submitter_->EnableSubmission(id, std::move(frame_sink_destroyed_callback));
client_ = submitter_.get();
}
diff --git a/chromium/media/blink/video_frame_compositor.h b/chromium/media/blink/video_frame_compositor.h
index 4772acaf22e..bb2e58c352b 100644
--- a/chromium/media/blink/video_frame_compositor.h
+++ b/chromium/media/blink/video_frame_compositor.h
@@ -80,8 +80,10 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// Signals the VideoFrameSubmitter to prepare to receive BeginFrames and
// submit video frames given by VideoFrameCompositor.
- virtual void EnableSubmission(const viz::FrameSinkId& id,
- media::VideoRotation rotation);
+ virtual void EnableSubmission(
+ const viz::FrameSinkId& id,
+ media::VideoRotation rotation,
+ blink::WebFrameSinkDestroyedCallback frame_sink_destroyed_callback);
// cc::VideoFrameProvider implementation. These methods must be called on the
// |task_runner_|.
diff --git a/chromium/media/blink/video_frame_compositor_unittest.cc b/chromium/media/blink/video_frame_compositor_unittest.cc
index ca4f7e893ff..8585ce97874 100644
--- a/chromium/media/blink/video_frame_compositor_unittest.cc
+++ b/chromium/media/blink/video_frame_compositor_unittest.cc
@@ -5,7 +5,6 @@
#include "media/blink/video_frame_compositor.h"
#include "base/bind.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/simple_test_tick_clock.h"
@@ -28,7 +27,8 @@ class MockWebVideoFrameSubmitter : public blink::WebVideoFrameSubmitter {
public:
// blink::WebVideoFrameSubmitter implementation.
void StopUsingProvider() override {}
- MOCK_METHOD1(StartSubmitting, void(const viz::FrameSinkId&));
+ MOCK_METHOD2(EnableSubmission,
+ void(viz::FrameSinkId, blink::WebFrameSinkDestroyedCallback));
MOCK_METHOD0(StartRendering, void());
MOCK_METHOD0(StopRendering, void());
MOCK_METHOD1(Initialize, void(cc::VideoFrameProvider*));
@@ -47,7 +47,7 @@ class VideoFrameCompositorTest : public VideoRendererSink::RenderCallback,
VideoFrameCompositorTest()
: client_(new StrictMock<MockWebVideoFrameSubmitter>()) {}
- void SetUp() {
+ void SetUp() override {
if (IsSurfaceLayerForVideoEnabled()) {
feature_list_.InitFromCommandLine("UseSurfaceLayerForVideo", "");
@@ -60,18 +60,19 @@ class VideoFrameCompositorTest : public VideoRendererSink::RenderCallback,
if (!IsSurfaceLayerForVideoEnabled()) {
compositor_ = std::make_unique<VideoFrameCompositor>(
- message_loop.task_runner(), nullptr);
+ base::ThreadTaskRunnerHandle::Get(), nullptr);
compositor_->SetVideoFrameProviderClient(client_.get());
} else {
EXPECT_CALL(*submitter_, Initialize(_));
compositor_ = std::make_unique<VideoFrameCompositor>(
- message_loop.task_runner(), std::move(client_));
+ base::ThreadTaskRunnerHandle::Get(), std::move(client_));
base::RunLoop().RunUntilIdle();
EXPECT_CALL(*submitter_,
SetRotation(Eq(media::VideoRotation::VIDEO_ROTATION_90)));
- EXPECT_CALL(*submitter_, StartSubmitting(_));
+ EXPECT_CALL(*submitter_, EnableSubmission(Eq(viz::FrameSinkId(1, 1)), _));
compositor_->EnableSubmission(viz::FrameSinkId(1, 1),
- media::VideoRotation::VIDEO_ROTATION_90);
+ media::VideoRotation::VIDEO_ROTATION_90,
+ base::BindRepeating([] {}));
}
compositor_->set_tick_clock_for_testing(&tick_clock_);
@@ -125,7 +126,6 @@ class VideoFrameCompositorTest : public VideoRendererSink::RenderCallback,
compositor()->PutCurrentFrame();
}
- base::MessageLoop message_loop;
base::SimpleTestTickClock tick_clock_;
StrictMock<MockWebVideoFrameSubmitter>* submitter_;
std::unique_ptr<StrictMock<MockWebVideoFrameSubmitter>> client_;
diff --git a/chromium/media/blink/watch_time_reporter.cc b/chromium/media/blink/watch_time_reporter.cc
index 968393cae91..49a4ebf7b24 100644
--- a/chromium/media/blink/watch_time_reporter.cc
+++ b/chromium/media/blink/watch_time_reporter.cc
@@ -22,33 +22,54 @@ WatchTimeReporter::WatchTimeReporter(
mojom::PlaybackPropertiesPtr properties,
GetMediaTimeCB get_media_time_cb,
mojom::MediaMetricsProvider* provider,
- scoped_refptr<base::SequencedTaskRunner> task_runner)
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ const base::TickClock* tick_clock)
: WatchTimeReporter(std::move(properties),
false /* is_background */,
+ false /* is_muted */,
std::move(get_media_time_cb),
provider,
- task_runner) {}
+ task_runner,
+ tick_clock) {}
WatchTimeReporter::WatchTimeReporter(
mojom::PlaybackPropertiesPtr properties,
bool is_background,
+ bool is_muted,
GetMediaTimeCB get_media_time_cb,
mojom::MediaMetricsProvider* provider,
- scoped_refptr<base::SequencedTaskRunner> task_runner)
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ const base::TickClock* tick_clock)
: properties_(std::move(properties)),
is_background_(is_background),
- get_media_time_cb_(std::move(get_media_time_cb)) {
+ is_muted_(is_muted),
+ get_media_time_cb_(std::move(get_media_time_cb)),
+ reporting_timer_(tick_clock) {
DCHECK(!get_media_time_cb_.is_null());
DCHECK(properties_->has_audio || properties_->has_video);
DCHECK_EQ(is_background, properties_->is_background);
+ // The background reporter receives play/pause events instead of visibility
+ // changes, so it must always be visible to function correctly.
+ if (is_background_)
+ DCHECK(is_visible_);
+
+ // The muted reporter receives play/pause events instead of volume changes, so
+ // its volume must always be audible to function correctly.
+ if (is_muted_)
+ DCHECK_EQ(volume_, 1.0);
+
if (base::PowerMonitor* pm = base::PowerMonitor::Get())
pm->AddObserver(this);
provider->AcquireWatchTimeRecorder(properties_->Clone(),
mojo::MakeRequest(&recorder_));
- if (is_background_ || !ShouldReportWatchTime())
+ reporting_timer_.SetTaskRunner(task_runner);
+
+ // If this is a sub-reporter or we shouldn't report watch time, we're done. We
+ // don't support muted+background reporting currently.
+ if (is_background_ || is_muted_ || !ShouldReportWatchTime())
return;
// Background watch time is reported by creating an background only watch time
@@ -56,15 +77,26 @@ WatchTimeReporter::WatchTimeReporter(
// unnecessary complexity inside the UpdateWatchTime() for handling this case.
auto prop_copy = properties_.Clone();
prop_copy->is_background = true;
- background_reporter_.reset(
- new WatchTimeReporter(std::move(prop_copy), true /* is_background */,
- get_media_time_cb_, provider, task_runner));
+ background_reporter_.reset(new WatchTimeReporter(
+ std::move(prop_copy), true /* is_background */, false /* is_muted */,
+ get_media_time_cb_, provider, task_runner, tick_clock));
- reporting_timer_.SetTaskRunner(task_runner);
+ // Muted watch time is only reported for audio+video playback.
+ if (!properties_->has_video || !properties_->has_audio)
+ return;
+
+ // Similar to the above, muted watch time is reported by creating a muted only
+ // watch time reporter which receives play when muted and pause when audible.
+ prop_copy = properties_.Clone();
+ prop_copy->is_muted = true;
+ muted_reporter_.reset(new WatchTimeReporter(
+ std::move(prop_copy), false /* is_background */, true /* is_muted */,
+ get_media_time_cb_, provider, task_runner, tick_clock));
}
WatchTimeReporter::~WatchTimeReporter() {
background_reporter_.reset();
+ muted_reporter_.reset();
// This is our last chance, so finalize now if there's anything remaining.
MaybeFinalizeWatchTime(FinalizeTime::IMMEDIATELY);
@@ -75,6 +107,8 @@ WatchTimeReporter::~WatchTimeReporter() {
void WatchTimeReporter::OnPlaying() {
if (background_reporter_ && !is_visible_)
background_reporter_->OnPlaying();
+ if (muted_reporter_ && !volume_)
+ muted_reporter_->OnPlaying();
is_playing_ = true;
MaybeStartReportingTimer(get_media_time_cb_.Run());
@@ -83,6 +117,8 @@ void WatchTimeReporter::OnPlaying() {
void WatchTimeReporter::OnPaused() {
if (background_reporter_)
background_reporter_->OnPaused();
+ if (muted_reporter_)
+ muted_reporter_->OnPaused();
is_playing_ = false;
MaybeFinalizeWatchTime(FinalizeTime::ON_NEXT_UPDATE);
@@ -91,6 +127,8 @@ void WatchTimeReporter::OnPaused() {
void WatchTimeReporter::OnSeeking() {
if (background_reporter_)
background_reporter_->OnSeeking();
+ if (muted_reporter_)
+ muted_reporter_->OnSeeking();
// Seek is a special case that does not have hysteresis, when this is called
// the seek is imminent, so finalize the previous playback immediately.
@@ -101,27 +139,45 @@ void WatchTimeReporter::OnVolumeChange(double volume) {
if (background_reporter_)
background_reporter_->OnVolumeChange(volume);
+ // The muted reporter should never receive volume changes.
+ DCHECK(!is_muted_);
+
const double old_volume = volume_;
volume_ = volume;
// We're only interesting in transitions in and out of the muted state.
- if (!old_volume && volume)
+ if (!old_volume && volume) {
+ if (muted_reporter_)
+ muted_reporter_->OnPaused();
MaybeStartReportingTimer(get_media_time_cb_.Run());
- else if (old_volume && !volume_)
+ } else if (old_volume && !volume_) {
+ if (muted_reporter_ && is_playing_)
+ muted_reporter_->OnPlaying();
MaybeFinalizeWatchTime(FinalizeTime::ON_NEXT_UPDATE);
+ }
}
void WatchTimeReporter::OnShown() {
+ // The background reporter should never receive visibility changes.
+ DCHECK(!is_background_);
+
if (background_reporter_)
background_reporter_->OnPaused();
+ if (muted_reporter_)
+ muted_reporter_->OnShown();
is_visible_ = true;
MaybeStartReportingTimer(get_media_time_cb_.Run());
}
void WatchTimeReporter::OnHidden() {
+ // The background reporter should never receive visibility changes.
+ DCHECK(!is_background_);
+
if (background_reporter_ && is_playing_)
background_reporter_->OnPlaying();
+ if (muted_reporter_)
+ muted_reporter_->OnHidden();
is_visible_ = false;
MaybeFinalizeWatchTime(FinalizeTime::ON_NEXT_UPDATE);
@@ -134,11 +190,15 @@ void WatchTimeReporter::OnError(PipelineStatus status) {
recorder_->OnError(status);
if (background_reporter_)
background_reporter_->OnError(status);
+ if (muted_reporter_)
+ muted_reporter_->OnError(status);
}
void WatchTimeReporter::OnUnderflow() {
if (background_reporter_)
background_reporter_->OnUnderflow();
+ if (muted_reporter_)
+ muted_reporter_->OnUnderflow();
if (!reporting_timer_.IsRunning())
return;
@@ -150,6 +210,9 @@ void WatchTimeReporter::OnUnderflow() {
}
void WatchTimeReporter::OnNativeControlsEnabled() {
+ if (muted_reporter_)
+ muted_reporter_->OnNativeControlsEnabled();
+
if (!reporting_timer_.IsRunning()) {
has_native_controls_ = true;
return;
@@ -166,6 +229,9 @@ void WatchTimeReporter::OnNativeControlsEnabled() {
}
void WatchTimeReporter::OnNativeControlsDisabled() {
+ if (muted_reporter_)
+ muted_reporter_->OnNativeControlsDisabled();
+
if (!reporting_timer_.IsRunning()) {
has_native_controls_ = false;
return;
@@ -198,6 +264,8 @@ void WatchTimeReporter::SetAudioDecoderName(const std::string& name) {
recorder_->SetAudioDecoderName(name);
if (background_reporter_)
background_reporter_->SetAudioDecoderName(name);
+ if (muted_reporter_)
+ muted_reporter_->SetAudioDecoderName(name);
}
void WatchTimeReporter::SetVideoDecoderName(const std::string& name) {
@@ -205,12 +273,16 @@ void WatchTimeReporter::SetVideoDecoderName(const std::string& name) {
recorder_->SetVideoDecoderName(name);
if (background_reporter_)
background_reporter_->SetVideoDecoderName(name);
+ if (muted_reporter_)
+ muted_reporter_->SetVideoDecoderName(name);
}
void WatchTimeReporter::SetAutoplayInitiated(bool autoplay_initiated) {
recorder_->SetAutoplayInitiated(autoplay_initiated);
if (background_reporter_)
background_reporter_->SetAutoplayInitiated(autoplay_initiated);
+ if (muted_reporter_)
+ muted_reporter_->SetAutoplayInitiated(autoplay_initiated);
}
void WatchTimeReporter::OnPowerStateChange(bool on_battery_power) {
@@ -248,6 +320,9 @@ void WatchTimeReporter::MaybeStartReportingTimer(
// Don't start the timer if any of our state indicates we shouldn't; this
// check is important since the various event handlers do not have to care
// about the state of other events.
+ //
+ // TODO(dalecurtis): We should only consider |volume_| when there is actually
+ // an audio track; requires updating lots of tests to fix.
if (!ShouldReportWatchTime() || !is_playing_ || !volume_ || !is_visible_) {
// If we reach this point the timer should already have been stopped or
// there is a pending finalize in flight.
@@ -329,8 +404,10 @@ void WatchTimeReporter::UpdateWatchTime() {
do { \
recorder_->RecordWatchTime( \
(properties_->has_video && properties_->has_audio) \
- ? (is_background_ ? WatchTimeKey::kAudioVideoBackground##key \
- : WatchTimeKey::kAudioVideo##key) \
+ ? (is_background_ \
+ ? WatchTimeKey::kAudioVideoBackground##key \
+ : (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
+ : WatchTimeKey::kAudioVideo##key)) \
: properties_->has_video \
? (is_background_ ? WatchTimeKey::kVideoBackground##key \
: WatchTimeKey::kVideo##key) \
@@ -390,7 +467,8 @@ void WatchTimeReporter::UpdateWatchTime() {
DCHECK(!is_background_); \
recorder_->RecordWatchTime( \
(properties_->has_video && properties_->has_audio) \
- ? WatchTimeKey::kAudioVideo##key \
+ ? (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
+ : WatchTimeKey::kAudioVideo##key) \
: properties_->has_audio ? WatchTimeKey::kAudio##key \
: WatchTimeKey::kVideo##key, \
value); \
@@ -415,14 +493,16 @@ void WatchTimeReporter::UpdateWatchTime() {
}
// Similar to RECORD_WATCH_TIME but ignores background and audio watch time.
-#define RECORD_DISPLAY_WATCH_TIME(key, value) \
- do { \
- DCHECK(properties_->has_video); \
- DCHECK(!is_background_); \
- recorder_->RecordWatchTime(properties_->has_audio \
- ? WatchTimeKey::kAudioVideo##key \
- : WatchTimeKey::kVideo##key, \
- value); \
+#define RECORD_DISPLAY_WATCH_TIME(key, value) \
+ do { \
+ DCHECK(properties_->has_video); \
+ DCHECK(!is_background_); \
+ recorder_->RecordWatchTime( \
+ properties_->has_audio \
+ ? (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
+ : WatchTimeKey::kAudioVideo##key) \
+ : WatchTimeKey::kVideo##key, \
+ value); \
} while (0)
// Similar to the block above for display type.
@@ -488,7 +568,9 @@ void WatchTimeReporter::UpdateWatchTime() {
WatchTimeKey::kAudioBackgroundAc, WatchTimeKey::kAudioVideoBattery,
WatchTimeKey::kAudioVideoAc,
WatchTimeKey::kAudioVideoBackgroundBattery,
- WatchTimeKey::kAudioVideoBackgroundAc, WatchTimeKey::kVideoBattery,
+ WatchTimeKey::kAudioVideoBackgroundAc,
+ WatchTimeKey::kAudioVideoMutedBattery,
+ WatchTimeKey::kAudioVideoMutedAc, WatchTimeKey::kVideoBattery,
WatchTimeKey::kVideoAc, WatchTimeKey::kVideoBackgroundAc,
WatchTimeKey::kVideoBackgroundBattery});
}
@@ -499,18 +581,24 @@ void WatchTimeReporter::UpdateWatchTime() {
WatchTimeKey::kAudioNativeControlsOff,
WatchTimeKey::kAudioVideoNativeControlsOn,
WatchTimeKey::kAudioVideoNativeControlsOff,
+ WatchTimeKey::kAudioVideoMutedNativeControlsOn,
+ WatchTimeKey::kAudioVideoMutedNativeControlsOff,
WatchTimeKey::kVideoNativeControlsOn,
WatchTimeKey::kVideoNativeControlsOff});
}
if (is_display_type_change_pending) {
- keys_to_finalize.insert(keys_to_finalize.end(),
- {WatchTimeKey::kAudioVideoDisplayFullscreen,
- WatchTimeKey::kAudioVideoDisplayInline,
- WatchTimeKey::kAudioVideoDisplayPictureInPicture,
- WatchTimeKey::kVideoDisplayFullscreen,
- WatchTimeKey::kVideoDisplayInline,
- WatchTimeKey::kVideoDisplayPictureInPicture});
+ keys_to_finalize.insert(
+ keys_to_finalize.end(),
+ {WatchTimeKey::kAudioVideoDisplayFullscreen,
+ WatchTimeKey::kAudioVideoDisplayInline,
+ WatchTimeKey::kAudioVideoDisplayPictureInPicture,
+ WatchTimeKey::kAudioVideoMutedDisplayFullscreen,
+ WatchTimeKey::kAudioVideoMutedDisplayInline,
+ WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture,
+ WatchTimeKey::kVideoDisplayFullscreen,
+ WatchTimeKey::kVideoDisplayInline,
+ WatchTimeKey::kVideoDisplayPictureInPicture});
}
if (!keys_to_finalize.empty())
@@ -550,6 +638,9 @@ void WatchTimeReporter::UpdateWatchTime() {
void WatchTimeReporter::OnDisplayTypeChanged(
blink::WebMediaPlayer::DisplayType display_type) {
+ if (muted_reporter_)
+ muted_reporter_->OnDisplayTypeChanged(display_type);
+
display_type_ = display_type;
if (!reporting_timer_.IsRunning())
diff --git a/chromium/media/blink/watch_time_reporter.h b/chromium/media/blink/watch_time_reporter.h
index 40d8644731c..76c508e4c7b 100644
--- a/chromium/media/blink/watch_time_reporter.h
+++ b/chromium/media/blink/watch_time_reporter.h
@@ -9,6 +9,7 @@
#include "base/callback.h"
#include "base/power_monitor/power_observer.h"
+#include "base/sequenced_task_runner.h"
#include "base/time/time.h"
#include "base/timer/timer.h"
#include "media/base/audio_codecs.h"
@@ -43,6 +44,10 @@ namespace media {
// collection starts. As with other events, there is hysteresis on change
// between the foreground and background.
//
+// Similarly, there are both muted and unmuted buckets for watch time. E.g., if
+// a playback is muted the unmuted collection stops and muted collection starts.
+// As with other events, there is hysteresis between mute and unmute.
+//
// Power events (on/off battery power), native controls changes, or display type
// changes have a similar hysteresis, but unlike the aforementioned properties,
// will not stop metric collection.
@@ -75,7 +80,8 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
WatchTimeReporter(mojom::PlaybackPropertiesPtr properties,
GetMediaTimeCB get_media_time_cb,
mojom::MediaMetricsProvider* provider,
- scoped_refptr<base::SequencedTaskRunner> task_runner);
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ const base::TickClock* tick_clock = nullptr);
~WatchTimeReporter() override;
// These methods are used to ensure that watch time is only reported for media
@@ -154,9 +160,11 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
// Internal constructor for marking background status.
WatchTimeReporter(mojom::PlaybackPropertiesPtr properties,
bool is_background,
+ bool is_muted,
GetMediaTimeCB get_media_time_cb,
mojom::MediaMetricsProvider* provider,
- scoped_refptr<base::SequencedTaskRunner> task_runner);
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ const base::TickClock* tick_clock);
// base::PowerObserver implementation.
//
@@ -175,6 +183,7 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
// Initialized during construction.
const mojom::PlaybackPropertiesPtr properties_;
const bool is_background_;
+ const bool is_muted_;
const GetMediaTimeCB get_media_time_cb_;
mojom::WatchTimeRecorderPtr recorder_;
@@ -228,6 +237,10 @@ class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
// as an audio only WatchTimeReporter with |is_background_| set to true.
std::unique_ptr<WatchTimeReporter> background_reporter_;
+ // Similar to the above, but for muted audio+video watch time. Configured as
+ // an audio+video WatchTimeReporter with |is_muted_| set to true.
+ std::unique_ptr<WatchTimeReporter> muted_reporter_;
+
DISALLOW_COPY_AND_ASSIGN(WatchTimeReporter);
};
diff --git a/chromium/media/blink/watch_time_reporter_unittest.cc b/chromium/media/blink/watch_time_reporter_unittest.cc
index 0330a10cdf4..d194003a1c6 100644
--- a/chromium/media/blink/watch_time_reporter_unittest.cc
+++ b/chromium/media/blink/watch_time_reporter_unittest.cc
@@ -6,9 +6,10 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
-#include "base/debug/stack_trace.h"
+#include "base/message_loop/message_loop_current.h"
#include "base/run_loop.h"
-#include "base/test/test_message_loop.h"
+#include "base/single_thread_task_runner.h"
+#include "base/test/test_mock_time_task_runner.h"
#include "media/base/mock_media_log.h"
#include "media/base/watch_time_keys.h"
#include "media/blink/watch_time_reporter.h"
@@ -36,6 +37,15 @@ using blink::WebMediaPlayer;
.RetiresOnSaturation(); \
} while (0)
+#define EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(key, value) \
+ do { \
+ if (!has_video_ || !has_audio_) \
+ break; \
+ EXPECT_CALL(*this, \
+ OnWatchTimeUpdate(WatchTimeKey::kAudioVideoMuted##key, value)) \
+ .RetiresOnSaturation(); \
+ } while (0)
+
#define EXPECT_WATCH_TIME_IF_VIDEO(key, value) \
do { \
if (!has_video_) \
@@ -66,17 +76,17 @@ using blink::WebMediaPlayer;
// finalize event is expected to finalize.
#define EXPECT_POWER_WATCH_TIME_FINALIZED() \
EXPECT_CALL(*this, OnPowerWatchTimeFinalized()) \
- .Times(12) \
+ .Times(14) \
.RetiresOnSaturation();
#define EXPECT_CONTROLS_WATCH_TIME_FINALIZED() \
EXPECT_CALL(*this, OnControlsWatchTimeFinalized()) \
- .Times(6) \
+ .Times(8) \
.RetiresOnSaturation();
#define EXPECT_DISPLAY_WATCH_TIME_FINALIZED() \
EXPECT_CALL(*this, OnDisplayWatchTimeFinalized()) \
- .Times(6) \
+ .Times(9) \
.RetiresOnSaturation();
using WatchTimeReporterTestData = std::tuple<bool, bool>;
@@ -106,6 +116,8 @@ class WatchTimeReporterTest
case WatchTimeKey::kAudioBackgroundAc:
case WatchTimeKey::kAudioVideoBattery:
case WatchTimeKey::kAudioVideoAc:
+ case WatchTimeKey::kAudioVideoMutedBattery:
+ case WatchTimeKey::kAudioVideoMutedAc:
case WatchTimeKey::kAudioVideoBackgroundBattery:
case WatchTimeKey::kAudioVideoBackgroundAc:
case WatchTimeKey::kVideoBattery:
@@ -119,6 +131,8 @@ class WatchTimeReporterTest
case WatchTimeKey::kAudioNativeControlsOff:
case WatchTimeKey::kAudioVideoNativeControlsOn:
case WatchTimeKey::kAudioVideoNativeControlsOff:
+ case WatchTimeKey::kAudioVideoMutedNativeControlsOn:
+ case WatchTimeKey::kAudioVideoMutedNativeControlsOff:
case WatchTimeKey::kVideoNativeControlsOn:
case WatchTimeKey::kVideoNativeControlsOff:
parent_->OnControlsWatchTimeFinalized();
@@ -127,6 +141,9 @@ class WatchTimeReporterTest
case WatchTimeKey::kAudioVideoDisplayFullscreen:
case WatchTimeKey::kAudioVideoDisplayInline:
case WatchTimeKey::kAudioVideoDisplayPictureInPicture:
+ case WatchTimeKey::kAudioVideoMutedDisplayFullscreen:
+ case WatchTimeKey::kAudioVideoMutedDisplayInline:
+ case WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture:
case WatchTimeKey::kVideoDisplayFullscreen:
case WatchTimeKey::kVideoDisplayInline:
case WatchTimeKey::kVideoDisplayPictureInPicture:
@@ -148,6 +165,11 @@ class WatchTimeReporterTest
case WatchTimeKey::kAudioVideoEme:
case WatchTimeKey::kAudioVideoSrc:
case WatchTimeKey::kAudioVideoEmbeddedExperience:
+ case WatchTimeKey::kAudioVideoMutedAll:
+ case WatchTimeKey::kAudioVideoMutedMse:
+ case WatchTimeKey::kAudioVideoMutedEme:
+ case WatchTimeKey::kAudioVideoMutedSrc:
+ case WatchTimeKey::kAudioVideoMutedEmbeddedExperience:
case WatchTimeKey::kAudioVideoBackgroundAll:
case WatchTimeKey::kAudioVideoBackgroundMse:
case WatchTimeKey::kAudioVideoBackgroundEme:
@@ -228,8 +250,19 @@ class WatchTimeReporterTest
WatchTimeReporterTest()
: has_video_(std::get<0>(GetParam())),
has_audio_(std::get<1>(GetParam())),
- fake_metrics_provider_(this) {}
- ~WatchTimeReporterTest() override = default;
+ fake_metrics_provider_(this) {
+ // Do this first. Lots of pieces depend on the task runner.
+ auto message_loop = base::MessageLoopCurrent::Get();
+ original_task_runner_ = message_loop.task_runner();
+ task_runner_ = new base::TestMockTimeTaskRunner();
+ message_loop.SetTaskRunner(task_runner_);
+ }
+
+ ~WatchTimeReporterTest() override {
+ CycleReportingTimer();
+ task_runner_->RunUntilIdle();
+ base::MessageLoopCurrent::Get().SetTaskRunner(original_task_runner_);
+ }
protected:
void Initialize(bool is_mse,
@@ -239,25 +272,19 @@ class WatchTimeReporterTest
EXPECT_WATCH_TIME_FINALIZED();
wtr_.reset(new WatchTimeReporter(
- mojom::PlaybackProperties::New(kUnknownAudioCodec, kUnknownVideoCodec,
- has_audio_, has_video_, false, is_mse,
- is_encrypted, false, initial_video_size),
+ mojom::PlaybackProperties::New(
+ kUnknownAudioCodec, kUnknownVideoCodec, has_audio_, has_video_,
+ false, false, is_mse, is_encrypted, false, initial_video_size),
base::BindRepeating(&WatchTimeReporterTest::GetCurrentMediaTime,
base::Unretained(this)),
&fake_metrics_provider_,
- blink::scheduler::GetSequencedTaskRunnerForTesting()));
-
- // Setup the reporting interval to be immediate to avoid spinning real time
- // within the unit test.
- wtr_->reporting_interval_ = base::TimeDelta();
- if (wtr_->background_reporter_)
- wtr_->background_reporter_->reporting_interval_ = base::TimeDelta();
+ blink::scheduler::GetSequencedTaskRunnerForTesting(),
+ task_runner_->GetMockTickClock()));
+ reporting_interval_ = wtr_->reporting_interval_;
}
void CycleReportingTimer() {
- base::RunLoop run_loop;
- message_loop_.task_runner()->PostTask(FROM_HERE, run_loop.QuitClosure());
- run_loop.Run();
+ task_runner_->FastForwardBy(reporting_interval_);
}
bool IsMonitoring() const { return wtr_->reporting_timer_.IsRunning(); }
@@ -266,6 +293,13 @@ class WatchTimeReporterTest
return wtr_->background_reporter_->reporting_timer_.IsRunning();
}
+ bool IsMutedMonitoring() const {
+ return wtr_->muted_reporter_ &&
+ wtr_->muted_reporter_->reporting_timer_.IsRunning();
+ }
+
+ void DisableMutedReporting() { wtr_->muted_reporter_.reset(); }
+
// We call directly into the reporter for this instead of using an actual
// PowerMonitorTestSource since that results in a posted tasks which interfere
// with our ability to test the timer.
@@ -277,6 +311,8 @@ class WatchTimeReporterTest
wtr_->OnPowerStateChange(on_battery_power);
if (wtr_->background_reporter_)
wtr_->background_reporter_->OnPowerStateChange(on_battery_power);
+ if (wtr_->muted_reporter_)
+ wtr_->muted_reporter_->OnPowerStateChange(on_battery_power);
}
void OnNativeControlsEnabled(bool enabled) {
@@ -348,8 +384,9 @@ class WatchTimeReporterTest
void RunHysteresisTest(HysteresisTestCallback test_callback_func) {
Initialize(false, false, kSizeJustRight);
- // Disable background reporting for the hysteresis tests.
+ // Disable nested reporters for the hysteresis tests.
wtr_->background_reporter_.reset();
+ wtr_->muted_reporter_.reset();
if (TestFlags & kStartWithNativeControls)
OnNativeControlsEnabled(true);
@@ -544,9 +581,16 @@ class WatchTimeReporterTest
const bool has_video_;
const bool has_audio_;
+
+ // Task runner that allows for manual advancing of time. Instantiated during
+ // construction. |original_task_runner_| is a copy of the TaskRunner in place
+ // prior to the start of this test. It's restored after the test completes.
+ scoped_refptr<base::TestMockTimeTaskRunner> task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> original_task_runner_;
+
FakeMediaMetricsProvider fake_metrics_provider_;
- base::TestMessageLoop message_loop_;
std::unique_ptr<WatchTimeReporter> wtr_;
+ base::TimeDelta reporting_interval_;
private:
DISALLOW_COPY_AND_ASSIGN(WatchTimeReporterTest);
@@ -582,7 +626,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporter) {
wtr_->OnPlaying();
EXPECT_TRUE(IsMonitoring());
- EXPECT_CALL(*this, OnError(PIPELINE_ERROR_DECODE)).Times(2);
+ EXPECT_CALL(*this, OnError(PIPELINE_ERROR_DECODE))
+ .Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->OnError(PIPELINE_ERROR_DECODE);
Initialize(true, true, gfx::Size());
@@ -640,12 +685,23 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterUnderflow) {
constexpr base::TimeDelta kWatchTimeFirst = base::TimeDelta::FromSeconds(5);
constexpr base::TimeDelta kWatchTimeEarly = base::TimeDelta::FromSeconds(10);
constexpr base::TimeDelta kWatchTimeLate = base::TimeDelta::FromSeconds(15);
- EXPECT_CALL(*this, GetCurrentMediaTime())
- .WillOnce(testing::Return(base::TimeDelta()))
- .WillOnce(testing::Return(kWatchTimeFirst))
- .WillOnce(testing::Return(kWatchTimeEarly))
- .WillOnce(testing::Return(kWatchTimeEarly))
- .WillRepeatedly(testing::Return(kWatchTimeLate));
+ if (has_audio_ && has_video_) {
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTimeFirst))
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillOnce(testing::Return(kWatchTimeEarly)) // Extra 2 for muted.
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillRepeatedly(testing::Return(kWatchTimeLate));
+ } else {
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTimeFirst))
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillRepeatedly(testing::Return(kWatchTimeLate));
+ }
Initialize(true, true, kSizeJustRight);
wtr_->OnPlaying();
EXPECT_TRUE(IsMonitoring());
@@ -673,9 +729,24 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterUnderflow) {
EXPECT_WATCH_TIME(Mse, kWatchTimeEarly);
EXPECT_WATCH_TIME(NativeControlsOff, kWatchTimeEarly);
EXPECT_WATCH_TIME_IF_VIDEO(DisplayInline, kWatchTimeEarly);
- EXPECT_CALL(*this, OnUnderflowUpdate(1));
EXPECT_WATCH_TIME_FINALIZED();
+
+ // Since we're using a mute event above, we'll have some muted watch time.
+ const base::TimeDelta kWatchTime = kWatchTimeLate - kWatchTimeEarly;
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime);
+
+ EXPECT_CALL(*this, OnUnderflowUpdate(1))
+ .Times((has_audio_ && has_video_) ? 2 : 1);
CycleReportingTimer();
+
+ // Muted watch time shouldn't finalize until destruction.
+ if (has_audio_ && has_video_)
+ EXPECT_WATCH_TIME_FINALIZED();
wtr_.reset();
}
@@ -683,16 +754,18 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterDecoderNames) {
Initialize(true, true, kSizeJustRight);
// Setup the initial decoder names; these should be sent immediately as soon
- // they're called. Each should be called twice, once for foreground and once
- // for background reporting.
+ // they're called. Each should be called thrice, once for foreground, once for
+ // background, and once for muted reporting.
const std::string kAudioDecoderName = "FirstAudioDecoder";
const std::string kVideoDecoderName = "FirstVideoDecoder";
if (has_audio_) {
- EXPECT_CALL(*this, OnSetAudioDecoderName(kAudioDecoderName)).Times(2);
+ EXPECT_CALL(*this, OnSetAudioDecoderName(kAudioDecoderName))
+ .Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->SetAudioDecoderName(kAudioDecoderName);
}
if (has_video_) {
- EXPECT_CALL(*this, OnSetVideoDecoderName(kVideoDecoderName)).Times(2);
+ EXPECT_CALL(*this, OnSetVideoDecoderName(kVideoDecoderName))
+ .Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->SetVideoDecoderName(kVideoDecoderName);
}
}
@@ -700,7 +773,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterDecoderNames) {
TEST_P(WatchTimeReporterTest, WatchTimeReporterAutoplayInitiated) {
Initialize(true, true, kSizeJustRight);
- EXPECT_CALL(*this, OnSetAutoplayInitiated(true)).Times(2);
+ EXPECT_CALL(*this, OnSetAutoplayInitiated(true))
+ .Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->SetAutoplayInitiated(true);
}
@@ -724,8 +798,10 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterShownHidden) {
EXPECT_BACKGROUND_WATCH_TIME(Mse, kExpectedWatchTime);
EXPECT_WATCH_TIME_FINALIZED();
- // One call for the background reporter and one for the foreground.
- EXPECT_CALL(*this, OnError(PIPELINE_ERROR_DECODE)).Times(2);
+ // One call for the background, one for the foreground, and one for the muted
+ // reporter if we have audio+video.
+ EXPECT_CALL(*this, OnError(PIPELINE_ERROR_DECODE))
+ .Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->OnError(PIPELINE_ERROR_DECODE);
const base::TimeDelta kExpectedForegroundWatchTime = kWatchTimeEarly;
@@ -752,6 +828,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterBackgroundHysteresis) {
.WillOnce(testing::Return(kWatchTimeEarly)) // 1x for timer cycle.
.WillRepeatedly(testing::Return(kWatchTimeLate));
Initialize(true, true, kSizeJustRight);
+ DisableMutedReporting(); // Just complicates this test.
+
wtr_->OnHidden();
wtr_->OnPlaying();
EXPECT_TRUE(IsBackgroundMonitoring());
@@ -790,6 +868,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterShownHiddenBackground) {
.WillRepeatedly(testing::Return(kWatchTimeLate));
Initialize(true, true, kSizeJustRight);
+ DisableMutedReporting(); // Just complicates this test.
+
wtr_->OnHidden();
wtr_->OnPlaying();
EXPECT_TRUE(IsBackgroundMonitoring());
@@ -801,10 +881,6 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterShownHiddenBackground) {
EXPECT_BACKGROUND_WATCH_TIME(Eme, kWatchTimeEarly);
EXPECT_BACKGROUND_WATCH_TIME(Mse, kWatchTimeEarly);
EXPECT_WATCH_TIME_FINALIZED();
- CycleReportingTimer();
-
- EXPECT_FALSE(IsBackgroundMonitoring());
- EXPECT_TRUE(IsMonitoring());
const base::TimeDelta kExpectedForegroundWatchTime =
kWatchTimeLate - kWatchTimeEarly;
@@ -814,6 +890,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterShownHiddenBackground) {
EXPECT_WATCH_TIME(Mse, kExpectedForegroundWatchTime);
EXPECT_WATCH_TIME(NativeControlsOff, kExpectedForegroundWatchTime);
EXPECT_WATCH_TIME_IF_VIDEO(DisplayInline, kExpectedForegroundWatchTime);
+ CycleReportingTimer();
+
EXPECT_WATCH_TIME_FINALIZED();
wtr_.reset();
}
@@ -973,6 +1051,64 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterHiddenDisplayTypeBackground) {
wtr_.reset();
}
+TEST_P(WatchTimeReporterTest, WatchTimeReporterHiddenMuted) {
+ constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(8);
+ constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
+
+ // Expectations for when muted watch time is recorded and when it isn't.
+ if (has_audio_ && has_video_) {
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta())) // 2x playing.
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTime1)) // 2x muted.
+ .WillOnce(testing::Return(kWatchTime1))
+ .WillOnce(testing::Return(kWatchTime1)) // 2x shown.
+ .WillOnce(testing::Return(kWatchTime1))
+ .WillRepeatedly(testing::Return(kWatchTime2));
+ } else {
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta())) // 2x playing.
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTime1)) // 1x muted.
+ .WillOnce(testing::Return(kWatchTime1)) // 1x shown.
+ .WillRepeatedly(testing::Return(kWatchTime2));
+ }
+
+ Initialize(true, true, kSizeJustRight);
+ wtr_->OnHidden();
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsBackgroundMonitoring());
+ EXPECT_FALSE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ wtr_->OnVolumeChange(0);
+ EXPECT_TRUE(IsBackgroundMonitoring());
+ EXPECT_FALSE(IsMutedMonitoring());
+
+ EXPECT_BACKGROUND_WATCH_TIME(Ac, kWatchTime1);
+ EXPECT_BACKGROUND_WATCH_TIME(All, kWatchTime1);
+ EXPECT_BACKGROUND_WATCH_TIME(Eme, kWatchTime1);
+ EXPECT_BACKGROUND_WATCH_TIME(Mse, kWatchTime1);
+ EXPECT_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ wtr_->OnShown();
+ EXPECT_FALSE(IsBackgroundMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+ EXPECT_EQ(has_audio_ && has_video_, IsMutedMonitoring());
+
+ const base::TimeDelta kWatchTime = kWatchTime2 - kWatchTime1;
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime);
+ if (has_audio_ && has_video_)
+ EXPECT_WATCH_TIME_FINALIZED();
+ wtr_.reset();
+}
+
TEST_P(WatchTimeReporterTest, WatchTimeReporterMultiplePartialFinalize) {
constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(8);
constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(16);
@@ -1516,6 +1652,271 @@ TEST_P(WatchTimeReporterTest, HysteresisPartialExitStillFinalizes) {
}
}
+class MutedWatchTimeReporterTest : public WatchTimeReporterTest {};
+
+TEST_P(MutedWatchTimeReporterTest, MutedHysteresis) {
+ constexpr base::TimeDelta kWatchTimeEarly = base::TimeDelta::FromSeconds(8);
+ constexpr base::TimeDelta kWatchTimeLate = base::TimeDelta::FromSeconds(10);
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta())) // 2x for playing
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTimeEarly)) // 3x for unmute.
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillOnce(testing::Return(kWatchTimeEarly)) // 2x for mute
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillOnce(testing::Return(kWatchTimeEarly)) // 1x for timer cycle.
+ .WillRepeatedly(testing::Return(kWatchTimeLate));
+ Initialize(true, true, kSizeJustRight);
+
+ wtr_->OnVolumeChange(0);
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ wtr_->OnVolumeChange(1);
+ wtr_->OnVolumeChange(0);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTimeEarly);
+
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_TRUE(IsMonitoring());
+ EXPECT_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTimeLate);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTimeLate);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTimeLate);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTimeLate);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTimeLate);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTimeLate);
+ EXPECT_WATCH_TIME_FINALIZED();
+ wtr_.reset();
+}
+
+TEST_P(MutedWatchTimeReporterTest, MuteUnmute) {
+ constexpr base::TimeDelta kWatchTimeEarly = base::TimeDelta::FromSeconds(8);
+ constexpr base::TimeDelta kWatchTimeLate = base::TimeDelta::FromSeconds(10);
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillOnce(testing::Return(kWatchTimeEarly))
+ .WillRepeatedly(testing::Return(kWatchTimeLate));
+
+ Initialize(true, true, kSizeJustRight);
+ wtr_->OnVolumeChange(0);
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ wtr_->OnVolumeChange(1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTimeEarly);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTimeEarly);
+ EXPECT_WATCH_TIME_FINALIZED();
+
+ const base::TimeDelta kExpectedUnmutedWatchTime =
+ kWatchTimeLate - kWatchTimeEarly;
+ EXPECT_WATCH_TIME(Ac, kExpectedUnmutedWatchTime);
+ EXPECT_WATCH_TIME(All, kExpectedUnmutedWatchTime);
+ EXPECT_WATCH_TIME(Eme, kExpectedUnmutedWatchTime);
+ EXPECT_WATCH_TIME(Mse, kExpectedUnmutedWatchTime);
+ EXPECT_WATCH_TIME(NativeControlsOff, kExpectedUnmutedWatchTime);
+ EXPECT_WATCH_TIME_IF_VIDEO(DisplayInline, kExpectedUnmutedWatchTime);
+ CycleReportingTimer();
+
+ EXPECT_WATCH_TIME_FINALIZED();
+ wtr_.reset();
+}
+
+TEST_P(MutedWatchTimeReporterTest, MutedPaused) {
+ constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(8);
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillRepeatedly(testing::Return(kWatchTime));
+ Initialize(true, true, kSizeJustRight);
+ wtr_->OnVolumeChange(0);
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ wtr_->OnPaused();
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime);
+ EXPECT_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ EXPECT_FALSE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+ wtr_.reset();
+}
+
+TEST_P(MutedWatchTimeReporterTest, MutedSeeked) {
+ constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(8);
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillRepeatedly(testing::Return(kWatchTime));
+ Initialize(false, true, kSizeJustRight);
+ wtr_->OnVolumeChange(0);
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Src, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime);
+ EXPECT_WATCH_TIME_FINALIZED();
+ wtr_->OnSeeking();
+
+ EXPECT_FALSE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+ wtr_.reset();
+}
+
+TEST_P(MutedWatchTimeReporterTest, MutedPower) {
+ constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(8);
+ constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(16);
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTime1))
+ .WillOnce(testing::Return(kWatchTime1))
+ .WillRepeatedly(testing::Return(kWatchTime2));
+ Initialize(true, true, kSizeJustRight);
+ wtr_->OnVolumeChange(0);
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ OnPowerStateChange(true);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime1);
+ EXPECT_POWER_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ wtr_->OnPaused();
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Battery, kWatchTime2 - kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime2);
+ EXPECT_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ EXPECT_FALSE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+ wtr_.reset();
+}
+
+TEST_P(MutedWatchTimeReporterTest, MutedControls) {
+ constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(8);
+ constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(16);
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTime1))
+ .WillOnce(testing::Return(kWatchTime1))
+ .WillRepeatedly(testing::Return(kWatchTime2));
+ Initialize(true, true, kSizeJustRight);
+ wtr_->OnVolumeChange(0);
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ OnNativeControlsEnabled(true);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime1);
+ EXPECT_CONTROLS_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ wtr_->OnPaused();
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOn,
+ kWatchTime2 - kWatchTime1);
+ EXPECT_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ EXPECT_FALSE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+ wtr_.reset();
+}
+
+TEST_P(MutedWatchTimeReporterTest, MutedDisplayType) {
+ constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(8);
+ constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(16);
+ EXPECT_CALL(*this, GetCurrentMediaTime())
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(base::TimeDelta()))
+ .WillOnce(testing::Return(kWatchTime1))
+ .WillOnce(testing::Return(kWatchTime1))
+ .WillRepeatedly(testing::Return(kWatchTime2));
+ Initialize(true, true, kSizeJustRight);
+ wtr_->OnVolumeChange(0);
+ wtr_->OnPlaying();
+ EXPECT_TRUE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+
+ OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayInline, kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime1);
+ EXPECT_DISPLAY_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ wtr_->OnPaused();
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Mse, kWatchTime2);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(DisplayFullscreen,
+ kWatchTime2 - kWatchTime1);
+ EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(NativeControlsOff, kWatchTime2);
+ EXPECT_WATCH_TIME_FINALIZED();
+ CycleReportingTimer();
+
+ EXPECT_FALSE(IsMutedMonitoring());
+ EXPECT_FALSE(IsMonitoring());
+ wtr_.reset();
+}
+
INSTANTIATE_TEST_CASE_P(WatchTimeReporterTest,
WatchTimeReporterTest,
testing::ValuesIn({// has_video, has_audio
@@ -1525,4 +1926,12 @@ INSTANTIATE_TEST_CASE_P(WatchTimeReporterTest,
// has_video
std::make_tuple(false, true)}));
+// Separate test set since muted tests only work with audio+video.
+INSTANTIATE_TEST_CASE_P(MutedWatchTimeReporterTest,
+ MutedWatchTimeReporterTest,
+ testing::ValuesIn({
+ // has_video, has_audio
+ std::make_tuple(true, true),
+ }));
+
} // namespace media
diff --git a/chromium/media/blink/webaudiosourceprovider_impl.cc b/chromium/media/blink/webaudiosourceprovider_impl.cc
index 69587af4f0f..644e9d51c07 100644
--- a/chromium/media/blink/webaudiosourceprovider_impl.cc
+++ b/chromium/media/blink/webaudiosourceprovider_impl.cc
@@ -4,6 +4,8 @@
#include "media/blink/webaudiosourceprovider_impl.h"
+#include <atomic>
+#include <utility>
#include <vector>
#include "base/bind.h"
@@ -34,8 +36,7 @@ namespace {
class AutoTryLock {
public:
explicit AutoTryLock(base::Lock& lock)
- : lock_(lock),
- acquired_(lock_.Try()) {}
+ : lock_(lock), acquired_(lock_.Try()) {}
bool locked() const { return acquired_; }
@@ -60,7 +61,7 @@ class AutoTryLock {
class WebAudioSourceProviderImpl::TeeFilter
: public AudioRendererSink::RenderCallback {
public:
- TeeFilter() : renderer_(nullptr), channels_(0), sample_rate_(0) {}
+ TeeFilter() : copy_required_(false) {}
~TeeFilter() override = default;
void Initialize(AudioRendererSink::RenderCallback* renderer,
@@ -81,18 +82,25 @@ class WebAudioSourceProviderImpl::TeeFilter
AudioBus* dest) override;
void OnRenderError() override;
- bool IsInitialized() const { return !!renderer_; }
+ bool initialized() const { return !!renderer_; }
int channels() const { return channels_; }
int sample_rate() const { return sample_rate_; }
- void set_copy_audio_bus_callback(const CopyAudioCB& callback) {
- copy_audio_bus_callback_ = callback;
+
+ void SetCopyAudioCallback(CopyAudioCB callback) {
+ copy_required_ = !callback.is_null();
+ base::AutoLock auto_lock(copy_lock_);
+ copy_audio_bus_callback_ = std::move(callback);
}
private:
- AudioRendererSink::RenderCallback* renderer_;
- int channels_;
- int sample_rate_;
-
+ AudioRendererSink::RenderCallback* renderer_ = nullptr;
+ int channels_ = 0;
+ int sample_rate_ = 0;
+
+ // The vast majority of the time we're operating in passthrough mode. So only
+ // acquire a lock to read |copy_audio_bus_callback_| when necessary.
+ std::atomic<bool> copy_required_;
+ base::Lock copy_lock_;
CopyAudioCB copy_audio_bus_callback_;
DISALLOW_COPY_AND_ASSIGN(TeeFilter);
@@ -134,7 +142,7 @@ void WebAudioSourceProviderImpl::SetClient(
// |client_| the current format info. Otherwise |set_format_cb_| will get
// called when Initialize() is called. Note: Always using |set_format_cb_|
// ensures we have the same locking order when calling into |client_|.
- if (tee_filter_->IsInitialized())
+ if (tee_filter_->initialized())
base::ResetAndReturn(&set_format_cb_).Run();
return;
}
@@ -269,29 +277,21 @@ bool WebAudioSourceProviderImpl::CurrentThreadIsRenderingThread() {
void WebAudioSourceProviderImpl::SwitchOutputDevice(
const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) {
base::AutoLock auto_lock(sink_lock_);
if (client_ || !sink_)
callback.Run(OUTPUT_DEVICE_STATUS_ERROR_INTERNAL);
else
- sink_->SwitchOutputDevice(device_id, security_origin, callback);
+ sink_->SwitchOutputDevice(device_id, callback);
}
-void WebAudioSourceProviderImpl::SetCopyAudioCallback(
- const CopyAudioCB& callback) {
+void WebAudioSourceProviderImpl::SetCopyAudioCallback(CopyAudioCB callback) {
DCHECK(!callback.is_null());
-
- // Use |sink_lock_| to protect |tee_filter_| too since they go in lockstep.
- base::AutoLock auto_lock(sink_lock_);
-
- DCHECK(tee_filter_);
- tee_filter_->set_copy_audio_bus_callback(callback);
+ tee_filter_->SetCopyAudioCallback(std::move(callback));
}
void WebAudioSourceProviderImpl::ClearCopyAudioCallback() {
- DCHECK(tee_filter_);
- tee_filter_->set_copy_audio_bus_callback(CopyAudioCB());
+ tee_filter_->SetCopyAudioCallback(CopyAudioCB());
}
int WebAudioSourceProviderImpl::RenderForTesting(AudioBus* audio_bus) {
@@ -319,26 +319,30 @@ int WebAudioSourceProviderImpl::TeeFilter::Render(
base::TimeTicks delay_timestamp,
int prior_frames_skipped,
AudioBus* audio_bus) {
- DCHECK(IsInitialized());
+ DCHECK(initialized());
const int num_rendered_frames = renderer_->Render(
delay, delay_timestamp, prior_frames_skipped, audio_bus);
- if (!copy_audio_bus_callback_.is_null()) {
- const int64_t frames_delayed =
- AudioTimestampHelper::TimeToFrames(delay, sample_rate_);
- std::unique_ptr<AudioBus> bus_copy =
- AudioBus::Create(audio_bus->channels(), audio_bus->frames());
- audio_bus->CopyTo(bus_copy.get());
- copy_audio_bus_callback_.Run(std::move(bus_copy), frames_delayed,
- sample_rate_);
+ // Avoid taking the copy lock for the vast majority of cases.
+ if (copy_required_) {
+ base::AutoLock auto_lock(copy_lock_);
+ if (!copy_audio_bus_callback_.is_null()) {
+ const int64_t frames_delayed =
+ AudioTimestampHelper::TimeToFrames(delay, sample_rate_);
+ std::unique_ptr<AudioBus> bus_copy =
+ AudioBus::Create(audio_bus->channels(), audio_bus->frames());
+ audio_bus->CopyTo(bus_copy.get());
+ copy_audio_bus_callback_.Run(std::move(bus_copy), frames_delayed,
+ sample_rate_);
+ }
}
return num_rendered_frames;
}
void WebAudioSourceProviderImpl::TeeFilter::OnRenderError() {
- DCHECK(IsInitialized());
+ DCHECK(initialized());
renderer_->OnRenderError();
}
diff --git a/chromium/media/blink/webaudiosourceprovider_impl.h b/chromium/media/blink/webaudiosourceprovider_impl.h
index 13a71f2be86..e6e84e69b68 100644
--- a/chromium/media/blink/webaudiosourceprovider_impl.h
+++ b/chromium/media/blink/webaudiosourceprovider_impl.h
@@ -7,6 +7,7 @@
#include <stddef.h>
+#include <memory>
#include <string>
#include "base/callback.h"
@@ -44,9 +45,9 @@ class MEDIA_BLINK_EXPORT WebAudioSourceProviderImpl
: public blink::WebAudioSourceProvider,
public SwitchableAudioRendererSink {
public:
- using CopyAudioCB = base::Callback<void(std::unique_ptr<AudioBus>,
- uint32_t frames_delayed,
- int sample_rate)>;
+ using CopyAudioCB = base::RepeatingCallback<void(std::unique_ptr<AudioBus>,
+ uint32_t frames_delayed,
+ int sample_rate)>;
WebAudioSourceProviderImpl(scoped_refptr<SwitchableAudioRendererSink> sink,
MediaLog* media_log);
@@ -68,11 +69,10 @@ class MEDIA_BLINK_EXPORT WebAudioSourceProviderImpl
bool IsOptimizedForHardwareParameters() override;
bool CurrentThreadIsRenderingThread() override;
void SwitchOutputDevice(const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) override;
// These methods allow a client to get a copy of the rendered audio.
- void SetCopyAudioCallback(const CopyAudioCB& callback);
+ void SetCopyAudioCallback(CopyAudioCB callback);
void ClearCopyAudioCallback();
int RenderForTesting(AudioBus* audio_bus);
diff --git a/chromium/media/blink/webaudiosourceprovider_impl_unittest.cc b/chromium/media/blink/webaudiosourceprovider_impl_unittest.cc
index 5dc0701f77e..7f8178bbcb4 100644
--- a/chromium/media/blink/webaudiosourceprovider_impl_unittest.cc
+++ b/chromium/media/blink/webaudiosourceprovider_impl_unittest.cc
@@ -6,7 +6,6 @@
#include "base/bind.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "media/base/audio_parameters.h"
#include "media/base/fake_audio_render_callback.h"
@@ -67,7 +66,6 @@ class WebAudioSourceProviderImplTest
: params_(AudioParameters::AUDIO_PCM_LINEAR,
CHANNEL_LAYOUT_STEREO,
kSampleRate,
- 16,
64),
fake_callback_(0.1, kSampleRate),
mock_sink_(CreateWaspMockSink(GetParam())),
@@ -153,7 +151,6 @@ class WebAudioSourceProviderImplTest
scoped_refptr<MockAudioRendererSink> mock_sink_;
scoped_refptr<WebAudioSourceProviderImplUnderTest> wasp_impl_;
MockAudioRendererSink* expected_sink_;
- base::MessageLoop message_loop_;
DISALLOW_COPY_AND_ASSIGN(WebAudioSourceProviderImplTest);
};
diff --git a/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc b/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
index b8b99708505..4be9a0f7a3d 100644
--- a/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
+++ b/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
@@ -23,18 +23,14 @@
#include "media/blink/cdm_result_promise_helper.h"
#include "media/blink/cdm_session_adapter.h"
#include "media/blink/webmediaplayer_util.h"
+#include "media/cdm/cenc_utils.h"
#include "media/cdm/json_web_key.h"
-#include "media/media_buildflags.h"
#include "third_party/blink/public/platform/web_data.h"
#include "third_party/blink/public/platform/web_encrypted_media_key_information.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/public/platform/web_url.h"
#include "third_party/blink/public/platform/web_vector.h"
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
-#include "media/cdm/cenc_utils.h"
-#endif
-
namespace media {
namespace {
@@ -107,17 +103,12 @@ bool SanitizeInitData(EmeInitDataType init_data_type,
return true;
case EmeInitDataType::CENC:
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
sanitized_init_data->assign(init_data, init_data + init_data_length);
if (!ValidatePsshInput(*sanitized_init_data)) {
error_message->assign("Initialization data for CENC is incorrect.");
return false;
}
return true;
-#else
- error_message->assign("Initialization data type CENC is not supported.");
- return false;
-#endif
case EmeInitDataType::KEYIDS: {
// Extract the keys and then rebuild the message. This ensures that any
diff --git a/chromium/media/blink/webencryptedmediaclient_impl.cc b/chromium/media/blink/webencryptedmediaclient_impl.cc
index 193d91ba6c1..8e3f4de11e2 100644
--- a/chromium/media/blink/webencryptedmediaclient_impl.cc
+++ b/chromium/media/blink/webencryptedmediaclient_impl.cc
@@ -97,7 +97,6 @@ void WebEncryptedMediaClientImpl::RequestMediaKeySystemAccess(
key_system_config_selector_.SelectConfig(
request.KeySystem(), request.SupportedConfigurations(),
- request.GetSecurityOrigin(),
base::Bind(&WebEncryptedMediaClientImpl::OnRequestSucceeded,
weak_factory_.GetWeakPtr(), request),
base::Bind(&WebEncryptedMediaClientImpl::OnRequestNotSupported,
diff --git a/chromium/media/blink/webmediaplayer_delegate.h b/chromium/media/blink/webmediaplayer_delegate.h
index a429708d910..112177b27c2 100644
--- a/chromium/media/blink/webmediaplayer_delegate.h
+++ b/chromium/media/blink/webmediaplayer_delegate.h
@@ -5,13 +5,21 @@
#ifndef MEDIA_BLINK_WEBMEDIAPLAYER_DELEGATE_H_
#define MEDIA_BLINK_WEBMEDIAPLAYER_DELEGATE_H_
+#include "third_party/blink/public/platform/web_media_player.h"
+
namespace blink {
-class WebMediaPlayer;
enum class WebFullscreenVideoStatus;
-}
+class WebMediaPlayer;
+} // namespace blink
+
namespace gfx {
class Size;
-}
+} // namespace gfx
+
+namespace viz {
+class SurfaceId;
+} // namespace viz
+
namespace media {
enum class MediaContentType;
@@ -64,6 +72,10 @@ class WebMediaPlayerDelegate {
// Called to set as the persistent video. A persistent video should hide its
// controls and go fullscreen.
virtual void OnBecamePersistentVideo(bool value) = 0;
+
+ // Called when Picture-in-Picture mode is terminated from the
+ // Picture-in-Picture window.
+ virtual void OnPictureInPictureModeEnded() = 0;
};
// Returns true if the host frame is hidden or closed.
@@ -100,11 +112,28 @@ class WebMediaPlayerDelegate {
// Notify that the muted status of the media player has changed.
virtual void DidPlayerMutedStatusChange(int delegate_id, bool muted) = 0;
- // Notify that the source media player of Picture-in-Picture has changed.
- virtual void DidPictureInPictureSourceChange(int delegate_id) = 0;
+ // Notify that the source media player has entered Picture-in-Picture mode.
+ virtual void DidPictureInPictureModeStart(
+ int delegate_id,
+ const viz::SurfaceId&,
+ const gfx::Size&,
+ blink::WebMediaPlayer::PipWindowOpenedCallback) = 0;
// Notify that the source media player has exited Picture-in-Picture mode.
- virtual void DidPictureInPictureModeEnd(int delegate_id) = 0;
+ virtual void DidPictureInPictureModeEnd(int delegate_id,
+ base::OnceClosure) = 0;
+
+ // Notify that the media player in Picture-in-Picture had a change of surface.
+ virtual void DidPictureInPictureSurfaceChange(int delegate_id,
+ const viz::SurfaceId&,
+ const gfx::Size&) = 0;
+
+ // Registers a callback associated with a player that will be called when
+ // receiving a notification from the browser process that the
+ // Picture-in-Picture associated to this player has been resized.
+ virtual void RegisterPictureInPictureWindowResizeCallback(
+ int player_id,
+ blink::WebMediaPlayer::PipWindowResizedCallback) = 0;
// Notify that playback is stopped. This will drop wake locks and remove any
// external controls.
diff --git a/chromium/media/blink/webmediaplayer_impl.cc b/chromium/media/blink/webmediaplayer_impl.cc
index da046abcd62..a3fc8994fdc 100644
--- a/chromium/media/blink/webmediaplayer_impl.cc
+++ b/chromium/media/blink/webmediaplayer_impl.cc
@@ -28,12 +28,12 @@
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
-#include "cc/blink/web_layer_impl.h"
#include "cc/layers/video_layer.h"
#include "components/viz/common/gpu/context_provider.h"
#include "media/audio/null_audio_sink.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/cdm_context.h"
+#include "media/base/encryption_scheme.h"
#include "media/base/limits.h"
#include "media/base/media_content_type.h"
#include "media/base/media_log.h"
@@ -94,9 +94,8 @@ namespace {
void SetSinkIdOnMediaThread(scoped_refptr<WebAudioSourceProviderImpl> sink,
const std::string& device_id,
- const url::Origin& security_origin,
const OutputDeviceStatusCB& callback) {
- sink->SwitchOutputDevice(device_id, security_origin, callback);
+ sink->SwitchOutputDevice(device_id, callback);
}
bool IsBackgroundedSuspendEnabled() {
@@ -169,6 +168,21 @@ blink::WebLocalizedString::Name GetSwitchToLocalMessage(
return blink::WebLocalizedString::kMediaRemotingStopNoText;
}
+// These values are persisted to UMA. Entries should not be renumbered and
+// numeric values should never be reused.
+// TODO(crbug.com/825041): This should use EncryptionMode when kUnencrypted
+// removed.
+enum class EncryptionSchemeUMA { kCenc = 0, kCbcs = 1, kCount };
+
+EncryptionSchemeUMA DetermineEncryptionSchemeUMAValue(
+ const EncryptionScheme& encryption_scheme) {
+ if (encryption_scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CBC)
+ return EncryptionSchemeUMA::kCbcs;
+
+ DCHECK_EQ(encryption_scheme.mode(), EncryptionScheme::CIPHER_MODE_AES_CTR);
+ return EncryptionSchemeUMA::kCenc;
+}
+
} // namespace
class BufferedDataSourceHostImpl;
@@ -235,9 +249,7 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
surface_layer_for_video_enabled_(params->use_surface_layer_for_video()),
request_routing_token_cb_(params->request_routing_token_cb()),
overlay_routing_token_(OverlayInfo::RoutingToken()),
- media_metrics_provider_(params->take_metrics_provider()),
- pip_surface_info_cb_(params->pip_surface_info_cb()),
- exit_pip_cb_(params->exit_pip_cb()) {
+ media_metrics_provider_(params->take_metrics_provider()) {
DVLOG(1) << __func__;
DCHECK(!adjust_allocated_memory_cb_.is_null());
DCHECK(renderer_factory_selector_);
@@ -286,7 +298,7 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
observer_->SetClient(this);
memory_usage_reporting_timer_.SetTaskRunner(
- frame_->GetTaskRunner(blink::TaskType::kUnthrottled));
+ frame_->GetTaskRunner(blink::TaskType::kInternalMedia));
}
WebMediaPlayerImpl::~WebMediaPlayerImpl() {
@@ -314,15 +326,19 @@ WebMediaPlayerImpl::~WebMediaPlayerImpl() {
adjust_allocated_memory_cb_.Run(-last_reported_memory_usage_);
// Destruct compositor resources in the proper order.
- client_->SetWebLayer(nullptr);
+ client_->SetCcLayer(nullptr);
client_->MediaRemotingStopped(
blink::WebLocalizedString::kMediaRemotingStopNoText);
- ExitPictureInPicture();
+ // If running in Picture-in-Picture but not in auto-pip, notify the player.
+ if (client_->DisplayType() ==
+ WebMediaPlayer::DisplayType::kPictureInPicture &&
+ !client_->IsInAutoPIP())
+ ExitPictureInPicture(base::DoNothing());
- if (!surface_layer_for_video_enabled_ && video_weblayer_) {
- static_cast<cc::VideoLayer*>(video_weblayer_->layer())->StopUsingProvider();
+ if (!surface_layer_for_video_enabled_ && video_layer_) {
+ video_layer_->StopUsingProvider();
}
vfc_task_runner_->DeleteSoon(FROM_HERE, std::move(compositor_));
@@ -384,16 +400,15 @@ void WebMediaPlayerImpl::Load(LoadType load_type,
void WebMediaPlayerImpl::OnWebLayerUpdated() {}
-void WebMediaPlayerImpl::RegisterContentsLayer(blink::WebLayer* web_layer) {
+void WebMediaPlayerImpl::RegisterContentsLayer(cc::Layer* layer) {
DCHECK(bridge_);
- bridge_->GetWebLayer()->CcLayer()->SetContentsOpaque(opaque_);
- bridge_->GetWebLayer()->SetContentsOpaqueIsFixed(true);
- client_->SetWebLayer(web_layer);
+ bridge_->GetCcLayer()->SetContentsOpaque(opaque_);
+ client_->SetCcLayer(layer);
}
-void WebMediaPlayerImpl::UnregisterContentsLayer(blink::WebLayer* web_layer) {
- // |client_| will unregister its WebLayer if given a nullptr.
- client_->SetWebLayer(nullptr);
+void WebMediaPlayerImpl::UnregisterContentsLayer(cc::Layer* layer) {
+ // |client_| will unregister its cc::Layer if given a nullptr.
+ client_->SetCcLayer(nullptr);
}
void WebMediaPlayerImpl::OnSurfaceIdUpdated(viz::SurfaceId surface_id) {
@@ -401,8 +416,15 @@ void WebMediaPlayerImpl::OnSurfaceIdUpdated(viz::SurfaceId surface_id) {
// TODO(726619): Handle the behavior when Picture-in-Picture mode is
// disabled.
- if (client_ && client_->IsInPictureInPictureMode())
- pip_surface_info_cb_.Run(pip_surface_id_, pipeline_metadata_.natural_size);
+ // The viz::SurfaceId may be updated when the video begins playback or when
+ // the size of the video changes.
+ if (client_ &&
+ client_->DisplayType() ==
+ WebMediaPlayer::DisplayType::kPictureInPicture &&
+ !client_->IsInAutoPIP()) {
+ delegate_->DidPictureInPictureSurfaceChange(
+ delegate_id_, surface_id, pipeline_metadata_.natural_size);
+ }
}
bool WebMediaPlayerImpl::SupportsOverlayFullscreenVideo() {
@@ -776,44 +798,42 @@ void WebMediaPlayerImpl::SetVolume(double volume) {
UpdatePlayState();
}
-void WebMediaPlayerImpl::EnterPictureInPicture() {
- if (!pip_surface_id_.is_valid())
- return;
-
- pip_surface_info_cb_.Run(pip_surface_id_, pipeline_metadata_.natural_size);
+void WebMediaPlayerImpl::EnterPictureInPicture(
+ blink::WebMediaPlayer::PipWindowOpenedCallback callback) {
+ DCHECK(pip_surface_id_.is_valid());
- // Updates the MediaWebContentsObserver with |delegate_id_| to track which
- // media player is in Picture-in-Picture mode.
- delegate_->DidPictureInPictureSourceChange(delegate_id_);
-
- if (client_)
- client_->PictureInPictureStarted();
+ // Notifies the browser process that the player should now be in
+ // Picture-in-Picture mode.
+ delegate_->DidPictureInPictureModeStart(delegate_id_, pip_surface_id_,
+ pipeline_metadata_.natural_size,
+ std::move(callback));
}
-void WebMediaPlayerImpl::ExitPictureInPicture() {
- // TODO(apacible): Handle ending PiP from a user gesture. This currently
- // handles ending Picture-in-Picture mode from the source.
- // https://crbug.com/823172.
+void WebMediaPlayerImpl::ExitPictureInPicture(
+ blink::WebMediaPlayer::PipWindowClosedCallback callback) {
+ DCHECK(pip_surface_id_.is_valid());
- // Do not clear |pip_surface_id_| in case we enter Picture-in-Picture mode
- // again.
- if (!pip_surface_id_.is_valid())
- return;
+ // Notifies the browser process that Picture-in-Picture has ended. It will
+ // clear out the states and close the window.
+ delegate_->DidPictureInPictureModeEnd(delegate_id_, std::move(callback));
- // Signals that Picture-in-Picture has ended.
- exit_pip_cb_.Run();
+ // Internal cleanups.
+ OnPictureInPictureModeEnded();
+}
- // Updates the MediaWebContentsObserver with |delegate_id_| to clear the
- // tracked media player that is in Picture-in-Picture mode.
- delegate_->DidPictureInPictureModeEnd(delegate_id_);
+void WebMediaPlayerImpl::RegisterPictureInPictureWindowResizeCallback(
+ blink::WebMediaPlayer::PipWindowResizedCallback callback) {
+ DCHECK(pip_surface_id_.is_valid());
+ DCHECK(client_->DisplayType() ==
+ WebMediaPlayer::DisplayType::kPictureInPicture &&
+ !client_->IsInAutoPIP());
- if (client_)
- client_->PictureInPictureStopped();
+ delegate_->RegisterPictureInPictureWindowResizeCallback(delegate_id_,
+ std::move(callback));
}
void WebMediaPlayerImpl::SetSinkId(
const blink::WebString& sink_id,
- const blink::WebSecurityOrigin& security_origin,
blink::WebSetSinkIdCallbacks* web_callback) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
DVLOG(1) << __func__;
@@ -821,10 +841,8 @@ void WebMediaPlayerImpl::SetSinkId(
media::OutputDeviceStatusCB callback =
media::ConvertToOutputDeviceStatusCB(web_callback);
media_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&SetSinkIdOnMediaThread, audio_source_provider_,
- sink_id.Utf8(), static_cast<url::Origin>(security_origin),
- callback));
+ FROM_HERE, base::Bind(&SetSinkIdOnMediaThread, audio_source_provider_,
+ sink_id.Utf8(), callback));
}
STATIC_ASSERT_ENUM(WebMediaPlayer::kPreloadNone, MultibufferDataSource::NONE);
@@ -1596,7 +1614,15 @@ void WebMediaPlayerImpl::OnMetadata(PipelineMetadata metadata) {
metadata.video_decoder_config.video_rotation(),
VIDEO_ROTATION_MAX + 1);
+ if (HasAudio()) {
+ RecordEncryptionScheme("Audio",
+ metadata.audio_decoder_config.encryption_scheme());
+ }
+
if (HasVideo()) {
+ RecordEncryptionScheme("Video",
+ metadata.video_decoder_config.encryption_scheme());
+
if (overlay_enabled_) {
// SurfaceView doesn't support rotated video, so transition back if
// the video is now rotated. If |always_enable_overlays_|, we keep the
@@ -1610,20 +1636,21 @@ void WebMediaPlayerImpl::OnMetadata(PipelineMetadata metadata) {
}
if (!surface_layer_for_video_enabled_) {
- DCHECK(!video_weblayer_);
- video_weblayer_.reset(new cc_blink::WebLayerImpl(cc::VideoLayer::Create(
+ DCHECK(!video_layer_);
+ video_layer_ = cc::VideoLayer::Create(
compositor_.get(),
- pipeline_metadata_.video_decoder_config.video_rotation())));
- video_weblayer_->layer()->SetContentsOpaque(opaque_);
- video_weblayer_->SetContentsOpaqueIsFixed(true);
- client_->SetWebLayer(video_weblayer_.get());
+ pipeline_metadata_.video_decoder_config.video_rotation());
+ video_layer_->SetContentsOpaque(opaque_);
+ client_->SetCcLayer(video_layer_.get());
} else {
vfc_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&VideoFrameCompositor::EnableSubmission,
base::Unretained(compositor_.get()), bridge_->GetFrameSinkId(),
- pipeline_metadata_.video_decoder_config.video_rotation()));
+ pipeline_metadata_.video_decoder_config.video_rotation(),
+ BindToCurrentLoop(base::BindRepeating(
+ &WebMediaPlayerImpl::OnFrameSinkDestroyed, AsWeakPtr()))));
}
}
@@ -1639,6 +1666,10 @@ void WebMediaPlayerImpl::OnMetadata(PipelineMetadata metadata) {
UpdatePlayState();
}
+void WebMediaPlayerImpl::OnFrameSinkDestroyed() {
+ bridge_->ClearSurfaceId();
+}
+
void WebMediaPlayerImpl::OnBufferingStateChange(BufferingState state) {
OnBufferingStateChangeInternal(state, false);
}
@@ -1669,7 +1700,7 @@ void WebMediaPlayerImpl::CreateVideoDecodeStatsReporter() {
base::Bind(&WebMediaPlayerImpl::GetPipelineStatistics,
base::Unretained(this)),
pipeline_metadata_.video_decoder_config,
- frame_->GetTaskRunner(blink::TaskType::kUnthrottled)));
+ frame_->GetTaskRunner(blink::TaskType::kInternalMedia)));
if (delegate_->IsFrameHidden())
video_decode_stats_reporter_->OnHidden();
@@ -1753,7 +1784,7 @@ void WebMediaPlayerImpl::OnBufferingStateChangeInternal(
// Let the DataSource know we have enough data. It may use this information
// to release unused network connections.
- if (data_source_)
+ if (data_source_ && !client_->CouldPlayIfEnoughData())
data_source_->OnBufferingHaveEnough(false);
// Blink expects a timeChanged() in response to a seek().
@@ -1875,13 +1906,11 @@ void WebMediaPlayerImpl::OnVideoOpacityChange(bool opaque) {
DCHECK_NE(ready_state_, WebMediaPlayer::kReadyStateHaveNothing);
opaque_ = opaque;
- // Modify content opaqueness of cc::Layer directly so that
- // SetContentsOpaqueIsFixed is ignored.
if (!surface_layer_for_video_enabled_) {
- if (video_weblayer_)
- video_weblayer_->layer()->SetContentsOpaque(opaque_);
- } else if (bridge_->GetWebLayer()) {
- bridge_->GetWebLayer()->CcLayer()->SetContentsOpaque(opaque_);
+ if (video_layer_)
+ video_layer_->SetContentsOpaque(opaque_);
+ } else if (bridge_->GetCcLayer()) {
+ bridge_->GetCcLayer()->SetContentsOpaque(opaque_);
}
}
@@ -2069,6 +2098,17 @@ void WebMediaPlayerImpl::OnBecamePersistentVideo(bool value) {
client_->OnBecamePersistentVideo(value);
}
+void WebMediaPlayerImpl::OnPictureInPictureModeEnded() {
+ // This should never be called if |pip_surface_id_| is invalid. This is either
+ // called from the Picture-in-Picture window side by a user gesture to end
+ // Picture-in-Picture mode, or in ExitPictureInPicture(), which already checks
+ // for validity.
+ DCHECK(pip_surface_id_.is_valid());
+
+ if (client_)
+ client_->PictureInPictureStopped();
+}
+
void WebMediaPlayerImpl::ScheduleRestart() {
// TODO(watk): All restart logic should be moved into PipelineController.
if (pipeline_controller_.IsPipelineRunning() &&
@@ -2113,6 +2153,29 @@ void WebMediaPlayerImpl::OnRemotePlaybackEnded() {
client_->TimeChanged();
}
+void WebMediaPlayerImpl::FlingingStarted() {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ DCHECK(!disable_pipeline_auto_suspend_);
+ disable_pipeline_auto_suspend_ = true;
+
+ // Capabilities reporting should only be performed for local playbacks.
+ video_decode_stats_reporter_.reset();
+
+ // Requests to restart media pipeline. A flinging renderer will be created via
+ // the |renderer_factory_selector_|.
+ ScheduleRestart();
+}
+
+void WebMediaPlayerImpl::FlingingStopped() {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ DCHECK(disable_pipeline_auto_suspend_);
+ disable_pipeline_auto_suspend_ = false;
+
+ CreateVideoDecodeStatsReporter();
+
+ ScheduleRestart();
+}
+
void WebMediaPlayerImpl::OnDisconnectedFromRemoteDevice(double t) {
DoSeek(base::TimeDelta::FromSecondsD(t), false);
@@ -2143,15 +2206,15 @@ void WebMediaPlayerImpl::SuspendForRemote() {
gfx::Size WebMediaPlayerImpl::GetCanvasSize() const {
if (!surface_layer_for_video_enabled_) {
- if (!video_weblayer_)
+ if (!video_layer_)
return pipeline_metadata_.natural_size;
- return video_weblayer_->Bounds();
+ return video_layer_->bounds();
}
- if (!bridge_->GetWebLayer())
+ if (!bridge_->GetCcLayer())
return pipeline_metadata_.natural_size;
- return bridge_->GetWebLayer()->Bounds();
+ return bridge_->GetCcLayer()->bounds();
}
void WebMediaPlayerImpl::SetDeviceScaleFactor(float scale_factor) {
@@ -2799,12 +2862,12 @@ void WebMediaPlayerImpl::CreateWatchTimeReporter() {
pipeline_metadata_.audio_decoder_config.codec(),
pipeline_metadata_.video_decoder_config.codec(),
pipeline_metadata_.has_audio, pipeline_metadata_.has_video, false,
- !!chunk_demuxer_, is_encrypted_, embedded_media_experience_enabled_,
- pipeline_metadata_.natural_size),
+ false, !!chunk_demuxer_, is_encrypted_,
+ embedded_media_experience_enabled_, pipeline_metadata_.natural_size),
base::BindRepeating(&WebMediaPlayerImpl::GetCurrentTimeInternal,
base::Unretained(this)),
media_metrics_provider_.get(),
- frame_->GetTaskRunner(blink::TaskType::kUnthrottled)));
+ frame_->GetTaskRunner(blink::TaskType::kInternalMedia)));
watch_time_reporter_->OnVolumeChange(volume_);
if (delegate_->IsFrameHidden())
@@ -2908,6 +2971,10 @@ bool WebMediaPlayerImpl::ShouldDisableVideoWhenHidden() const {
bool WebMediaPlayerImpl::IsBackgroundOptimizationCandidate() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
+ // Don't optimize Picture-in-Picture players.
+ if (client_->DisplayType() == WebMediaPlayer::DisplayType::kPictureInPicture)
+ return false;
+
#if defined(OS_ANDROID) // WMPI_CAST
// Don't optimize players being Cast.
if (IsRemote())
@@ -3046,6 +3113,7 @@ void WebMediaPlayerImpl::ReportTimeFromForegroundToFirstFrame(
void WebMediaPlayerImpl::SwitchToRemoteRenderer(
const std::string& remote_device_friendly_name) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
+ DCHECK(!disable_pipeline_auto_suspend_);
disable_pipeline_auto_suspend_ = true;
// Capabilities reporting should only be performed for local playbacks.
@@ -3063,6 +3131,7 @@ void WebMediaPlayerImpl::SwitchToRemoteRenderer(
void WebMediaPlayerImpl::SwitchToLocalRenderer(
MediaObserverClient::ReasonToSwitchToLocal reason) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
+ DCHECK(disable_pipeline_auto_suspend_);
disable_pipeline_auto_suspend_ = false;
// Capabilities reporting may resume now that playback is local.
@@ -3140,4 +3209,19 @@ void WebMediaPlayerImpl::RecordTimingUMA(const std::string& key,
base::UmaHistogramMediumTimes(key + ".EME", elapsed);
}
+void WebMediaPlayerImpl::RecordEncryptionScheme(
+ const std::string& stream_name,
+ const EncryptionScheme& encryption_scheme) {
+ DCHECK(stream_name == "Audio" || stream_name == "Video");
+
+ // If the stream is not encrypted, don't record it.
+ if (encryption_scheme.mode() == EncryptionScheme::CIPHER_MODE_UNENCRYPTED)
+ return;
+
+ base::UmaHistogramEnumeration(
+ "Media.EME.EncryptionScheme.Initial." + stream_name,
+ DetermineEncryptionSchemeUMAValue(encryption_scheme),
+ EncryptionSchemeUMA::kCount);
+}
+
} // namespace media
diff --git a/chromium/media/blink/webmediaplayer_impl.h b/chromium/media/blink/webmediaplayer_impl.h
index 02dfddf97c4..1feaa9a1970 100644
--- a/chromium/media/blink/webmediaplayer_impl.h
+++ b/chromium/media/blink/webmediaplayer_impl.h
@@ -63,8 +63,8 @@ class SingleThreadTaskRunner;
class TaskRunner;
}
-namespace cc_blink {
-class WebLayerImpl;
+namespace cc {
+class VideoLayer;
}
namespace gpu {
@@ -76,6 +76,7 @@ class GLES2Interface;
namespace media {
class CdmContextRef;
class ChunkDemuxer;
+class EncryptionScheme;
class VideoDecodeStatsReporter;
class MediaLog;
class UrlIndex;
@@ -119,8 +120,8 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// WebSurfaceLayerBridgeObserver implementation.
void OnWebLayerUpdated() override;
- void RegisterContentsLayer(blink::WebLayer* web_layer) override;
- void UnregisterContentsLayer(blink::WebLayer* web_layer) override;
+ void RegisterContentsLayer(cc::Layer* layer) override;
+ void UnregisterContentsLayer(cc::Layer* layer) override;
void OnSurfaceIdUpdated(viz::SurfaceId surface_id) override;
void Load(LoadType load_type,
@@ -133,10 +134,13 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void Seek(double seconds) override;
void SetRate(double rate) override;
void SetVolume(double volume) override;
- void EnterPictureInPicture() override;
- void ExitPictureInPicture() override;
+ void EnterPictureInPicture(
+ blink::WebMediaPlayer::PipWindowOpenedCallback callback) override;
+ void ExitPictureInPicture(
+ blink::WebMediaPlayer::PipWindowClosedCallback callback) override;
+ void RegisterPictureInPictureWindowResizeCallback(
+ blink::WebMediaPlayer::PipWindowResizedCallback callback) override;
void SetSinkId(const blink::WebString& sink_id,
- const blink::WebSecurityOrigin& security_origin,
blink::WebSetSinkIdCallbacks* web_callback) override;
void SetPoster(const blink::WebURL& poster) override;
void SetPreload(blink::WebMediaPlayer::Preload preload) override;
@@ -238,13 +242,19 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void OnSeekBackward(double seconds) override;
void OnVolumeMultiplierUpdate(double multiplier) override;
void OnBecamePersistentVideo(bool value) override;
+ void OnPictureInPictureModeEnded() override;
void RequestRemotePlaybackDisabled(bool disabled) override;
#if defined(OS_ANDROID) // WMPI_CAST
+ // TODO(https://crbug.com/839651): Rename Flinging[Started/Stopped] to
+ // RemotePlayback[Started/Stopped] once the other RemotePlayback methods have
+ // been removed
bool IsRemote() const override;
void RequestRemotePlayback() override;
void RequestRemotePlaybackControl() override;
void RequestRemotePlaybackStop() override;
+ void FlingingStarted() override;
+ void FlingingStopped() override;
void SetMediaPlayerManager(
RendererMediaPlayerManagerInterface* media_player_manager);
@@ -333,6 +343,11 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void OnAudioDecoderChange(const std::string& name) override;
void OnVideoDecoderChange(const std::string& name) override;
+ // When we lose the context_provider, we destroy the CompositorFrameSink to
+ // prevent frames from being submitted. The current surface_ids become
+ // invalid.
+ void OnFrameSinkDestroyed();
+
// Actually seek. Avoids causing |should_notify_time_changed_| to be set when
// |time_updated| is false.
void DoSeek(base::TimeDelta time, bool time_updated);
@@ -561,6 +576,11 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// of |chunk_demuxer_|, while the EME one is only recorded if |is_encrypted_|.
void RecordTimingUMA(const std::string& key, base::TimeDelta elapsed);
+ // Records the encryption scheme used by the stream |stream_name|. This is
+ // only recorded when metadata is available.
+ void RecordEncryptionScheme(const std::string& stream_name,
+ const EncryptionScheme& encryption_scheme);
+
blink::WebLocalFrame* const frame_;
// The playback state last reported to |delegate_|, to avoid setting duplicate
@@ -707,7 +727,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// The compositor layer for displaying the video content when using composited
// playback.
- std::unique_ptr<cc_blink::WebLayerImpl> video_weblayer_;
+ scoped_refptr<cc::VideoLayer> video_layer_;
std::unique_ptr<blink::WebContentDecryptionModuleResult> set_cdm_result_;
@@ -903,12 +923,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// route the video to be shown in the Picture-in-Picture window.
viz::SurfaceId pip_surface_id_;
- // Callback to pass updated information about the current surface info.
- WebMediaPlayerParams::PipSurfaceInfoCB pip_surface_info_cb_;
-
- // Callback to signal Picture-in-Picture mode has ended.
- WebMediaPlayerParams::ExitPipCB exit_pip_cb_;
-
DISALLOW_COPY_AND_ASSIGN(WebMediaPlayerImpl);
};
diff --git a/chromium/media/blink/webmediaplayer_impl_unittest.cc b/chromium/media/blink/webmediaplayer_impl_unittest.cc
index c859fe076fa..d8d002d4abc 100644
--- a/chromium/media/blink/webmediaplayer_impl_unittest.cc
+++ b/chromium/media/blink/webmediaplayer_impl_unittest.cc
@@ -13,18 +13,16 @@
#include "base/command_line.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "base/task_runner_util.h"
#include "base/test/mock_callback.h"
#include "base/test/scoped_feature_list.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
-#include "cc/blink/web_layer_impl.h"
+#include "cc/layers/layer.h"
#include "components/viz/test/test_context_provider.h"
#include "media/base/decoder_buffer.h"
#include "media/base/gmock_callback_support.h"
@@ -42,12 +40,11 @@
#include "media/mojo/services/media_metrics_provider.h"
#include "media/mojo/services/video_decode_stats_recorder.h"
#include "media/mojo/services/watch_time_recorder.h"
+#include "media/renderers/default_decoder_factory.h"
#include "media/renderers/default_renderer_factory.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#include "third_party/blink/public/platform/scheduler/test/renderer_scheduler_test_support.h"
-#include "third_party/blink/public/platform/scheduler/web_main_thread_scheduler.h"
#include "third_party/blink/public/platform/web_fullscreen_video_status.h"
#include "third_party/blink/public/platform/web_media_player.h"
#include "third_party/blink/public/platform/web_media_player_client.h"
@@ -55,9 +52,7 @@
#include "third_party/blink/public/platform/web_security_origin.h"
#include "third_party/blink/public/platform/web_size.h"
#include "third_party/blink/public/platform/web_surface_layer_bridge.h"
-#include "third_party/blink/public/platform/web_thread.h"
#include "third_party/blink/public/platform/web_url_response.h"
-#include "third_party/blink/public/web/blink.h"
#include "third_party/blink/public/web/web_frame_client.h"
#include "third_party/blink/public/web/web_local_frame.h"
#include "third_party/blink/public/web/web_scoped_user_gesture.h"
@@ -137,7 +132,7 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
MOCK_METHOD0(DurationChanged, void());
MOCK_METHOD0(SizeChanged, void());
MOCK_METHOD0(PlaybackStateChanged, void());
- MOCK_METHOD1(SetWebLayer, void(blink::WebLayer*));
+ MOCK_METHOD1(SetCcLayer, void(cc::Layer*));
MOCK_METHOD5(AddAudioTrack,
blink::WebMediaPlayer::TrackId(
const blink::WebString&,
@@ -173,12 +168,12 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
MOCK_METHOD0(HasNativeControls, bool());
MOCK_METHOD0(IsAudioElement, bool());
MOCK_CONST_METHOD0(DisplayType, blink::WebMediaPlayer::DisplayType());
+ MOCK_CONST_METHOD0(IsInAutoPIP, bool());
MOCK_METHOD1(ActivateViewportIntersectionMonitoring, void(bool));
MOCK_METHOD1(MediaRemotingStarted, void(const blink::WebString&));
MOCK_METHOD1(MediaRemotingStopped, void(blink::WebLocalizedString::Name));
MOCK_METHOD0(PictureInPictureStarted, void());
MOCK_METHOD0(PictureInPictureStopped, void());
- MOCK_METHOD0(IsInPictureInPictureMode, bool());
MOCK_CONST_METHOD0(CouldPlayIfEnoughData, bool());
void set_is_autoplaying_muted(bool value) { is_autoplaying_muted_ = value; }
@@ -192,7 +187,7 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
class MockWebMediaPlayerDelegate : public WebMediaPlayerDelegate {
public:
MockWebMediaPlayerDelegate() = default;
- ~MockWebMediaPlayerDelegate() = default;
+ ~MockWebMediaPlayerDelegate() override = default;
// WebMediaPlayerDelegate implementation.
int AddObserver(Observer* observer) override {
@@ -225,8 +220,17 @@ class MockWebMediaPlayerDelegate : public WebMediaPlayerDelegate {
DCHECK_EQ(player_id_, delegate_id);
}
- MOCK_METHOD1(DidPictureInPictureSourceChange, void(int));
- MOCK_METHOD1(DidPictureInPictureModeEnd, void(int));
+ MOCK_METHOD4(DidPictureInPictureModeStart,
+ void(int,
+ const viz::SurfaceId&,
+ const gfx::Size&,
+ blink::WebMediaPlayer::PipWindowOpenedCallback));
+ MOCK_METHOD2(DidPictureInPictureModeEnd,
+ void(int, blink::WebMediaPlayer::PipWindowClosedCallback));
+ MOCK_METHOD3(DidPictureInPictureSurfaceChange,
+ void(int, const viz::SurfaceId&, const gfx::Size&));
+ MOCK_METHOD2(RegisterPictureInPictureWindowResizeCallback,
+ void(int, blink::WebMediaPlayer::PipWindowResizedCallback));
void ClearStaleFlag(int player_id) override {
DCHECK_EQ(player_id_, player_id);
@@ -286,8 +290,9 @@ class MockWebMediaPlayerDelegate : public WebMediaPlayerDelegate {
class MockSurfaceLayerBridge : public blink::WebSurfaceLayerBridge {
public:
- MOCK_CONST_METHOD0(GetWebLayer, blink::WebLayer*());
+ MOCK_CONST_METHOD0(GetCcLayer, cc::Layer*());
MOCK_CONST_METHOD0(GetFrameSinkId, const viz::FrameSinkId&());
+ MOCK_METHOD0(ClearSurfaceId, void());
};
class MockVideoFrameCompositor : public VideoFrameCompositor {
@@ -295,58 +300,15 @@ class MockVideoFrameCompositor : public VideoFrameCompositor {
MockVideoFrameCompositor(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner)
: VideoFrameCompositor(task_runner, nullptr) {}
- ~MockVideoFrameCompositor() = default;
+ ~MockVideoFrameCompositor() override = default;
// MOCK_METHOD doesn't like OnceCallback.
- void SetOnNewProcessedFrameCallback(OnNewProcessedFrameCB cb) {}
+ void SetOnNewProcessedFrameCallback(OnNewProcessedFrameCB cb) override {}
MOCK_METHOD0(GetCurrentFrameAndUpdateIfStale, scoped_refptr<VideoFrame>());
- MOCK_METHOD2(EnableSubmission,
- void(const viz::FrameSinkId&, media::VideoRotation));
-};
-
-// We must use a custom blink::Platform that ensures the main thread scheduler
-// knows about the ScopedTaskEnvironment; the default one is not setup with a
-// ScopedTaskEnvironment to allow other tests to use mock MessageLoops.
-class BlinkPlatformWithTaskEnvironment : public blink::Platform {
- public:
- BlinkPlatformWithTaskEnvironment()
- : original_platform_(blink::Platform::Current()),
- scoped_task_environment_(
- std::make_unique<base::test::ScopedTaskEnvironment>()),
- main_thread_scheduler_(
- blink::scheduler::CreateWebMainThreadSchedulerForTests()),
- main_thread_(main_thread_scheduler_->CreateMainThread()) {
- DCHECK(original_platform_);
- blink::Platform::SetCurrentPlatformForTesting(this);
- }
-
- ~BlinkPlatformWithTaskEnvironment() override {
- main_thread_scheduler_->Shutdown();
- main_thread_.reset();
- main_thread_scheduler_.reset();
- scoped_task_environment_.reset();
-
- DCHECK_EQ(this, blink::Platform::Current());
- blink::Platform::SetCurrentPlatformForTesting(original_platform_);
- }
-
- blink::WebThread* CurrentThread() override {
- EXPECT_TRUE(main_thread_->IsCurrentThread());
- return main_thread_.get();
- }
-
- scoped_refptr<base::SingleThreadTaskRunner> task_runner() {
- return scoped_task_environment_->GetMainThreadTaskRunner();
- }
-
- private:
- blink::Platform* const original_platform_;
-
- // Must be constructed first; otherwise the main thread may stop pumping.
- std::unique_ptr<base::test::ScopedTaskEnvironment> scoped_task_environment_;
- std::unique_ptr<blink::scheduler::WebMainThreadScheduler>
- main_thread_scheduler_;
- std::unique_ptr<blink::WebThread> main_thread_;
+ MOCK_METHOD3(EnableSubmission,
+ void(const viz::FrameSinkId&,
+ media::VideoRotation,
+ blink::WebFrameSinkDestroyedCallback));
};
class WebMediaPlayerImplTest : public testing::Test {
@@ -379,11 +341,12 @@ class WebMediaPlayerImplTest : public testing::Test {
ASSERT_FALSE(media_log_) << "Reinitialization of media_log_ is disallowed";
media_log_ = media_log.get();
+ decoder_factory_.reset(new media::DefaultDecoderFactory(nullptr));
auto factory_selector = std::make_unique<RendererFactorySelector>();
factory_selector->AddFactory(
RendererFactorySelector::FactoryType::DEFAULT,
std::make_unique<DefaultRendererFactory>(
- media_log.get(), nullptr,
+ media_log.get(), decoder_factory_.get(),
DefaultRendererFactory::GetGpuFactoriesCB()));
factory_selector->SetBaseFactoryType(
RendererFactorySelector::FactoryType::DEFAULT);
@@ -402,8 +365,8 @@ class WebMediaPlayerImplTest : public testing::Test {
auto params = std::make_unique<WebMediaPlayerParams>(
std::move(media_log), WebMediaPlayerParams::DeferLoadCB(), audio_sink_,
- media_thread_.task_runner(), platform_.task_runner(),
- platform_.task_runner(), media_thread_.task_runner(),
+ media_thread_.task_runner(), base::ThreadTaskRunnerHandle::Get(),
+ base::ThreadTaskRunnerHandle::Get(), media_thread_.task_runner(),
base::BindRepeating(&WebMediaPlayerImplTest::OnAdjustAllocatedMemory,
base::Unretained(this)),
nullptr, nullptr, RequestRoutingTokenCallback(), nullptr,
@@ -414,9 +377,7 @@ class WebMediaPlayerImplTest : public testing::Test {
&WebMediaPlayerImplTest::CreateMockSurfaceLayerBridge,
base::Unretained(this)),
viz::TestContextProvider::Create(),
- base::FeatureList::IsEnabled(media::kUseSurfaceLayerForVideo),
- base::BindRepeating(pip_surface_info_cb_.Get()),
- base::BindRepeating(exit_pip_cb_.Get()));
+ base::FeatureList::IsEnabled(media::kUseSurfaceLayerForVideo));
auto compositor = std::make_unique<StrictMock<MockVideoFrameCompositor>>(
params->video_frame_compositor_task_runner());
@@ -433,7 +394,7 @@ class WebMediaPlayerImplTest : public testing::Test {
}
~WebMediaPlayerImplTest() override {
- EXPECT_CALL(client_, SetWebLayer(nullptr));
+ EXPECT_CALL(client_, SetCcLayer(nullptr));
EXPECT_CALL(client_, MediaRemotingStopped(_));
// Destruct WebMediaPlayerImpl and pump the message loop to ensure that
// objects passed to the message loop for destruction are released.
@@ -660,23 +621,43 @@ class WebMediaPlayerImplTest : public testing::Test {
// Copy over the file data and indicate that's everything.
client->DidReceiveData(reinterpret_cast<const char*>(data->data()),
data->data_size());
- client->DidFinishLoading(0);
-
- // This runs until we reach the have metadata state.
- base::RunLoop loop;
- EXPECT_CALL(client_, ReadyStateChanged())
- .WillOnce(RunClosure(loop.QuitClosure()));
- loop.Run();
+ client->DidFinishLoading();
+
+ // This runs until we reach the have current data state. Attempting to wait
+ // for states < kReadyStateHaveCurrentData is unreliable due to asynchronous
+ // execution of tasks on the base::test:ScopedTaskEnvironment.
+ while (wmpi_->GetReadyState() <
+ blink::WebMediaPlayer::kReadyStateHaveCurrentData) {
+ base::RunLoop loop;
+ EXPECT_CALL(client_, ReadyStateChanged())
+ .WillRepeatedly(RunClosure(loop.QuitClosure()));
+ loop.Run();
+
+ // Clear the mock so it doesn't have a stale QuitClosure.
+ testing::Mock::VerifyAndClearExpectations(&client_);
+ }
- // Verify we made it to pipeline startup.
- EXPECT_EQ(blink::WebMediaPlayer::kReadyStateHaveMetadata,
- wmpi_->GetReadyState());
+ // Verify we made it through pipeline startup.
EXPECT_TRUE(wmpi_->data_source_);
EXPECT_TRUE(wmpi_->demuxer_);
- EXPECT_TRUE(wmpi_->seeking_);
+ EXPECT_FALSE(wmpi_->seeking_);
}
- BlinkPlatformWithTaskEnvironment platform_;
+ void CycleThreads() {
+ // Ensure any tasks waiting to be posted to the media thread are posted.
+ base::RunLoop().RunUntilIdle();
+
+ // Cycle media thread.
+ {
+ base::RunLoop loop;
+ media_thread_.task_runner()->PostTaskAndReply(
+ FROM_HERE, base::DoNothing(), loop.QuitClosure());
+ loop.Run();
+ }
+
+ // Cycle anything that was posted back from the media thread.
+ base::RunLoop().RunUntilIdle();
+ }
// "Media" thread. This is necessary because WMPI destruction waits on a
// WaitableEvent.
@@ -722,15 +703,12 @@ class WebMediaPlayerImplTest : public testing::Test {
// Total memory in bytes allocated by the WebMediaPlayerImpl instance.
int64_t reported_memory_ = 0;
+ // default decoder factory for WMPI
+ std::unique_ptr<DecoderFactory> decoder_factory_;
+
// The WebMediaPlayerImpl instance under test.
std::unique_ptr<WebMediaPlayerImpl> wmpi_;
- // Callback used for updating Picture-in-Picture about new Surface info.
- base::MockCallback<WebMediaPlayerParams::PipSurfaceInfoCB>
- pip_surface_info_cb_;
-
- base::MockCallback<WebMediaPlayerParams::ExitPipCB> exit_pip_cb_;
-
private:
DISALLOW_COPY_AND_ASSIGN(WebMediaPlayerImplTest);
};
@@ -744,9 +722,10 @@ TEST_F(WebMediaPlayerImplTest, ConstructAndDestroy) {
TEST_F(WebMediaPlayerImplTest, LoadAndDestroy) {
InitializeWebMediaPlayerImpl();
EXPECT_FALSE(IsSuspended());
+ wmpi_->SetPreload(blink::WebMediaPlayer::kPreloadAuto);
LoadAndWaitForMetadata(kAudioOnlyTestFile);
EXPECT_FALSE(IsSuspended());
- base::RunLoop().RunUntilIdle();
+ CycleThreads();
// The data source contains the entire file, so subtract it from the memory
// usage to ensure we're getting audio buffer and demuxer usage too.
@@ -756,23 +735,14 @@ TEST_F(WebMediaPlayerImplTest, LoadAndDestroy) {
}
// Verify that preload=metadata suspend works properly.
-// Flaky on Linux MSan and TSan. http://crbug.com/831566.
-#if defined(OS_LINUX) && \
- (defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER))
-#define MAYBE_LoadPreloadMetadataSuspend DISABLED_LoadPreloadMetadataSuspend
-#else
-#define MAYBE_LoadPreloadMetadataSuspend LoadPreloadMetadataSuspend
-#endif
-TEST_F(WebMediaPlayerImplTest, MAYBE_LoadPreloadMetadataSuspend) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(media::kPreloadMetadataSuspend);
+TEST_F(WebMediaPlayerImplTest, LoadPreloadMetadataSuspend) {
InitializeWebMediaPlayerImpl();
EXPECT_CALL(client_, CouldPlayIfEnoughData()).WillRepeatedly(Return(false));
wmpi_->SetPreload(blink::WebMediaPlayer::kPreloadMetaData);
LoadAndWaitForMetadata(kAudioOnlyTestFile);
testing::Mock::VerifyAndClearExpectations(&client_);
EXPECT_CALL(client_, ReadyStateChanged()).Times(AnyNumber());
- base::RunLoop().RunUntilIdle();
+ CycleThreads();
EXPECT_TRUE(IsSuspended());
// The data source contains the entire file, so subtract it from the memory
@@ -784,8 +754,6 @@ TEST_F(WebMediaPlayerImplTest, MAYBE_LoadPreloadMetadataSuspend) {
// Verify that preload=metadata suspend video w/ poster uses zero video memory.
TEST_F(WebMediaPlayerImplTest, LoadPreloadMetadataSuspendNoVideoMemoryUsage) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(media::kPreloadMetadataSuspend);
InitializeWebMediaPlayerImpl();
EXPECT_CALL(client_, CouldPlayIfEnoughData()).WillRepeatedly(Return(false));
wmpi_->SetPreload(blink::WebMediaPlayer::kPreloadMetaData);
@@ -793,7 +761,7 @@ TEST_F(WebMediaPlayerImplTest, LoadPreloadMetadataSuspendNoVideoMemoryUsage) {
LoadAndWaitForMetadata("bear-320x240-video-only.webm");
testing::Mock::VerifyAndClearExpectations(&client_);
EXPECT_CALL(client_, ReadyStateChanged()).Times(AnyNumber());
- base::RunLoop().RunUntilIdle();
+ CycleThreads();
EXPECT_TRUE(IsSuspended());
// The data source contains the entire file, so subtract it from the memory
@@ -806,8 +774,6 @@ TEST_F(WebMediaPlayerImplTest, LoadPreloadMetadataSuspendNoVideoMemoryUsage) {
// Verify that preload=metadata suspend is aborted if we know the element will
// play as soon as we reach kReadyStateHaveFutureData.
TEST_F(WebMediaPlayerImplTest, LoadPreloadMetadataSuspendCouldPlay) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(media::kPreloadMetadataSuspend);
InitializeWebMediaPlayerImpl();
EXPECT_CALL(client_, CouldPlayIfEnoughData()).WillRepeatedly(Return(true));
wmpi_->SetPreload(blink::WebMediaPlayer::kPreloadMetaData);
@@ -1180,11 +1146,11 @@ TEST_F(WebMediaPlayerImplTest, NoStreams) {
InitializeWebMediaPlayerImpl();
PipelineMetadata metadata;
- EXPECT_CALL(client_, SetWebLayer(_)).Times(0);
+ EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
if (base::FeatureList::IsEnabled(media::kUseSurfaceLayerForVideo)) {
EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId()).Times(0);
- EXPECT_CALL(*compositor_, EnableSubmission(_, _)).Times(0);
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _)).Times(0);
}
// Nothing should happen. In particular, no assertions should fail.
@@ -1199,12 +1165,12 @@ TEST_F(WebMediaPlayerImplTest, NaturalSizeChange) {
metadata.natural_size = gfx::Size(320, 240);
if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
- EXPECT_CALL(client_, SetWebLayer(_)).Times(0);
+ EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId())
.WillOnce(ReturnRef(frame_sink_id_));
- EXPECT_CALL(*compositor_, EnableSubmission(_, _));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _));
} else {
- EXPECT_CALL(client_, SetWebLayer(NotNull()));
+ EXPECT_CALL(client_, SetCcLayer(NotNull()));
}
OnMetadata(metadata);
@@ -1224,12 +1190,12 @@ TEST_F(WebMediaPlayerImplTest, NaturalSizeChange_Rotated) {
metadata.natural_size = gfx::Size(320, 240);
if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
- EXPECT_CALL(client_, SetWebLayer(_)).Times(0);
+ EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId())
.WillOnce(ReturnRef(frame_sink_id_));
- EXPECT_CALL(*compositor_, EnableSubmission(_, _));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _));
} else {
- EXPECT_CALL(client_, SetWebLayer(NotNull()));
+ EXPECT_CALL(client_, SetCcLayer(NotNull()));
}
OnMetadata(metadata);
@@ -1250,12 +1216,12 @@ TEST_F(WebMediaPlayerImplTest, VideoLockedWhenPausedWhenHidden) {
metadata.video_decoder_config = TestVideoConfig::Normal();
if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
- EXPECT_CALL(client_, SetWebLayer(_)).Times(0);
+ EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId())
.WillOnce(ReturnRef(frame_sink_id_));
- EXPECT_CALL(*compositor_, EnableSubmission(_, _));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _));
} else {
- EXPECT_CALL(client_, SetWebLayer(NotNull()));
+ EXPECT_CALL(client_, SetCcLayer(NotNull()));
}
OnMetadata(metadata);
@@ -1324,12 +1290,12 @@ TEST_F(WebMediaPlayerImplTest, InfiniteDuration) {
metadata.natural_size = gfx::Size(400, 400);
if (base::FeatureList::IsEnabled(kUseSurfaceLayerForVideo)) {
- EXPECT_CALL(client_, SetWebLayer(_)).Times(0);
+ EXPECT_CALL(client_, SetCcLayer(_)).Times(0);
EXPECT_CALL(*surface_layer_bridge_ptr_, GetFrameSinkId())
.WillOnce(ReturnRef(frame_sink_id_));
- EXPECT_CALL(*compositor_, EnableSubmission(_, _));
+ EXPECT_CALL(*compositor_, EnableSubmission(_, _, _));
} else {
- EXPECT_CALL(client_, SetWebLayer(NotNull()));
+ EXPECT_CALL(client_, SetCcLayer(NotNull()));
}
OnMetadata(metadata);
@@ -1354,14 +1320,12 @@ TEST_F(WebMediaPlayerImplTest, SetContentsLayerGetsWebLayerFromBridge) {
InitializeWebMediaPlayerImpl();
- std::unique_ptr<cc_blink::WebLayerImpl> web_layer =
- std::make_unique<cc_blink::WebLayerImpl>();
- cc_blink::WebLayerImpl* web_layer_ptr = web_layer.get();
+ scoped_refptr<cc::Layer> layer = cc::Layer::Create();
- EXPECT_CALL(*surface_layer_bridge_ptr_, GetWebLayer())
- .WillRepeatedly(Return(web_layer_ptr));
- EXPECT_CALL(client_, SetWebLayer(Eq(web_layer_ptr)));
- wmpi_->RegisterContentsLayer(web_layer.get());
+ EXPECT_CALL(*surface_layer_bridge_ptr_, GetCcLayer())
+ .WillRepeatedly(Return(layer.get()));
+ EXPECT_CALL(client_, SetCcLayer(Eq(layer.get())));
+ wmpi_->RegisterContentsLayer(layer.get());
}
TEST_F(WebMediaPlayerImplTest, PlaybackRateChangeMediaLogs) {
@@ -1385,31 +1349,32 @@ TEST_F(WebMediaPlayerImplTest, PlaybackRateChangeMediaLogs) {
TEST_F(WebMediaPlayerImplTest, PictureInPictureTriggerCallback) {
InitializeWebMediaPlayerImpl();
- // These calls should do nothing since there is no SurfaceId set.
- wmpi_->EnterPictureInPicture();
- wmpi_->ExitPictureInPicture();
+ EXPECT_CALL(client_, DisplayType())
+ .WillRepeatedly(
+ Return(blink::WebMediaPlayer::DisplayType::kPictureInPicture));
+ EXPECT_CALL(delegate_,
+ DidPictureInPictureSurfaceChange(delegate_.player_id(),
+ surface_id_, GetNaturalSize()))
+ .Times(2);
- EXPECT_CALL(client_, IsInPictureInPictureMode());
wmpi_->OnSurfaceIdUpdated(surface_id_);
- testing::Mock::VerifyAndClearExpectations(&client_);
EXPECT_CALL(delegate_,
- DidPictureInPictureSourceChange(delegate_.player_id()));
- EXPECT_CALL(pip_surface_info_cb_, Run(surface_id_, GetNaturalSize()));
- // This call should trigger the callback since the SurfaceId is set.
- wmpi_->EnterPictureInPicture();
- testing::Mock::VerifyAndClearExpectations(&client_);
+ DidPictureInPictureModeStart(delegate_.player_id(), surface_id_,
+ GetNaturalSize(), _));
+
+ wmpi_->EnterPictureInPicture(base::DoNothing());
+ wmpi_->OnSurfaceIdUpdated(surface_id_);
// Upon exiting Picture-in-Picture mode, functions to cleanup are expected to
// be called. ~WMPI calls ExitPictureInPicture().
- EXPECT_CALL(exit_pip_cb_, Run());
- EXPECT_CALL(delegate_, DidPictureInPictureModeEnd(delegate_.player_id()));
+ EXPECT_CALL(delegate_, DidPictureInPictureModeEnd(delegate_.player_id(), _));
}
class WebMediaPlayerImplBackgroundBehaviorTest
: public WebMediaPlayerImplTest,
public ::testing::WithParamInterface<
- std::tuple<bool, bool, int, int, bool, bool, bool>> {
+ std::tuple<bool, bool, int, int, bool, bool, bool, bool>> {
public:
// Indices of the tuple parameters.
static const int kIsMediaSuspendEnabled = 0;
@@ -1419,6 +1384,7 @@ class WebMediaPlayerImplBackgroundBehaviorTest
static const int kIsResumeBackgroundVideoEnabled = 4;
static const int kIsMediaSource = 5;
static const int kIsBackgroundPauseEnabled = 6;
+ static const int kIsPictureInPictureEnabled = 7;
void SetUp() override {
WebMediaPlayerImplTest::SetUp();
@@ -1461,6 +1427,15 @@ class WebMediaPlayerImplBackgroundBehaviorTest
SetVideoKeyframeDistanceAverage(
base::TimeDelta::FromSeconds(GetAverageKeyframeDistanceSec()));
SetDuration(base::TimeDelta::FromSeconds(GetDurationSec()));
+
+ if (IsPictureInPictureOn()) {
+ EXPECT_CALL(client_, DisplayType())
+ .WillRepeatedly(
+ Return(blink::WebMediaPlayer::DisplayType::kPictureInPicture));
+
+ wmpi_->OnSurfaceIdUpdated(surface_id_);
+ }
+
BackgroundPlayer();
}
@@ -1486,6 +1461,10 @@ class WebMediaPlayerImplBackgroundBehaviorTest
return std::get<kIsBackgroundPauseEnabled>(GetParam());
}
+ bool IsPictureInPictureOn() {
+ return std::get<kIsPictureInPictureEnabled>(GetParam());
+ }
+
int GetDurationSec() const { return std::get<kDurationSec>(GetParam()); }
int GetAverageKeyframeDistanceSec() const {
@@ -1539,12 +1518,14 @@ TEST_P(WebMediaPlayerImplBackgroundBehaviorTest, VideoOnly) {
// Never disable video track for a video only stream.
EXPECT_FALSE(ShouldDisableVideoWhenHidden());
- // Video only is always optimized.
- EXPECT_TRUE(IsBackgroundOptimizationCandidate());
+ // There's no optimization criteria for video only in Picture-in-Picture.
+ bool matches_requirements = !IsPictureInPictureOn();
+ EXPECT_EQ(matches_requirements, IsBackgroundOptimizationCandidate());
// Video is always paused when suspension is on and only if matches the
// optimization criteria if the optimization is on.
- bool should_pause = IsMediaSuspendOn() || IsBackgroundPauseOn();
+ bool should_pause =
+ IsMediaSuspendOn() || (IsBackgroundPauseOn() && matches_requirements);
EXPECT_EQ(should_pause, ShouldPauseVideoWhenHidden());
}
@@ -1553,8 +1534,9 @@ TEST_P(WebMediaPlayerImplBackgroundBehaviorTest, AudioVideo) {
// Optimization requirements are the same for all platforms.
bool matches_requirements =
- (GetDurationSec() < GetMaxKeyframeDistanceSec()) ||
- (GetAverageKeyframeDistanceSec() < GetMaxKeyframeDistanceSec());
+ !IsPictureInPictureOn() &&
+ ((GetDurationSec() < GetMaxKeyframeDistanceSec()) ||
+ (GetAverageKeyframeDistanceSec() < GetMaxKeyframeDistanceSec()));
EXPECT_EQ(matches_requirements, IsBackgroundOptimizationCandidate());
EXPECT_EQ(IsBackgroundOptimizationOn() && matches_requirements,
@@ -1594,6 +1576,7 @@ INSTANTIATE_TEST_CASE_P(BackgroundBehaviorTestInstances,
::testing::Values(5, 100),
::testing::Bool(),
::testing::Bool(),
+ ::testing::Bool(),
::testing::Bool()));
} // namespace media
diff --git a/chromium/media/blink/webmediaplayer_params.cc b/chromium/media/blink/webmediaplayer_params.cc
index 8c28983295d..99f28940d35 100644
--- a/chromium/media/blink/webmediaplayer_params.cc
+++ b/chromium/media/blink/webmediaplayer_params.cc
@@ -32,9 +32,7 @@ WebMediaPlayerParams::WebMediaPlayerParams(
base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
blink::WebSurfaceLayerBridgeObserver*)> create_bridge_callback,
scoped_refptr<viz::ContextProvider> context_provider,
- bool use_surface_layer_for_video,
- const PipSurfaceInfoCB& pip_surface_info_cb,
- const ExitPipCB& exit_pip_cb)
+ bool use_surface_layer_for_video)
: defer_load_cb_(defer_load_cb),
audio_renderer_sink_(audio_renderer_sink),
media_log_(std::move(media_log)),
@@ -56,9 +54,7 @@ WebMediaPlayerParams::WebMediaPlayerParams(
metrics_provider_(std::move(metrics_provider)),
create_bridge_callback_(create_bridge_callback),
context_provider_(std::move(context_provider)),
- use_surface_layer_for_video_(use_surface_layer_for_video),
- pip_surface_info_cb_(pip_surface_info_cb),
- exit_pip_cb_(exit_pip_cb) {}
+ use_surface_layer_for_video_(use_surface_layer_for_video) {}
WebMediaPlayerParams::~WebMediaPlayerParams() = default;
diff --git a/chromium/media/blink/webmediaplayer_params.h b/chromium/media/blink/webmediaplayer_params.h
index 5506eed7d94..680f433e589 100644
--- a/chromium/media/blink/webmediaplayer_params.h
+++ b/chromium/media/blink/webmediaplayer_params.h
@@ -34,10 +34,6 @@ class WebSurfaceLayerBridge;
class WebSurfaceLayerBridgeObserver;
} // namespace blink
-namespace viz {
-class SurfaceId;
-}
-
namespace media {
class SwitchableAudioRendererSink;
@@ -50,15 +46,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
typedef base::Callback<void(const base::Closure&)> DeferLoadCB;
typedef base::Callback<Context3D()> Context3DCB;
- // Callback to obtain the SurfaceInfo and natural size for the relevant video
- // to trigger Picture-in-Picture mode.
- using PipSurfaceInfoCB =
- base::RepeatingCallback<void(const viz::SurfaceId& surface_id,
- const gfx::Size& natural_size)>;
-
- // Callback to exit Picture-in-Picture.
- using ExitPipCB = base::RepeatingCallback<void()>;
-
// Callback to obtain the media ContextProvider.
// Requires being called on the media thread.
// The argument callback is also called on the media thread as a reply.
@@ -96,9 +83,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
base::Callback<std::unique_ptr<blink::WebSurfaceLayerBridge>(
blink::WebSurfaceLayerBridgeObserver*)> bridge_callback,
scoped_refptr<viz::ContextProvider> context_provider,
- bool use_surface_layer_for_video,
- const PipSurfaceInfoCB& surface_info_cb,
- const ExitPipCB& exit_pip_cb);
+ bool use_surface_layer_for_video);
~WebMediaPlayerParams();
@@ -181,12 +166,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
return use_surface_layer_for_video_;
}
- const PipSurfaceInfoCB pip_surface_info_cb() const {
- return pip_surface_info_cb_;
- }
-
- const ExitPipCB exit_pip_cb() const { return exit_pip_cb_; }
-
private:
DeferLoadCB defer_load_cb_;
scoped_refptr<SwitchableAudioRendererSink> audio_renderer_sink_;
@@ -212,8 +191,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
create_bridge_callback_;
scoped_refptr<viz::ContextProvider> context_provider_;
bool use_surface_layer_for_video_;
- PipSurfaceInfoCB pip_surface_info_cb_;
- ExitPipCB exit_pip_cb_;
DISALLOW_IMPLICIT_CONSTRUCTORS(WebMediaPlayerParams);
};
diff --git a/chromium/media/capture/BUILD.gn b/chromium/media/capture/BUILD.gn
index ec675c766bb..be5925b27b6 100644
--- a/chromium/media/capture/BUILD.gn
+++ b/chromium/media/capture/BUILD.gn
@@ -223,17 +223,30 @@ component("capture_lib") {
if (is_chromeos) {
sources += [
+ "video/chromeos/camera_3a_controller.cc",
+ "video/chromeos/camera_3a_controller.h",
"video/chromeos/camera_buffer_factory.cc",
+ "video/chromeos/camera_buffer_factory.h",
"video/chromeos/camera_device_context.cc",
+ "video/chromeos/camera_device_context.h",
"video/chromeos/camera_device_delegate.cc",
+ "video/chromeos/camera_device_delegate.h",
"video/chromeos/camera_hal_delegate.cc",
+ "video/chromeos/camera_hal_delegate.h",
"video/chromeos/camera_hal_dispatcher_impl.cc",
+ "video/chromeos/camera_hal_dispatcher_impl.h",
"video/chromeos/camera_metadata_utils.cc",
+ "video/chromeos/camera_metadata_utils.h",
"video/chromeos/display_rotation_observer.cc",
+ "video/chromeos/display_rotation_observer.h",
"video/chromeos/pixel_format_utils.cc",
+ "video/chromeos/pixel_format_utils.h",
"video/chromeos/stream_buffer_manager.cc",
+ "video/chromeos/stream_buffer_manager.h",
"video/chromeos/video_capture_device_chromeos_halv3.cc",
+ "video/chromeos/video_capture_device_chromeos_halv3.h",
"video/chromeos/video_capture_device_factory_chromeos.cc",
+ "video/chromeos/video_capture_device_factory_chromeos.h",
]
deps += [
"//chromeos:chromeos",
@@ -275,6 +288,8 @@ test("capture_unittests") {
"video/linux/camera_config_chromeos_unittest.cc",
"video/linux/v4l2_capture_delegate_unittest.cc",
"video/mac/video_capture_device_factory_mac_unittest.mm",
+ "video/mock_gpu_memory_buffer_manager.cc",
+ "video/mock_gpu_memory_buffer_manager.h",
"video/shared_memory_handle_provider_unittest.cc",
"video/video_capture_device_client_unittest.cc",
"video/video_capture_device_unittest.cc",
@@ -289,6 +304,7 @@ test("capture_unittests") {
":capture",
":test_support",
"//base/test:test_support",
+ "//gpu/command_buffer/client",
"//media:test_support",
"//media/capture/mojom:image_capture",
"//media/capture/mojom:image_capture_types",
@@ -328,13 +344,15 @@ test("capture_unittests") {
if (is_chromeos) {
sources += [
+ "video/chromeos/camera_3a_controller_unittest.cc",
"video/chromeos/camera_device_delegate_unittest.cc",
"video/chromeos/camera_hal_delegate_unittest.cc",
"video/chromeos/camera_hal_dispatcher_impl_unittest.cc",
"video/chromeos/local_gpu_memory_buffer_manager.cc",
"video/chromeos/mock_camera_module.cc",
- "video/chromeos/mock_gpu_memory_buffer_manager.cc",
+ "video/chromeos/mock_camera_module.h",
"video/chromeos/mock_video_capture_client.cc",
+ "video/chromeos/mock_video_capture_client.h",
"video/chromeos/stream_buffer_manager_unittest.cc",
]
deps += [
diff --git a/chromium/media/capture/content/OWNERS b/chromium/media/capture/content/OWNERS
index 980d5f2cd5a..e837b5fcb0d 100644
--- a/chromium/media/capture/content/OWNERS
+++ b/chromium/media/capture/content/OWNERS
@@ -1,3 +1,3 @@
miu@chromium.org
-# COMPONENT: UI>Browser>TabCapture
+# COMPONENT: Internals>Media>ScreenCapture
diff --git a/chromium/media/capture/content/screen_capture_device_core.cc b/chromium/media/capture/content/screen_capture_device_core.cc
index 38a03882a58..5ae39399c9e 100644
--- a/chromium/media/capture/content/screen_capture_device_core.cc
+++ b/chromium/media/capture/content/screen_capture_device_core.cc
@@ -44,8 +44,7 @@ void ScreenCaptureDeviceCore::AllocateAndStart(
return;
}
- if (params.requested_format.pixel_format != PIXEL_FORMAT_I420 ||
- params.requested_format.pixel_storage != VideoPixelStorage::CPU) {
+ if (params.requested_format.pixel_format != PIXEL_FORMAT_I420) {
client->OnError(
FROM_HERE,
base::StringPrintf(
diff --git a/chromium/media/capture/content/thread_safe_capture_oracle.cc b/chromium/media/capture/content/thread_safe_capture_oracle.cc
index 81be8964c3f..4d07bb7cdf6 100644
--- a/chromium/media/capture/content/thread_safe_capture_oracle.cc
+++ b/chromium/media/capture/content/thread_safe_capture_oracle.cc
@@ -101,8 +101,7 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
base::bits::Align(visible_size.height(), 16));
output_buffer = client_->ReserveOutputBuffer(
- coded_size, params_.requested_format.pixel_format,
- params_.requested_format.pixel_storage, frame_number);
+ coded_size, params_.requested_format.pixel_format, frame_number);
// Get the current buffer pool utilization and attenuate it: The utilization
// reported to the oracle is in terms of a maximum sustainable amount (not
@@ -137,8 +136,6 @@ bool ThreadSafeCaptureOracle::ObserveEventAndDecideCapture(
std::unique_ptr<VideoCaptureBufferHandle> output_buffer_access =
output_buffer.handle_provider->GetHandleForInProcessAccess();
- DCHECK_EQ(media::VideoPixelStorage::CPU,
- params_.requested_format.pixel_storage);
*storage = VideoFrame::WrapExternalSharedMemory(
params_.requested_format.pixel_format, coded_size,
gfx::Rect(visible_size), visible_size, output_buffer_access->data(),
@@ -241,9 +238,9 @@ void ThreadSafeCaptureOracle::DidCaptureFrame(
frame->metadata()->SetTimeTicks(VideoFrameMetadata::REFERENCE_TIME,
reference_time);
- media::VideoCaptureFormat format(
- frame->coded_size(), params_.requested_format.frame_rate, frame->format(),
- media::VideoPixelStorage::CPU);
+ media::VideoCaptureFormat format(frame->coded_size(),
+ params_.requested_format.frame_rate,
+ frame->format());
client_->OnIncomingCapturedBufferExt(
std::move(capture->buffer), format, reference_time, frame->timestamp(),
frame->visible_rect(), *frame->metadata());
diff --git a/chromium/media/capture/ipc/capture_param_traits.cc b/chromium/media/capture/ipc/capture_param_traits.cc
index 769dc63a26a..b55e80f764c 100644
--- a/chromium/media/capture/ipc/capture_param_traits.cc
+++ b/chromium/media/capture/ipc/capture_param_traits.cc
@@ -22,7 +22,6 @@ void ParamTraits<VideoCaptureFormat>::Write(base::Pickle* m,
WriteParam(m, p.frame_size);
WriteParam(m, p.frame_rate);
WriteParam(m, p.pixel_format);
- WriteParam(m, p.pixel_storage);
}
bool ParamTraits<VideoCaptureFormat>::Read(const base::Pickle* m,
@@ -30,8 +29,7 @@ bool ParamTraits<VideoCaptureFormat>::Read(const base::Pickle* m,
VideoCaptureFormat* r) {
if (!ReadParam(m, iter, &r->frame_size) ||
!ReadParam(m, iter, &r->frame_rate) ||
- !ReadParam(m, iter, &r->pixel_format) ||
- !ReadParam(m, iter, &r->pixel_storage)) {
+ !ReadParam(m, iter, &r->pixel_format)) {
return false;
}
return r->IsValid();
diff --git a/chromium/media/capture/ipc/capture_param_traits_macros.h b/chromium/media/capture/ipc/capture_param_traits_macros.h
index 740deebfefa..30d8f9396d4 100644
--- a/chromium/media/capture/ipc/capture_param_traits_macros.h
+++ b/chromium/media/capture/ipc/capture_param_traits_macros.h
@@ -9,9 +9,6 @@
#include "media/capture/video/video_capture_device_descriptor.h"
#include "media/capture/video_capture_types.h"
-IPC_ENUM_TRAITS_MAX_VALUE(media::VideoPixelStorage,
- media::VideoPixelStorage::MAX)
-
IPC_STRUCT_TRAITS_BEGIN(media::VideoCaptureDeviceDescriptor::CameraCalibration)
IPC_STRUCT_TRAITS_MEMBER(focal_length_x)
IPC_STRUCT_TRAITS_MEMBER(focal_length_y)
diff --git a/chromium/media/capture/mojom/BUILD.gn b/chromium/media/capture/mojom/BUILD.gn
index 93828b661b8..6fc162b6f8d 100644
--- a/chromium/media/capture/mojom/BUILD.gn
+++ b/chromium/media/capture/mojom/BUILD.gn
@@ -11,7 +11,7 @@ mojom("video_capture") {
]
public_deps = [
- "//media/mojo/interfaces",
+ "//gpu/ipc/common:interfaces",
"//mojo/public/mojom/base",
"//ui/gfx/geometry/mojo",
]
diff --git a/chromium/media/capture/mojom/video_capture.mojom b/chromium/media/capture/mojom/video_capture.mojom
index 3d7d80ea16b..6926112fa46 100644
--- a/chromium/media/capture/mojom/video_capture.mojom
+++ b/chromium/media/capture/mojom/video_capture.mojom
@@ -4,7 +4,6 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
import "media/capture/mojom/video_capture_types.mojom";
import "ui/gfx/geometry/mojo/geometry.mojom";
@@ -21,8 +20,8 @@ import "ui/gfx/geometry/mojo/geometry.mojom";
// Observer VideoCaptureHost
// | ---> StartCapture |
// | OnStateChanged(STARTED) <--- |
-// | OnBufferCreated(1) <--- |
-// | OnBufferCreated(2) <--- |
+// | OnNewBuffer(1) <--- |
+// | OnNewBuffer(2) <--- |
// = =
// and capture will then refer to those preallocated buffers:
// | OnBufferReady(1) <--- |
@@ -37,11 +36,11 @@ import "ui/gfx/geometry/mojo/geometry.mojom";
// Buffers can be reallocated with a larger size, if e.g. resolution changes.
// | (resolution change) |
// | OnBufferDestroyed(1) <--- |
-// | OnBufferCreated(3) <--- |
+// | OnNewBuffer(3) <--- |
// | OnBufferReady(3) <--- |
// | ---> ReleaseBuffer(2) |
// | OnBufferDestroyed(2) <--- |
-// | OnBufferCreated(5) <--- |
+// | OnNewBuffer(5) <--- |
// | OnBufferReady(5) <--- |
// = =
// In the communication epilogue, the client Stop()s capture, receiving a last
@@ -64,14 +63,17 @@ interface VideoCaptureObserver {
// Gets notified about a VideoCaptureState update.
OnStateChanged(VideoCaptureState state);
- // A new buffer identified by |buffer_id| has been created for video capture.
- OnBufferCreated(int32 buffer_id, handle<shared_buffer> handle_fd);
+ // Registers a |buffer_handle| at the Renderer/Client using the given
+ // |buffer_id|. The Browser/Host may subsequently use |buffer_id| to share
+ // video frames via calls to OnBufferReady().
+ OnNewBuffer(int32 buffer_id, media.mojom.VideoBufferHandle buffer_handle);
// |buffer_id| has video capture data with |info| containing the associated
// VideoFrame constituent parts.
OnBufferReady(int32 buffer_id, VideoFrameInfo info);
- // |buffer_id| has been released by VideoCaptureHost and must not be used.
+  // The buffer handle previously registered for |buffer_id| via OnNewBuffer()
+  // is no longer going to be used by the Browser/Host.
OnBufferDestroyed(int32 buffer_id);
};
diff --git a/chromium/media/capture/mojom/video_capture_types.mojom b/chromium/media/capture/mojom/video_capture_types.mojom
index caeef7bb7b1..414b5ca327f 100644
--- a/chromium/media/capture/mojom/video_capture_types.mojom
+++ b/chromium/media/capture/mojom/video_capture_types.mojom
@@ -4,11 +4,40 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "gpu/ipc/common/mailbox_holder.mojom";
import "mojo/public/mojom/base/time.mojom";
import "mojo/public/mojom/base/values.mojom";
import "ui/gfx/geometry/mojo/geometry.mojom";
+enum VideoCapturePixelFormat {
+ UNKNOWN,
+ I420,
+ YV12,
+ I422,
+ I420A,
+ I444,
+ NV12,
+ NV21,
+ UYVY,
+ YUY2,
+ ARGB,
+ XRGB,
+ RGB24,
+ RGB32,
+ MJPEG,
+ MT21,
+ YUV420P9,
+ YUV420P10,
+ YUV422P9,
+ YUV422P10,
+ YUV444P9,
+ YUV444P10,
+ YUV420P12,
+ YUV422P12,
+ YUV444P12,
+ Y16
+};
+
enum ResolutionChangePolicy {
FIXED_RESOLUTION,
FIXED_ASPECT_RATIO,
@@ -21,11 +50,6 @@ enum PowerLineFrequency {
HZ_60
};
-enum VideoPixelStorage {
- CPU,
- GPUMEMORYBUFFER
-};
-
enum VideoCaptureApi {
LINUX_V4L2_SINGLE_PLANE,
WIN_MEDIA_FOUNDATION,
@@ -49,8 +73,7 @@ enum VideoCaptureTransportType {
struct VideoCaptureFormat {
gfx.mojom.Size frame_size;
float frame_rate;
- VideoPixelFormat pixel_format;
- VideoPixelStorage pixel_storage;
+ VideoCapturePixelFormat pixel_format;
};
struct VideoCaptureParams {
@@ -62,8 +85,7 @@ struct VideoCaptureParams {
struct VideoFrameInfo{
mojo_base.mojom.TimeDelta timestamp;
mojo_base.mojom.DictionaryValue metadata;
- VideoPixelFormat pixel_format;
- VideoPixelStorage storage_type;
+ VideoCapturePixelFormat pixel_format;
gfx.mojom.Size coded_size;
gfx.mojom.Rect visible_rect;
};
@@ -88,3 +110,14 @@ struct VideoCaptureDeviceInfo {
VideoCaptureDeviceDescriptor descriptor;
array<VideoCaptureFormat> supported_formats;
};
+
+struct MailboxBufferHandleSet {
+ // Size must be kept in sync with media::VideoFrame::kMaxPlanes.
+ array<gpu.mojom.MailboxHolder, 4> mailbox_holder;
+};
+
+union VideoBufferHandle {
+ handle<shared_buffer> shared_buffer_handle;
+ MailboxBufferHandleSet mailbox_handles;
+};
+
diff --git a/chromium/media/capture/mojom/video_capture_types.typemap b/chromium/media/capture/mojom/video_capture_types.typemap
index d0a97c0e32d..27b85993df2 100644
--- a/chromium/media/capture/mojom/video_capture_types.typemap
+++ b/chromium/media/capture/mojom/video_capture_types.typemap
@@ -32,8 +32,8 @@ deps = [
type_mappings = [
"media.mojom.ResolutionChangePolicy=media::ResolutionChangePolicy",
"media.mojom.PowerLineFrequency=media::PowerLineFrequency",
+ "media.mojom.VideoCapturePixelFormat=media::VideoPixelFormat",
"media.mojom.VideoCaptureFormat=media::VideoCaptureFormat",
- "media.mojom.VideoPixelStorage=media::VideoPixelStorage",
"media.mojom.VideoCaptureParams=media::VideoCaptureParams",
"media.mojom.VideoCaptureDeviceDescriptorCameraCalibration=media::VideoCaptureDeviceDescriptor::CameraCalibration",
"media.mojom.VideoCaptureDeviceDescriptor=media::VideoCaptureDeviceDescriptor",
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
index c6a4068085c..396b0139456 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
@@ -11,23 +11,6 @@
namespace mojo {
// static
-media::mojom::VideoPixelStorage
-EnumTraits<media::mojom::VideoPixelStorage, media::VideoPixelStorage>::ToMojom(
- media::VideoPixelStorage video_pixel_storage) {
- DCHECK_EQ(media::VideoPixelStorage::CPU, video_pixel_storage);
- return media::mojom::VideoPixelStorage::CPU;
-}
-
-// static
-bool EnumTraits<media::mojom::VideoPixelStorage, media::VideoPixelStorage>::
- FromMojom(media::mojom::VideoPixelStorage input,
- media::VideoPixelStorage* out) {
- DCHECK_EQ(media::mojom::VideoPixelStorage::CPU, input);
- *out = media::VideoPixelStorage::CPU;
- return true;
-}
-
-// static
media::mojom::ResolutionChangePolicy
EnumTraits<media::mojom::ResolutionChangePolicy,
media::ResolutionChangePolicy>::ToMojom(media::ResolutionChangePolicy
@@ -100,6 +83,157 @@ bool EnumTraits<media::mojom::PowerLineFrequency, media::PowerLineFrequency>::
}
// static
+media::mojom::VideoCapturePixelFormat
+EnumTraits<media::mojom::VideoCapturePixelFormat,
+ media::VideoPixelFormat>::ToMojom(media::VideoPixelFormat input) {
+ switch (input) {
+ case media::VideoPixelFormat::PIXEL_FORMAT_UNKNOWN:
+ return media::mojom::VideoCapturePixelFormat::UNKNOWN;
+ case media::VideoPixelFormat::PIXEL_FORMAT_I420:
+ return media::mojom::VideoCapturePixelFormat::I420;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YV12:
+ return media::mojom::VideoCapturePixelFormat::YV12;
+ case media::VideoPixelFormat::PIXEL_FORMAT_I422:
+ return media::mojom::VideoCapturePixelFormat::I422;
+ case media::VideoPixelFormat::PIXEL_FORMAT_I420A:
+ return media::mojom::VideoCapturePixelFormat::I420A;
+ case media::VideoPixelFormat::PIXEL_FORMAT_I444:
+ return media::mojom::VideoCapturePixelFormat::I444;
+ case media::VideoPixelFormat::PIXEL_FORMAT_NV12:
+ return media::mojom::VideoCapturePixelFormat::NV12;
+ case media::VideoPixelFormat::PIXEL_FORMAT_NV21:
+ return media::mojom::VideoCapturePixelFormat::NV21;
+ case media::VideoPixelFormat::PIXEL_FORMAT_UYVY:
+ return media::mojom::VideoCapturePixelFormat::UYVY;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUY2:
+ return media::mojom::VideoCapturePixelFormat::YUY2;
+ case media::VideoPixelFormat::PIXEL_FORMAT_ARGB:
+ return media::mojom::VideoCapturePixelFormat::ARGB;
+ case media::VideoPixelFormat::PIXEL_FORMAT_XRGB:
+ return media::mojom::VideoCapturePixelFormat::XRGB;
+ case media::VideoPixelFormat::PIXEL_FORMAT_RGB24:
+ return media::mojom::VideoCapturePixelFormat::RGB24;
+ case media::VideoPixelFormat::PIXEL_FORMAT_RGB32:
+ return media::mojom::VideoCapturePixelFormat::RGB32;
+ case media::VideoPixelFormat::PIXEL_FORMAT_MJPEG:
+ return media::mojom::VideoCapturePixelFormat::MJPEG;
+ case media::VideoPixelFormat::PIXEL_FORMAT_MT21:
+ return media::mojom::VideoCapturePixelFormat::MT21;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV420P9:
+ return media::mojom::VideoCapturePixelFormat::YUV420P9;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV420P10:
+ return media::mojom::VideoCapturePixelFormat::YUV420P10;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV422P9:
+ return media::mojom::VideoCapturePixelFormat::YUV422P9;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV422P10:
+ return media::mojom::VideoCapturePixelFormat::YUV422P10;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV444P9:
+ return media::mojom::VideoCapturePixelFormat::YUV444P9;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV444P10:
+ return media::mojom::VideoCapturePixelFormat::YUV444P10;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV420P12:
+ return media::mojom::VideoCapturePixelFormat::YUV420P12;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV422P12:
+ return media::mojom::VideoCapturePixelFormat::YUV422P12;
+ case media::VideoPixelFormat::PIXEL_FORMAT_YUV444P12:
+ return media::mojom::VideoCapturePixelFormat::YUV444P12;
+ case media::VideoPixelFormat::PIXEL_FORMAT_Y16:
+ return media::mojom::VideoCapturePixelFormat::Y16;
+ }
+ NOTREACHED();
+ return media::mojom::VideoCapturePixelFormat::I420;
+}
+
+// static
+bool EnumTraits<media::mojom::VideoCapturePixelFormat,
+ media::VideoPixelFormat>::
+ FromMojom(media::mojom::VideoCapturePixelFormat input,
+ media::VideoPixelFormat* output) {
+ switch (input) {
+ case media::mojom::VideoCapturePixelFormat::UNKNOWN:
+ *output = media::PIXEL_FORMAT_UNKNOWN;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::I420:
+ *output = media::PIXEL_FORMAT_I420;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YV12:
+ *output = media::PIXEL_FORMAT_YV12;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::I422:
+ *output = media::PIXEL_FORMAT_I422;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::I420A:
+ *output = media::PIXEL_FORMAT_I420A;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::I444:
+ *output = media::PIXEL_FORMAT_I444;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::NV12:
+ *output = media::PIXEL_FORMAT_NV12;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::NV21:
+ *output = media::PIXEL_FORMAT_NV21;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::UYVY:
+ *output = media::PIXEL_FORMAT_UYVY;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUY2:
+ *output = media::PIXEL_FORMAT_YUY2;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::ARGB:
+ *output = media::PIXEL_FORMAT_ARGB;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::XRGB:
+ *output = media::PIXEL_FORMAT_XRGB;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::RGB24:
+ *output = media::PIXEL_FORMAT_RGB24;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::RGB32:
+ *output = media::PIXEL_FORMAT_RGB32;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::MJPEG:
+ *output = media::PIXEL_FORMAT_MJPEG;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::MT21:
+ *output = media::PIXEL_FORMAT_MT21;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV420P9:
+ *output = media::PIXEL_FORMAT_YUV420P9;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV420P10:
+ *output = media::PIXEL_FORMAT_YUV420P10;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV422P9:
+ *output = media::PIXEL_FORMAT_YUV422P9;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV422P10:
+ *output = media::PIXEL_FORMAT_YUV422P10;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV444P9:
+ *output = media::PIXEL_FORMAT_YUV444P9;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV444P10:
+ *output = media::PIXEL_FORMAT_YUV444P10;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV420P12:
+ *output = media::PIXEL_FORMAT_YUV420P12;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV422P12:
+ *output = media::PIXEL_FORMAT_YUV422P12;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::YUV444P12:
+ *output = media::PIXEL_FORMAT_YUV444P12;
+ return true;
+ case media::mojom::VideoCapturePixelFormat::Y16:
+ *output = media::PIXEL_FORMAT_Y16;
+ return true;
+ }
+ NOTREACHED();
+ return false;
+}
+
+// static
media::mojom::VideoCaptureApi
EnumTraits<media::mojom::VideoCaptureApi, media::VideoCaptureApi>::ToMojom(
media::VideoCaptureApi input) {
@@ -216,8 +350,6 @@ bool StructTraits<media::mojom::VideoCaptureFormatDataView,
out->frame_rate = data.frame_rate();
if (!data.ReadPixelFormat(&out->pixel_format))
return false;
- if (!data.ReadPixelStorage(&out->pixel_storage))
- return false;
return true;
}
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.h b/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
index 3eeb6eb44d4..4c9283c48da 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
@@ -13,15 +13,6 @@
namespace mojo {
template <>
-struct EnumTraits<media::mojom::VideoPixelStorage, media::VideoPixelStorage> {
- static media::mojom::VideoPixelStorage ToMojom(
- media::VideoPixelStorage video_pixel_storage);
-
- static bool FromMojom(media::mojom::VideoPixelStorage input,
- media::VideoPixelStorage* out);
-};
-
-template <>
struct EnumTraits<media::mojom::ResolutionChangePolicy,
media::ResolutionChangePolicy> {
static media::mojom::ResolutionChangePolicy ToMojom(
@@ -41,6 +32,15 @@ struct EnumTraits<media::mojom::PowerLineFrequency, media::PowerLineFrequency> {
};
template <>
+struct EnumTraits<media::mojom::VideoCapturePixelFormat,
+ media::VideoPixelFormat> {
+ static media::mojom::VideoCapturePixelFormat ToMojom(
+ media::VideoPixelFormat input);
+ static bool FromMojom(media::mojom::VideoCapturePixelFormat input,
+ media::VideoPixelFormat* output);
+};
+
+template <>
struct EnumTraits<media::mojom::VideoCaptureApi, media::VideoCaptureApi> {
static media::mojom::VideoCaptureApi ToMojom(media::VideoCaptureApi input);
static bool FromMojom(media::mojom::VideoCaptureApi input,
@@ -72,11 +72,6 @@ struct StructTraits<media::mojom::VideoCaptureFormatDataView,
return format.pixel_format;
}
- static media::VideoPixelStorage pixel_storage(
- const media::VideoCaptureFormat& format) {
- return format.pixel_storage;
- }
-
static bool Read(media::mojom::VideoCaptureFormatDataView data,
media::VideoCaptureFormat* out);
};
diff --git a/chromium/media/capture/video/android/BUILD.gn b/chromium/media/capture/video/android/BUILD.gn
index c967486ef33..cfb0229e0f0 100644
--- a/chromium/media/capture/video/android/BUILD.gn
+++ b/chromium/media/capture/video/android/BUILD.gn
@@ -41,6 +41,7 @@ generate_jni("capture_jni_headers") {
java_cpp_enum("media_java_enums_srcjar") {
sources = [
+ "../../../base/video_facing.h",
"../video_capture_device_descriptor.h",
"photo_capabilities.h",
"video_capture_device_android.h",
diff --git a/chromium/media/capture/video/android/video_capture_device_android.cc b/chromium/media/capture/video/android/video_capture_device_android.cc
index 785fc1bf8d6..cf6b4b013f5 100644
--- a/chromium/media/capture/video/android/video_capture_device_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_android.cc
@@ -466,11 +466,13 @@ void VideoCaptureDeviceAndroid::DoTakePhoto(TakePhotoCallback callback) {
std::unique_ptr<TakePhotoCallback> heap_callback(
new TakePhotoCallback(std::move(callback)));
const intptr_t callback_id = reinterpret_cast<intptr_t>(heap_callback.get());
- if (!Java_VideoCapture_takePhoto(env, j_capture_, callback_id))
- return;
- {
- base::AutoLock lock(photo_callbacks_lock_);
+ // We need lock here because asynchronous response to
+ // Java_VideoCapture_takePhoto(), i.e. a call to OnPhotoTaken, arrives from a
+ // separate thread, and it can arrive before |photo_callbacks_.push_back()|
+ // has executed.
+ base::AutoLock lock(photo_callbacks_lock_);
+ if (Java_VideoCapture_takePhoto(env, j_capture_, callback_id)) {
photo_callbacks_.push_back(std::move(heap_callback));
}
}
diff --git a/chromium/media/capture/video/android/video_capture_device_factory_android.cc b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
index b6191cc21af..e1a0cba1908 100644
--- a/chromium/media/capture/video/android/video_capture_device_factory_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
@@ -70,6 +70,8 @@ void VideoCaptureDeviceFactoryAndroid::GetDeviceDescriptors(
const int capture_api_type =
Java_VideoCaptureFactory_getCaptureApiType(env, camera_id);
+ const int facing_mode =
+ Java_VideoCaptureFactory_getFacingMode(env, camera_id);
const std::string display_name =
base::android::ConvertJavaStringToUTF8(device_name);
const std::string device_id = base::IntToString(camera_id);
@@ -77,9 +79,20 @@ void VideoCaptureDeviceFactoryAndroid::GetDeviceDescriptors(
// Android cameras are not typically USB devices, and the model_id is
// currently only used for USB model identifiers, so this implementation
// just indicates an unknown device model (by not providing one).
- device_descriptors->emplace_back(
- display_name, device_id,
- static_cast<VideoCaptureApi>(capture_api_type));
+ VideoCaptureDeviceDescriptor descriptor(
+ display_name, device_id, "" /*model_id*/,
+ static_cast<VideoCaptureApi>(capture_api_type),
+ VideoCaptureTransportType::OTHER_TRANSPORT,
+ static_cast<VideoFacingMode>(facing_mode));
+
+ // We put user-facing devices to the front of the list in order to make
+ // them by-default preferred over environment-facing ones when no other
+ // constraints for device selection are given.
+ if (facing_mode == MEDIA_VIDEO_FACING_USER)
+ device_descriptors->insert(device_descriptors->begin(),
+ std::move(descriptor));
+ else
+ device_descriptors->emplace_back(std::move(descriptor));
DVLOG(1) << __func__ << ": camera "
<< "device_name=" << display_name << ", unique_id=" << device_id;
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.cc b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
new file mode 100644
index 00000000000..0ff045ef0a3
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
@@ -0,0 +1,327 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/camera_3a_controller.h"
+
+#include "media/capture/video/chromeos/camera_metadata_utils.h"
+
+namespace media {
+
+namespace {
+
+template <typename EntryType>
+bool Get3AEntry(const cros::mojom::CameraMetadataPtr& metadata,
+ cros::mojom::CameraMetadataTag control,
+ EntryType* result) {
+ const auto* entry = GetMetadataEntry(metadata, control);
+ if (entry) {
+ *result = static_cast<EntryType>((*entry)->data[0]);
+ return true;
+ } else {
+ return false;
+ }
+}
+
+} // namespace
+
+Camera3AController::Camera3AController(
+ const cros::mojom::CameraMetadataPtr& static_metadata,
+ CaptureMetadataDispatcher* capture_metadata_dispatcher,
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner)
+ : capture_metadata_dispatcher_(capture_metadata_dispatcher),
+ task_runner_(std::move(task_runner)),
+ af_mode_(cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF),
+ af_state_(cros::mojom::AndroidControlAfState::
+ ANDROID_CONTROL_AF_STATE_INACTIVE),
+ af_mode_set_(false),
+ ae_mode_(cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_ON),
+ ae_state_(cros::mojom::AndroidControlAeState::
+ ANDROID_CONTROL_AE_STATE_INACTIVE),
+ ae_mode_set_(false),
+ awb_mode_(
+ cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_AUTO),
+ awb_state_(cros::mojom::AndroidControlAwbState::
+ ANDROID_CONTROL_AWB_STATE_INACTIVE),
+ awb_mode_set_(false),
+ weak_ptr_factory_(this) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ capture_metadata_dispatcher_->AddResultMetadataObserver(this);
+
+ auto* af_modes = GetMetadataEntry(
+ static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES);
+ if (af_modes) {
+ for (const auto& m : (*af_modes)->data) {
+ available_af_modes_.insert(
+ static_cast<cros::mojom::AndroidControlAfMode>(m));
+ }
+ }
+ auto* ae_modes = GetMetadataEntry(
+ static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_AVAILABLE_MODES);
+ if (ae_modes) {
+ for (const auto& m : (*ae_modes)->data) {
+ available_ae_modes_.insert(
+ static_cast<cros::mojom::AndroidControlAeMode>(m));
+ }
+ }
+ auto* awb_modes = GetMetadataEntry(
+ static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_AVAILABLE_MODES);
+ if (awb_modes) {
+ for (const auto& m : (*awb_modes)->data) {
+ available_awb_modes_.insert(
+ static_cast<cros::mojom::AndroidControlAwbMode>(m));
+ }
+ }
+
+ // Enable AF if supported. MODE_AUTO is always supported on auto-focus camera
+ // modules; fixed focus camera modules always has MODE_OFF.
+ if (available_af_modes_.count(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO)) {
+ af_mode_ = cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO;
+ }
+ // AE should always be MODE_ON unless we enable manual sensor control. Since
+ // we don't have flash on any of our devices we don't care about the
+ // flash-related AE modes.
+ //
+ // AWB should always be MODE_AUTO unless we enable manual sensor control.
+ Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ base::checked_cast<uint8_t>(af_mode_));
+ Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
+ base::checked_cast<uint8_t>(ae_mode_));
+ Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
+ base::checked_cast<uint8_t>(awb_mode_));
+}
+
+Camera3AController::~Camera3AController() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ capture_metadata_dispatcher_->RemoveResultMetadataObserver(this);
+}
+
+void Camera3AController::Stabilize3AForStillCapture(
+ base::OnceClosure on_3a_stabilized_callback) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (on_3a_stabilized_callback_ || on_3a_mode_set_callback_) {
+ // Already stabilizing 3A.
+ return;
+ }
+
+ if (Is3AStabilized()) {
+ std::move(on_3a_stabilized_callback).Run();
+ return;
+ }
+
+ // Wait until all the 3A modes are set in the HAL; otherwise the AF trigger
+ // and AE precapture trigger may be invalidated during mode transition.
+ if (!af_mode_set_ || !ae_mode_set_ || !awb_mode_set_) {
+ on_3a_mode_set_callback_ =
+ base::BindOnce(&Camera3AController::Stabilize3AForStillCapture,
+ GetWeakPtr(), base::Passed(&on_3a_stabilized_callback));
+ return;
+ }
+
+ on_3a_stabilized_callback_ = std::move(on_3a_stabilized_callback);
+
+ if (af_mode_ !=
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF) {
+ DVLOG(1) << "Start AF trigger to lock focus";
+ std::vector<uint8_t> af_trigger = {
+ base::checked_cast<uint8_t>(cros::mojom::AndroidControlAfTrigger::
+ ANDROID_CONTROL_AF_TRIGGER_START)};
+ capture_metadata_dispatcher_->SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, std::move(af_trigger));
+ }
+
+ if (ae_mode_ !=
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF) {
+ DVLOG(1) << "Start AE precapture trigger to converge exposure";
+ std::vector<uint8_t> ae_precapture_trigger = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAePrecaptureTrigger::
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START)};
+ capture_metadata_dispatcher_->SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, std::move(ae_precapture_trigger));
+ }
+}
+
+void Camera3AController::OnResultMetadataAvailable(
+ const cros::mojom::CameraMetadataPtr& result_metadata) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (af_mode_set_ && ae_mode_set_ && awb_mode_set_ &&
+ !on_3a_stabilized_callback_) {
+ // Process the result metadata only when we need to check if 3A modes are
+ // synchronized, or when there's a pending 3A stabilization request.
+ return;
+ }
+
+ cros::mojom::AndroidControlAfMode af_mode;
+ if (Get3AEntry(result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ &af_mode)) {
+ af_mode_set_ = (af_mode == af_mode_);
+ } else {
+ DVLOG(2) << "AF mode is not available in the metadata";
+ }
+ if (!Get3AEntry(result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_STATE,
+ &af_state_)) {
+ DVLOG(2) << "AF state is not available in the metadata";
+ }
+
+ cros::mojom::AndroidControlAeMode ae_mode;
+ if (Get3AEntry(result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
+ &ae_mode)) {
+ ae_mode_set_ = (ae_mode == ae_mode_);
+ } else {
+ DVLOG(2) << "AE mode is not available in the metadata";
+ }
+ if (!Get3AEntry(result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_STATE,
+ &ae_state_)) {
+ DVLOG(2) << "AE state is not available in the metadata";
+ }
+
+ cros::mojom::AndroidControlAwbMode awb_mode;
+ if (Get3AEntry(result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
+ &awb_mode)) {
+ awb_mode_set_ = (awb_mode == awb_mode_);
+ } else {
+ DVLOG(2) << "AWB mode is not available in the metadata";
+ }
+ if (!Get3AEntry(result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_STATE,
+ &awb_state_)) {
+ DVLOG(2) << "AWB state is not available in the metadata";
+ }
+
+ DVLOG(2) << "AF mode: " << af_mode_;
+ DVLOG(2) << "AF state: " << af_state_;
+ DVLOG(2) << "AE mode: " << ae_mode_;
+ DVLOG(2) << "AE state: " << ae_state_;
+ DVLOG(2) << "AWB mode: " << awb_mode_;
+ DVLOG(2) << "AWB state: " << awb_state_;
+
+ if (on_3a_mode_set_callback_ && af_mode_set_ && ae_mode_set_ &&
+ awb_mode_set_) {
+ task_runner_->PostTask(FROM_HERE, std::move(on_3a_mode_set_callback_));
+ }
+
+ if (on_3a_stabilized_callback_ && Is3AStabilized()) {
+ std::move(on_3a_stabilized_callback_).Run();
+ }
+}
+
+void Camera3AController::SetAutoFocusModeForStillCapture() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ std::vector<uint8_t> af_trigger = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
+ capture_metadata_dispatcher_->SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, std::move(af_trigger));
+
+ if (available_af_modes_.count(
+ cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
+ af_mode_ = cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+ }
+ std::vector<uint8_t> af_mode = {base::checked_cast<uint8_t>(af_mode_)};
+ Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ base::checked_cast<uint8_t>(af_mode_));
+ DVLOG(1) << "Setting AF mode to: " << af_mode_;
+}
+
+void Camera3AController::SetAutoFocusModeForVideoRecording() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ std::vector<uint8_t> af_trigger = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
+ capture_metadata_dispatcher_->SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, std::move(af_trigger));
+
+ if (available_af_modes_.count(cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
+ af_mode_ = cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+ }
+ Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ base::checked_cast<uint8_t>(af_mode_));
+ DVLOG(1) << "Setting AF mode to: " << af_mode_;
+}
+
+base::WeakPtr<Camera3AController> Camera3AController::GetWeakPtr() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ return weak_ptr_factory_.GetWeakPtr();
+}
+
+void Camera3AController::Set3AMode(cros::mojom::CameraMetadataTag tag,
+ uint8_t target_mode) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(tag == cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE ||
+ tag == cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE ||
+ tag == cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE);
+
+ std::vector<uint8_t> mode = {base::checked_cast<uint8_t>(target_mode)};
+ capture_metadata_dispatcher_->SetCaptureMetadata(
+ tag, cros::mojom::EntryType::TYPE_BYTE, 1, std::move(mode));
+
+ switch (tag) {
+ case cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE:
+ af_mode_set_ = false;
+ break;
+ case cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE:
+ ae_mode_set_ = false;
+ break;
+ case cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE:
+ awb_mode_set_ = false;
+ break;
+ default:
+ NOTREACHED() << "Invalid 3A mode: " << tag;
+ }
+}
+
+bool Camera3AController::Is3AStabilized() {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (af_mode_ !=
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF) {
+ if (af_state_ != cros::mojom::AndroidControlAfState::
+ ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
+ af_state_ != cros::mojom::AndroidControlAfState::
+ ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
+ return false;
+ }
+ }
+ if (ae_mode_ !=
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF) {
+ if (ae_state_ != cros::mojom::AndroidControlAeState::
+ ANDROID_CONTROL_AE_STATE_CONVERGED &&
+ ae_state_ != cros::mojom::AndroidControlAeState::
+ ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED) {
+ return false;
+ }
+ }
+ if (awb_mode_ ==
+ cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_AUTO) {
+ if (awb_state_ != cros::mojom::AndroidControlAwbState::
+ ANDROID_CONTROL_AWB_STATE_CONVERGED) {
+ return false;
+ }
+ }
+ DVLOG(1) << "3A stabilized";
+ return true;
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.h b/chromium/media/capture/video/chromeos/camera_3a_controller.h
new file mode 100644
index 00000000000..c96b550a6ba
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.h
@@ -0,0 +1,83 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_3A_CONTROLLER_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_3A_CONTROLLER_H_
+
+#include <unordered_set>
+
+#include "media/base/media_export.h"
+#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
+#include "media/capture/video/chromeos/stream_buffer_manager.h"
+
+namespace media {
+
+// A class to control the auto-exposure, auto-focus, and auto-white-balancing
+// operations and modes of the camera. For the detailed state transitions for
+// auto-exposure, auto-focus, and auto-white-balancing, see
+// https://source.android.com/devices/camera/camera3_3Amodes
+class CAPTURE_EXPORT Camera3AController
+ : public CaptureMetadataDispatcher::ResultMetadataObserver {
+ public:
+ Camera3AController(const cros::mojom::CameraMetadataPtr& static_metadata,
+ CaptureMetadataDispatcher* capture_metadata_dispatcher,
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+ ~Camera3AController() final;
+
+ // Trigger the camera to start exposure, focus, and white-balance metering and
+ // lock them for still capture.
+ void Stabilize3AForStillCapture(base::OnceClosure on_3a_stabilized_callback);
+
+ // CaptureMetadataDispatcher::ResultMetadataObserver implementation.
+ void OnResultMetadataAvailable(
+ const cros::mojom::CameraMetadataPtr& result_metadata) final;
+
+ // Enable the auto-focus mode suitable for still capture.
+ void SetAutoFocusModeForStillCapture();
+
+ // Enable the auto-focus mode suitable for video recording.
+ void SetAutoFocusModeForVideoRecording();
+
+ base::WeakPtr<Camera3AController> GetWeakPtr();
+
+ private:
+ void Set3AMode(cros::mojom::CameraMetadataTag tag, uint8_t target_mode);
+ bool Is3AStabilized();
+
+ CaptureMetadataDispatcher* capture_metadata_dispatcher_;
+ const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+
+ std::unordered_set<cros::mojom::AndroidControlAfMode> available_af_modes_;
+ cros::mojom::AndroidControlAfMode af_mode_;
+ cros::mojom::AndroidControlAfState af_state_;
+ // |af_mode_set_| is set to true when the AF mode is synchronized between the
+ // HAL and the Camera3AController.
+ bool af_mode_set_;
+
+ std::unordered_set<cros::mojom::AndroidControlAeMode> available_ae_modes_;
+ cros::mojom::AndroidControlAeMode ae_mode_;
+ cros::mojom::AndroidControlAeState ae_state_;
+ // |ae_mode_set_| is set to true when the AE mode is synchronized between the
+ // HAL and the Camera3AController.
+ bool ae_mode_set_;
+
+ std::unordered_set<cros::mojom::AndroidControlAwbMode> available_awb_modes_;
+ cros::mojom::AndroidControlAwbMode awb_mode_;
+ cros::mojom::AndroidControlAwbState awb_state_;
+ // |awb_mode_set_| is set to true when the AWB mode is synchronized between
+ // the HAL and the Camera3AController.
+ bool awb_mode_set_;
+
+ base::OnceClosure on_3a_mode_set_callback_;
+
+ base::OnceClosure on_3a_stabilized_callback_;
+
+ base::WeakPtrFactory<Camera3AController> weak_ptr_factory_;
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(Camera3AController);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_3A_CONTROLLER_H_
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller_unittest.cc b/chromium/media/capture/video/chromeos/camera_3a_controller_unittest.cc
new file mode 100644
index 00000000000..070e0fe1adb
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller_unittest.cc
@@ -0,0 +1,502 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/camera_3a_controller.h"
+
+#include "base/synchronization/waitable_event.h"
+#include "base/threading/thread.h"
+#include "media/capture/video/chromeos/camera_metadata_utils.h"
+#include "media/capture/video/chromeos/stream_buffer_manager.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::_;
+
+namespace media {
+
+namespace {
+
+class MockCaptureMetadataDispatcher : public CaptureMetadataDispatcher {
+ public:
+ MockCaptureMetadataDispatcher() {}
+ ~MockCaptureMetadataDispatcher() override {}
+ MOCK_METHOD1(
+ AddResultMetadataObserver,
+ void(CaptureMetadataDispatcher::ResultMetadataObserver* observer));
+ MOCK_METHOD1(
+ RemoveResultMetadataObserver,
+ void(CaptureMetadataDispatcher::ResultMetadataObserver* observer));
+ MOCK_METHOD4(SetCaptureMetadata,
+ void(cros::mojom::CameraMetadataTag tag,
+ cros::mojom::EntryType type,
+ size_t count,
+ std::vector<uint8_t> value));
+};
+
+} // namespace
+
+class Camera3AControllerTest : public ::testing::Test {
+ public:
+ Camera3AControllerTest() : thread_("Camera3AControllerThread") {}
+
+ void SetUp() override {
+ thread_.Start();
+ mock_capture_metadata_dispatcher_ =
+ std::make_unique<MockCaptureMetadataDispatcher>();
+ }
+
+ void TearDown() override {
+ thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&Camera3AControllerTest::Clear3AControllerOnThread,
+ base::Unretained(this)));
+ thread_.Stop();
+ mock_capture_metadata_dispatcher_.reset();
+ }
+
+ void RunOnThreadSync(const base::Location& location,
+ base::OnceClosure closure) {
+ base::WaitableEvent done(base::WaitableEvent::ResetPolicy::MANUAL,
+ base::WaitableEvent::InitialState::NOT_SIGNALED);
+ thread_.task_runner()->PostTask(
+ location,
+ base::BindOnce(&Camera3AControllerTest::RunOnThread,
+ base::Unretained(this), base::ConstRef(location),
+ base::Passed(&closure), base::Unretained(&done)));
+ done.Wait();
+ }
+
+ void Reset3AController(
+ const cros::mojom::CameraMetadataPtr& static_metadata) {
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(&Camera3AControllerTest::Reset3AControllerOnThread,
+ base::Unretained(this),
+ base::ConstRef(static_metadata)));
+ }
+
+ template <typename Value>
+ void Set3AMode(cros::mojom::CameraMetadataPtr* metadata,
+ cros::mojom::CameraMetadataTag control,
+ Value value,
+ bool append = false) {
+ auto* e = GetMetadataEntry(*metadata, control);
+ if (e) {
+ if (append) {
+ (*e)->count++;
+ (*e)->data.push_back(base::checked_cast<uint8_t>(value));
+ } else {
+ (*e)->count = 1;
+ (*e)->data = {base::checked_cast<uint8_t>(value)};
+ }
+ } else {
+ cros::mojom::CameraMetadataEntryPtr entry =
+ cros::mojom::CameraMetadataEntry::New();
+ entry->index = (*metadata)->entries.value().size();
+ entry->tag = control;
+ entry->type = cros::mojom::EntryType::TYPE_BYTE;
+ entry->count = 1;
+ entry->data = {base::checked_cast<uint8_t>(value)};
+
+ (*metadata)->entries.value().push_back(std::move(entry));
+ (*metadata)->entry_count++;
+ (*metadata)->entry_capacity++;
+ }
+ SortCameraMetadata(metadata);
+ }
+
+ cros::mojom::CameraMetadataPtr CreateDefaultFakeStaticMetadata() {
+ auto metadata = cros::mojom::CameraMetadata::New();
+ metadata->entries = std::vector<cros::mojom::CameraMetadataEntryPtr>();
+ metadata->entry_count = 0;
+ metadata->entry_capacity = 0;
+
+ // Set the available AF modes.
+ Set3AMode(
+ &metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
+ Set3AMode(
+ &metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO,
+ /* append */ true);
+ Set3AMode(
+ &metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
+ /* append */ true);
+ Set3AMode(
+ &metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
+ /* append */ true);
+
+ // Set the available AE modes.
+ Set3AMode(
+ &metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF);
+ Set3AMode(
+ &metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_ON,
+ /* append */ true);
+
+ // Set the available AWB modes.
+ Set3AMode(
+ &metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_OFF);
+ Set3AMode(
+ &metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_AUTO,
+ /* append */ true);
+
+ return metadata;
+ }
+
+ void On3AStabilizedCallback(base::WaitableEvent* done) { done->Signal(); }
+
+ protected:
+ base::Thread thread_;
+ std::unique_ptr<MockCaptureMetadataDispatcher>
+ mock_capture_metadata_dispatcher_;
+ std::unique_ptr<Camera3AController> camera_3a_controller_;
+
+ private:
+ void RunOnThread(const base::Location& location,
+ base::OnceClosure closure,
+ base::WaitableEvent* done) {
+ DCHECK(thread_.task_runner()->BelongsToCurrentThread());
+
+ std::move(closure).Run();
+ done->Signal();
+ }
+
+ void Clear3AControllerOnThread() {
+ DCHECK(thread_.task_runner()->BelongsToCurrentThread());
+
+ if (camera_3a_controller_) {
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ RemoveResultMetadataObserver(camera_3a_controller_.get()))
+ .Times(1);
+ }
+ camera_3a_controller_.reset();
+ }
+
+ void Reset3AControllerOnThread(
+ const cros::mojom::CameraMetadataPtr& static_metadata) {
+ DCHECK(thread_.task_runner()->BelongsToCurrentThread());
+
+ Clear3AControllerOnThread();
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ AddResultMetadataObserver(_))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, _))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, _))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, _))
+ .Times(1);
+ camera_3a_controller_ = std::make_unique<Camera3AController>(
+ static_metadata, mock_capture_metadata_dispatcher_.get(),
+ thread_.task_runner());
+ }
+};
+
+TEST_F(Camera3AControllerTest, Stabilize3AForStillCaptureTest) {
+ Reset3AController(CreateDefaultFakeStaticMetadata());
+
+ // Set AF mode.
+ std::vector<uint8_t> af_trigger_start, af_trigger_cancel, af_mode, ae_trigger;
+ af_trigger_start = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_START)};
+ af_trigger_cancel = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
+ af_mode = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE)};
+ ae_trigger = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAePrecaptureTrigger::
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START)};
+
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger_cancel))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
+ .Times(1);
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
+ base::Unretained(camera_3a_controller_.get())));
+
+ // |camera_3a_controller_| should wait until the AF mode is set
+ // before setting the AF and AE precapture triggers.
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger_start))
+ .Times(0);
+ EXPECT_CALL(
+ *mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, ae_trigger))
+ .Times(0);
+ base::WaitableEvent done(base::WaitableEvent::ResetPolicy::MANUAL,
+ base::WaitableEvent::InitialState::NOT_SIGNALED);
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(
+ &Camera3AController::Stabilize3AForStillCapture,
+ base::Unretained(camera_3a_controller_.get()),
+ base::BindOnce(&Camera3AControllerTest::On3AStabilizedCallback,
+ base::Unretained(this), &done)));
+ testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
+
+ // |camera_3a_controller_| should set the AF and AE precapture triggers once
+ // the 3A modes are set.
+ auto result_metadata = CreateDefaultFakeStaticMetadata();
+ Set3AMode(&result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+ Set3AMode(
+ &result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_STATE,
+ cros::mojom::AndroidControlAfState::ANDROID_CONTROL_AF_STATE_INACTIVE);
+ Set3AMode(&result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_ON);
+ Set3AMode(
+ &result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_STATE,
+ cros::mojom::AndroidControlAeState::ANDROID_CONTROL_AE_STATE_INACTIVE);
+ Set3AMode(&result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
+ cros::mojom::AndroidControlAwbMode::ANDROID_CONTROL_AWB_MODE_AUTO);
+ Set3AMode(
+ &result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_STATE,
+ cros::mojom::AndroidControlAwbState::ANDROID_CONTROL_AWB_STATE_INACTIVE);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger_start))
+ .Times(1);
+ EXPECT_CALL(
+ *mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, ae_trigger))
+ .Times(1);
+ RunOnThreadSync(FROM_HERE,
+ base::BindOnce(&Camera3AController::OnResultMetadataAvailable,
+ base::Unretained(camera_3a_controller_.get()),
+ base::ConstRef(result_metadata)));
+
+  // |camera_3a_controller_| should call the registered callback once 3A is
+ // stabilized.
+ Set3AMode(&result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_STATE,
+ cros::mojom::AndroidControlAfState::
+ ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
+ Set3AMode(
+ &result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_STATE,
+ cros::mojom::AndroidControlAeState::ANDROID_CONTROL_AE_STATE_CONVERGED);
+ Set3AMode(
+ &result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_STATE,
+ cros::mojom::AndroidControlAwbState::ANDROID_CONTROL_AWB_STATE_CONVERGED);
+ RunOnThreadSync(FROM_HERE,
+ base::BindOnce(&Camera3AController::OnResultMetadataAvailable,
+ base::Unretained(camera_3a_controller_.get()),
+ base::ConstRef(result_metadata)));
+ done.Wait();
+}
+
+// Test that SetAutoFocusModeForStillCapture sets the right auto-focus mode on
+// cameras with different capabilities.
+TEST_F(Camera3AControllerTest, SetAutoFocusModeForStillCaptureTest) {
+ auto static_metadata = CreateDefaultFakeStaticMetadata();
+ std::vector<uint8_t> af_mode;
+ std::vector<uint8_t> af_trigger = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
+
+ // For camera that supports continuous auto-focus for picture mode.
+ Reset3AController(static_metadata);
+ af_mode = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE)};
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
+ .Times(1);
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
+ base::Unretained(camera_3a_controller_.get())));
+ testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
+
+ // For camera that only supports basic auto focus.
+ Set3AMode(&static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
+ Set3AMode(&static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO,
+ /* append */ true);
+ Reset3AController(static_metadata);
+ af_mode.clear();
+ af_mode = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO)};
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
+ .Times(1);
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
+ base::Unretained(camera_3a_controller_.get())));
+ testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
+
+ // For camera that is fixed-focus.
+ Set3AMode(&static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
+ Reset3AController(static_metadata);
+ af_mode.clear();
+ af_mode = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF)};
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
+ .Times(1);
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
+ base::Unretained(camera_3a_controller_.get())));
+ testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
+}
+
+// Test that SetAutoFocusModeForVideoRecording sets the right auto-focus mode on
+// cameras with different capabilities.
+TEST_F(Camera3AControllerTest, SetAutoFocusModeForVideoRecordingTest) {
+ auto static_metadata = CreateDefaultFakeStaticMetadata();
+ std::vector<uint8_t> af_mode;
+ std::vector<uint8_t> af_trigger = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfTrigger::ANDROID_CONTROL_AF_TRIGGER_CANCEL)};
+
+  // For camera that supports continuous auto-focus for video mode.
+ Reset3AController(static_metadata);
+ af_mode = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::
+ ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO)};
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
+ .Times(1);
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForVideoRecording,
+ base::Unretained(camera_3a_controller_.get())));
+ testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
+
+ // For camera that only supports basic auto focus.
+ Set3AMode(&static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
+ Set3AMode(&static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO,
+ /* append */ true);
+ Reset3AController(static_metadata);
+ af_mode.clear();
+ af_mode = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO)};
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
+ .Times(1);
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForVideoRecording,
+ base::Unretained(camera_3a_controller_.get())));
+ testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
+
+ // For camera that is fixed-focus.
+ Set3AMode(&static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF);
+ Reset3AController(static_metadata);
+ af_mode.clear();
+ af_mode = {base::checked_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF)};
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_TRIGGER,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_trigger))
+ .Times(1);
+ EXPECT_CALL(*mock_capture_metadata_dispatcher_,
+ SetCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ cros::mojom::EntryType::TYPE_BYTE, 1, af_mode))
+ .Times(1);
+ RunOnThreadSync(
+ FROM_HERE,
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForVideoRecording,
+ base::Unretained(camera_3a_controller_.get())));
+ testing::Mock::VerifyAndClearExpectations(camera_3a_controller_.get());
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_buffer_factory.cc b/chromium/media/capture/video/chromeos/camera_buffer_factory.cc
index 02df0b66ba0..7cdd18618ed 100644
--- a/chromium/media/capture/video/chromeos/camera_buffer_factory.cc
+++ b/chromium/media/capture/video/chromeos/camera_buffer_factory.cc
@@ -21,9 +21,12 @@ CameraBufferFactory::CreateGpuMemoryBuffer(const gfx::Size& size,
LOG(ERROR) << "GpuMemoryBufferManager not set";
return std::unique_ptr<gfx::GpuMemoryBuffer>();
}
- return buf_manager->CreateGpuMemoryBuffer(
- size, format, gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
- gpu::kNullSurfaceHandle);
+ gfx::BufferUsage buffer_usage = gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE;
+ if (format == gfx::BufferFormat::R_8) {
+ buffer_usage = gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE;
+ }
+ return buf_manager->CreateGpuMemoryBuffer(size, format, buffer_usage,
+ gpu::kNullSurfaceHandle);
}
// There's no good way to resolve the HAL pixel format to the platform-specific
diff --git a/chromium/media/capture/video/chromeos/camera_device_context.cc b/chromium/media/capture/video/chromeos/camera_device_context.cc
index 8a8e3d64eb7..32de5064987 100644
--- a/chromium/media/capture/video/chromeos/camera_device_context.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_context.cc
@@ -43,14 +43,13 @@ void CameraDeviceContext::LogToClient(std::string message) {
}
void CameraDeviceContext::SubmitCapturedData(
- const uint8_t* data,
- int length,
+ gfx::GpuMemoryBuffer* buffer,
const VideoCaptureFormat& frame_format,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
- int total_rotation = (sensor_orientation_ + screen_rotation_) % 360;
- client_->OnIncomingCapturedData(data, length, frame_format, total_rotation,
- reference_time, timestamp);
+ client_->OnIncomingCapturedGfxBuffer(buffer, frame_format,
+ GetCameraFrameOrientation(),
+ reference_time, timestamp);
}
void CameraDeviceContext::SetSensorOrientation(int sensor_orientation) {
@@ -67,4 +66,8 @@ void CameraDeviceContext::SetScreenRotation(int screen_rotation) {
screen_rotation_ = screen_rotation;
}
+int CameraDeviceContext::GetCameraFrameOrientation() {
+ return (sensor_orientation_ + screen_rotation_) % 360;
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_device_context.h b/chromium/media/capture/video/chromeos/camera_device_context.h
index 5b5372847c4..96bd7edb3db 100644
--- a/chromium/media/capture/video/chromeos/camera_device_context.h
+++ b/chromium/media/capture/video/chromeos/camera_device_context.h
@@ -14,6 +14,9 @@
namespace media {
// A class storing the context of a running CameraDeviceDelegate.
+//
+// The class is also used to forward/translate events and method calls to a
+// given VideoCaptureDevice::Client.
class CAPTURE_EXPORT CameraDeviceContext {
public:
// The internal state of the running CameraDeviceDelegate. The state
@@ -53,7 +56,7 @@ class CAPTURE_EXPORT CameraDeviceContext {
//
// ConstructDefaultRequestSettings() ->
// OnConstructedDefaultRequestSettings() ->
- // |stream_buffer_manager_|->StartCapture()
+ // |stream_buffer_manager_|->StartPreview()
//
// In the kCapturing state the |stream_buffer_manager_| runs the capture
// loop to send capture requests and process capture results.
@@ -106,8 +109,7 @@ class CAPTURE_EXPORT CameraDeviceContext {
void LogToClient(std::string message);
// Submits the capture data to |client_->OnIncomingCapturedData|.
- void SubmitCapturedData(const uint8_t* data,
- int length,
+ void SubmitCapturedData(gfx::GpuMemoryBuffer* buffer,
const VideoCaptureFormat& frame_format,
base::TimeTicks reference_time,
base::TimeDelta timestamp);
@@ -116,6 +118,8 @@ class CAPTURE_EXPORT CameraDeviceContext {
void SetScreenRotation(int screen_rotation);
+ int GetCameraFrameOrientation();
+
private:
friend class StreamBufferManagerTest;
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index cfb63ebb82c..8505fcf230b 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -10,6 +10,9 @@
#include <vector>
#include "media/base/bind_to_current_loop.h"
+#include "media/capture/mojom/image_capture_types.h"
+#include "media/capture/video/blob_utils.h"
+#include "media/capture/video/chromeos/camera_3a_controller.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
@@ -20,6 +23,81 @@
namespace media {
+namespace {
+
+void GetMaxBlobStreamResolution(
+ const cros::mojom::CameraMetadataPtr& static_metadata,
+ int32_t* max_blob_width,
+ int32_t* max_blob_height) {
+ const cros::mojom::CameraMetadataEntryPtr* stream_configurations =
+ GetMetadataEntry(static_metadata,
+ cros::mojom::CameraMetadataTag::
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+ DCHECK(stream_configurations);
+ // The available stream configurations are stored as tuples of four int32s:
+ // (hal_pixel_format, width, height, type) x n
+ const size_t kStreamFormatOffset = 0;
+ const size_t kStreamWidthOffset = 1;
+ const size_t kStreamHeightOffset = 2;
+ const size_t kStreamTypeOffset = 3;
+ const size_t kStreamConfigurationSize = 4;
+ int32_t* iter =
+ reinterpret_cast<int32_t*>((*stream_configurations)->data.data());
+ *max_blob_width = 0;
+ *max_blob_height = 0;
+ for (size_t i = 0; i < (*stream_configurations)->count;
+ i += kStreamConfigurationSize) {
+ auto format =
+ static_cast<cros::mojom::HalPixelFormat>(iter[kStreamFormatOffset]);
+ int32_t width = iter[kStreamWidthOffset];
+ int32_t height = iter[kStreamHeightOffset];
+ auto type =
+ static_cast<cros::mojom::Camera3StreamType>(iter[kStreamTypeOffset]);
+ iter += kStreamConfigurationSize;
+
+ if (type != cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT ||
+ format != cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB) {
+ continue;
+ }
+ if (width > *max_blob_width && height > *max_blob_height) {
+ *max_blob_width = width;
+ *max_blob_height = height;
+ }
+ }
+ DCHECK_GT(*max_blob_width, 0);
+ DCHECK_GT(*max_blob_height, 0);
+}
+
+// VideoCaptureDevice::TakePhotoCallback is given by the application and is used
+// to return the captured JPEG blob buffer. The second base::OnceClosure is
+// created locally by the caller of TakePhoto(), and can be used to, for
+// example, restore some settings to the values before TakePhoto() is called to
+// facilitate the switch between photo and non-photo modes.
+void TakePhotoCallbackBundle(VideoCaptureDevice::TakePhotoCallback callback,
+ base::OnceClosure on_photo_taken_callback,
+ mojom::BlobPtr blob) {
+ std::move(callback).Run(std::move(blob));
+ std::move(on_photo_taken_callback).Run();
+}
+
+} // namespace
+
+std::string StreamTypeToString(StreamType stream_type) {
+ switch (stream_type) {
+ case StreamType::kPreview:
+ return std::string("StreamType::kPreview");
+ case StreamType::kStillCapture:
+ return std::string("StreamType::kStillCapture");
+ default:
+ return std::string("Unknown StreamType value: ") +
+ std::to_string(static_cast<int32_t>(stream_type));
+ }
+}
+
+std::ostream& operator<<(std::ostream& os, StreamType stream_type) {
+ return os << StreamTypeToString(stream_type);
+}
+
StreamCaptureInterface::Plane::Plane() = default;
StreamCaptureInterface::Plane::~Plane() = default;
@@ -81,7 +159,7 @@ void CameraDeviceDelegate::AllocateAndStart(
// We need to get the static camera metadata of the camera device first.
camera_hal_delegate_->GetCameraInfo(
- camera_id_, BindToCurrentLoop(base::Bind(
+ camera_id_, BindToCurrentLoop(base::BindOnce(
&CameraDeviceDelegate::OnGotCameraInfo, GetWeakPtr())));
}
@@ -110,30 +188,65 @@ void CameraDeviceDelegate::StopAndDeAllocate(
// The device delegate is in the process of opening the camera device.
return;
}
- stream_buffer_manager_->StopCapture();
- device_ops_->Close(base::Bind(&CameraDeviceDelegate::OnClosed, GetWeakPtr()));
+ stream_buffer_manager_->StopPreview();
+ device_ops_->Close(
+ base::BindOnce(&CameraDeviceDelegate::OnClosed, GetWeakPtr()));
}
void CameraDeviceDelegate::TakePhoto(
VideoCaptureDevice::TakePhotoCallback callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- // TODO(jcliang): Implement TakePhoto.
- NOTIMPLEMENTED() << "TakePhoto is not implemented";
+
+ take_photo_callbacks_.push(std::move(callback));
+
+ if (!device_context_ ||
+ (device_context_->GetState() !=
+ CameraDeviceContext::State::kStreamConfigured &&
+ device_context_->GetState() != CameraDeviceContext::State::kCapturing)) {
+ return;
+ }
+
+ camera_3a_controller_->Stabilize3AForStillCapture(
+ base::BindOnce(&CameraDeviceDelegate::ConstructDefaultRequestSettings,
+ GetWeakPtr(), StreamType::kStillCapture));
}
void CameraDeviceDelegate::GetPhotoState(
VideoCaptureDevice::GetPhotoStateCallback callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- // TODO(jcliang): Implement GetPhotoState.
- NOTIMPLEMENTED() << "GetPhotoState is not implemented";
+
+ auto photo_state = mojo::CreateEmptyPhotoState();
+
+ if (!device_context_ ||
+ (device_context_->GetState() !=
+ CameraDeviceContext::State::kStreamConfigured &&
+ device_context_->GetState() != CameraDeviceContext::State::kCapturing)) {
+ std::move(callback).Run(std::move(photo_state));
+ return;
+ }
+
+ auto stream_config =
+ stream_buffer_manager_->GetStreamConfiguration(StreamType::kStillCapture);
+ if (stream_config) {
+ photo_state->width->current = stream_config->width;
+ photo_state->width->min = stream_config->width;
+ photo_state->width->max = stream_config->width;
+ photo_state->width->step = 0.0;
+ photo_state->height->current = stream_config->height;
+ photo_state->height->min = stream_config->height;
+ photo_state->height->max = stream_config->height;
+ photo_state->height->step = 0.0;
+ }
+ std::move(callback).Run(std::move(photo_state));
}
void CameraDeviceDelegate::SetPhotoOptions(
mojom::PhotoSettingsPtr settings,
VideoCaptureDevice::SetPhotoOptionsCallback callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- // TODO(jcliang): Implement SetPhotoOptions.
- NOTIMPLEMENTED() << "SetPhotoOptions is not implemented";
+
+ // Not supported at the moment.
+ std::move(callback).Run(true);
}
void CameraDeviceDelegate::SetRotation(int rotation) {
@@ -157,7 +270,7 @@ void CameraDeviceDelegate::OnMojoConnectionError() {
} else {
// The Mojo channel terminated unexpectedly.
if (stream_buffer_manager_) {
- stream_buffer_manager_->StopCapture();
+ stream_buffer_manager_->StopPreview();
}
device_context_->SetState(CameraDeviceContext::State::kStopped);
device_context_->SetErrorState(FROM_HERE, "Mojo connection error");
@@ -186,6 +299,7 @@ void CameraDeviceDelegate::ResetMojoInterface() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
device_ops_.reset();
+ camera_3a_controller_.reset();
stream_buffer_manager_.reset();
}
@@ -205,6 +319,7 @@ void CameraDeviceDelegate::OnGotCameraInfo(
device_context_->SetErrorState(FROM_HERE, "Failed to get camera info");
return;
}
+ SortCameraMetadata(&camera_info->static_camera_characteristics);
static_metadata_ = std::move(camera_info->static_camera_characteristics);
const cros::mojom::CameraMetadataEntryPtr* sensor_orientation =
@@ -223,12 +338,12 @@ void CameraDeviceDelegate::OnGotCameraInfo(
// |device_ops_| is bound after the MakeRequest call.
cros::mojom::Camera3DeviceOpsRequest device_ops_request =
mojo::MakeRequest(&device_ops_);
- device_ops_.set_connection_error_handler(
- base::Bind(&CameraDeviceDelegate::OnMojoConnectionError, GetWeakPtr()));
+ device_ops_.set_connection_error_handler(base::BindOnce(
+ &CameraDeviceDelegate::OnMojoConnectionError, GetWeakPtr()));
camera_hal_delegate_->OpenDevice(
camera_id_, std::move(device_ops_request),
BindToCurrentLoop(
- base::Bind(&CameraDeviceDelegate::OnOpenedDevice, GetWeakPtr())));
+ base::BindOnce(&CameraDeviceDelegate::OnOpenedDevice, GetWeakPtr())));
}
void CameraDeviceDelegate::OnOpenedDevice(int32_t result) {
@@ -264,10 +379,12 @@ void CameraDeviceDelegate::Initialize() {
std::move(callback_ops_request),
std::make_unique<StreamCaptureInterfaceImpl>(GetWeakPtr()),
device_context_, std::make_unique<CameraBufferFactory>(),
- ipc_task_runner_);
+ base::BindRepeating(&Blobify), ipc_task_runner_);
+ camera_3a_controller_ = std::make_unique<Camera3AController>(
+ static_metadata_, stream_buffer_manager_.get(), ipc_task_runner_);
device_ops_->Initialize(
std::move(callback_ops_ptr),
- base::Bind(&CameraDeviceDelegate::OnInitialized, GetWeakPtr()));
+ base::BindOnce(&CameraDeviceDelegate::OnInitialized, GetWeakPtr()));
}
void CameraDeviceDelegate::OnInitialized(int32_t result) {
@@ -296,8 +413,7 @@ void CameraDeviceDelegate::ConfigureStreams() {
// Set up context for preview stream.
cros::mojom::Camera3StreamPtr preview_stream =
cros::mojom::Camera3Stream::New();
- preview_stream->id = static_cast<uint64_t>(
- cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
+ preview_stream->id = static_cast<uint64_t>(StreamType::kPreview);
preview_stream->stream_type =
cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
preview_stream->width =
@@ -310,14 +426,36 @@ void CameraDeviceDelegate::ConfigureStreams() {
preview_stream->rotation =
cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
+ // Set up context for still capture stream. We set still capture stream to the
+ // JPEG stream configuration with maximum supported resolution.
+ // TODO(jcliang): Once we support SetPhotoOptions() the still capture stream
+ // should be configured dynamically per the photo options.
+ int32_t max_blob_width = 0, max_blob_height = 0;
+ GetMaxBlobStreamResolution(static_metadata_, &max_blob_width,
+ &max_blob_height);
+
+ cros::mojom::Camera3StreamPtr still_capture_stream =
+ cros::mojom::Camera3Stream::New();
+ still_capture_stream->id = static_cast<uint64_t>(StreamType::kStillCapture);
+ still_capture_stream->stream_type =
+ cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
+ still_capture_stream->width = max_blob_width;
+ still_capture_stream->height = max_blob_height;
+ still_capture_stream->format =
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB;
+ still_capture_stream->data_space = 0;
+ still_capture_stream->rotation =
+ cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
+
cros::mojom::Camera3StreamConfigurationPtr stream_config =
cros::mojom::Camera3StreamConfiguration::New();
stream_config->streams.push_back(std::move(preview_stream));
+ stream_config->streams.push_back(std::move(still_capture_stream));
stream_config->operation_mode = cros::mojom::Camera3StreamConfigurationMode::
CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
device_ops_->ConfigureStreams(
std::move(stream_config),
- base::Bind(&CameraDeviceDelegate::OnConfiguredStreams, GetWeakPtr()));
+ base::BindOnce(&CameraDeviceDelegate::OnConfiguredStreams, GetWeakPtr()));
}
void CameraDeviceDelegate::OnConfiguredStreams(
@@ -336,43 +474,46 @@ void CameraDeviceDelegate::OnConfiguredStreams(
std::string(strerror(result)));
return;
}
- if (!updated_config || updated_config->streams.size() != 1) {
+ if (!updated_config ||
+ updated_config->streams.size() != kMaxConfiguredStreams) {
device_context_->SetErrorState(
FROM_HERE, std::string("Wrong number of streams configured: ") +
std::to_string(updated_config->streams.size()));
return;
}
- // The partial result count metadata is optional; defaults to 1 in case it
- // is not set in the static metadata.
- uint32_t partial_result_count = 1;
- const cros::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
- static_metadata_,
- cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
- if (partial_count) {
- partial_result_count =
- *reinterpret_cast<int32_t*>((*partial_count)->data.data());
- }
- stream_buffer_manager_->SetUpStreamAndBuffers(
- chrome_capture_params_.requested_format, partial_result_count,
- std::move(updated_config->streams[0]));
+ stream_buffer_manager_->SetUpStreamsAndBuffers(
+ chrome_capture_params_.requested_format, static_metadata_,
+ std::move(updated_config->streams));
device_context_->SetState(CameraDeviceContext::State::kStreamConfigured);
- ConstructDefaultRequestSettings();
+ // Kick off the preview stream.
+ ConstructDefaultRequestSettings(StreamType::kPreview);
}
-void CameraDeviceDelegate::ConstructDefaultRequestSettings() {
+void CameraDeviceDelegate::ConstructDefaultRequestSettings(
+ StreamType stream_type) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- DCHECK_EQ(device_context_->GetState(),
- CameraDeviceContext::State::kStreamConfigured);
-
- device_ops_->ConstructDefaultRequestSettings(
- cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW,
- base::Bind(&CameraDeviceDelegate::OnConstructedDefaultRequestSettings,
- GetWeakPtr()));
+ DCHECK(device_context_->GetState() ==
+ CameraDeviceContext::State::kStreamConfigured ||
+ device_context_->GetState() == CameraDeviceContext::State::kCapturing);
+
+ if (stream_type == StreamType::kPreview) {
+ device_ops_->ConstructDefaultRequestSettings(
+ cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW,
+ base::BindOnce(
+ &CameraDeviceDelegate::OnConstructedDefaultPreviewRequestSettings,
+ GetWeakPtr()));
+ } else { // stream_type == StreamType::kStillCapture
+ device_ops_->ConstructDefaultRequestSettings(
+ cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_STILL_CAPTURE,
+ base::BindOnce(&CameraDeviceDelegate::
+ OnConstructedDefaultStillCaptureRequestSettings,
+ GetWeakPtr()));
+ }
}
-void CameraDeviceDelegate::OnConstructedDefaultRequestSettings(
+void CameraDeviceDelegate::OnConstructedDefaultPreviewRequestSettings(
cros::mojom::CameraMetadataPtr settings) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -388,7 +529,29 @@ void CameraDeviceDelegate::OnConstructedDefaultRequestSettings(
return;
}
device_context_->SetState(CameraDeviceContext::State::kCapturing);
- stream_buffer_manager_->StartCapture(std::move(settings));
+ camera_3a_controller_->SetAutoFocusModeForStillCapture();
+ stream_buffer_manager_->StartPreview(std::move(settings));
+
+ if (!take_photo_callbacks_.empty()) {
+ camera_3a_controller_->Stabilize3AForStillCapture(
+ base::BindOnce(&CameraDeviceDelegate::ConstructDefaultRequestSettings,
+ GetWeakPtr(), StreamType::kStillCapture));
+ }
+}
+
+void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
+ cros::mojom::CameraMetadataPtr settings) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ while (!take_photo_callbacks_.empty()) {
+ stream_buffer_manager_->TakePhoto(
+ settings.Clone(),
+ base::BindOnce(
+ &TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
+ camera_3a_controller_->GetWeakPtr())));
+ take_photo_callbacks_.pop();
+ }
}
void CameraDeviceDelegate::RegisterBuffer(
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.h b/chromium/media/capture/video/chromeos/camera_device_delegate.h
index 7b93f600d4f..c6567aa048b 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.h
@@ -9,6 +9,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
#include "media/capture/video/video_capture_device.h"
@@ -16,10 +17,21 @@
namespace media {
-class CameraHalDelegate;
+class Camera3AController;
class CameraDeviceContext;
+class CameraHalDelegate;
class StreamBufferManager;
+enum class StreamType : uint64_t {
+ kPreview = 0,
+ kStillCapture = 1,
+ kUnknown,
+};
+
+std::string StreamTypeToString(StreamType stream_type);
+
+std::ostream& operator<<(std::ostream& os, StreamType stream_type);
+
// The interface to register buffer with and send capture request to the
// camera HAL.
class CAPTURE_EXPORT StreamCaptureInterface {
@@ -121,10 +133,14 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
// settings of the stream in |stream_context_|.
// OnConstructedDefaultRequestSettings sets the request settings in
// |streams_context_|. If there's no error
- // OnConstructedDefaultRequestSettings calls StartCapture to start the video
- // capture loop.
- void ConstructDefaultRequestSettings();
- void OnConstructedDefaultRequestSettings(
+ // OnConstructedDefaultPreviewRequestSettings calls StartPreview to start the
+ // video capture loop.
+  // OnConstructedDefaultStillCaptureRequestSettings triggers
+ // |stream_buffer_manager_| to request a still capture.
+ void ConstructDefaultRequestSettings(StreamType stream_type);
+ void OnConstructedDefaultPreviewRequestSettings(
+ cros::mojom::CameraMetadataPtr settings);
+ void OnConstructedDefaultStillCaptureRequestSettings(
cros::mojom::CameraMetadataPtr settings);
// StreamCaptureInterface implementations. These methods are called by
@@ -150,8 +166,12 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
CameraDeviceContext* device_context_;
+ std::queue<VideoCaptureDevice::TakePhotoCallback> take_photo_callbacks_;
+
std::unique_ptr<StreamBufferManager> stream_buffer_manager_;
+ std::unique_ptr<Camera3AController> camera_3a_controller_;
+
// Stores the static camera characteristics of the camera device. E.g. the
// supported formats and resolution, various available exposure and apeture
// settings, etc.
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index 529ea0b6b6e..aa0f3c91fcc 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -17,9 +17,9 @@
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
#include "media/capture/video/chromeos/mock_camera_module.h"
-#include "media/capture/video/chromeos/mock_gpu_memory_buffer_manager.h"
#include "media/capture/video/chromeos/mock_video_capture_client.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
+#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -110,8 +110,11 @@ class MockCameraDevice : public cros::mojom::Camera3DeviceOps {
DISALLOW_COPY_AND_ASSIGN(MockCameraDevice);
};
+constexpr int32_t kJpegMaxBufferSize = 1024;
+constexpr size_t kDefaultWidth = 1280, kDefaultHeight = 720;
const VideoCaptureDeviceDescriptor kDefaultDescriptor("Fake device", "0");
-const VideoCaptureFormat kDefaultCaptureFormat(gfx::Size(1280, 720),
+const VideoCaptureFormat kDefaultCaptureFormat(gfx::Size(kDefaultWidth,
+ kDefaultHeight),
30.0,
PIXEL_FORMAT_I420);
@@ -153,16 +156,60 @@ class CameraDeviceDelegateTest : public ::testing::Test {
cros::mojom::CameraInfoPtr camera_info = cros::mojom::CameraInfo::New();
cros::mojom::CameraMetadataPtr static_metadata =
cros::mojom::CameraMetadata::New();
+
+ static_metadata->entry_count = 3;
+ static_metadata->entry_capacity = 3;
+ static_metadata->entries =
+ std::vector<cros::mojom::CameraMetadataEntryPtr>();
+
cros::mojom::CameraMetadataEntryPtr entry =
cros::mojom::CameraMetadataEntry::New();
entry->index = 0;
+ entry->tag = cros::mojom::CameraMetadataTag::
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+ entry->type = cros::mojom::EntryType::TYPE_INT32;
+ entry->count = 12;
+ std::vector<int32_t> stream_configurations(entry->count);
+ stream_configurations[0] = static_cast<int32_t>(
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+ stream_configurations[1] = kDefaultWidth;
+ stream_configurations[2] = kDefaultHeight;
+ stream_configurations[3] = static_cast<int32_t>(
+ cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT);
+ stream_configurations[4] = static_cast<int32_t>(
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888);
+ stream_configurations[5] = kDefaultWidth;
+ stream_configurations[6] = kDefaultHeight;
+ stream_configurations[7] = static_cast<int32_t>(
+ cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT);
+ stream_configurations[8] = static_cast<int32_t>(
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB);
+ stream_configurations[9] = kDefaultWidth;
+ stream_configurations[10] = kDefaultHeight;
+ stream_configurations[11] = static_cast<int32_t>(
+ cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT);
+ uint8_t* as_int8 = reinterpret_cast<uint8_t*>(stream_configurations.data());
+ entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
+ static_metadata->entries->push_back(std::move(entry));
+
+ entry = cros::mojom::CameraMetadataEntry::New();
+ entry->index = 1;
entry->tag = cros::mojom::CameraMetadataTag::ANDROID_SENSOR_ORIENTATION;
entry->type = cros::mojom::EntryType::TYPE_INT32;
entry->count = 1;
entry->data = std::vector<uint8_t>(4, 0);
- static_metadata->entries =
- std::vector<cros::mojom::CameraMetadataEntryPtr>();
static_metadata->entries->push_back(std::move(entry));
+
+ entry = cros::mojom::CameraMetadataEntry::New();
+ entry->index = 2;
+ entry->tag = cros::mojom::CameraMetadataTag::ANDROID_JPEG_MAX_SIZE;
+ entry->type = cros::mojom::EntryType::TYPE_INT32;
+ entry->count = 1;
+ int32_t jpeg_max_size = kJpegMaxBufferSize;
+ as_int8 = reinterpret_cast<uint8_t*>(&jpeg_max_size);
+ entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
+ static_metadata->entries->push_back(std::move(entry));
+
switch (camera_id) {
case 0:
camera_info->facing = cros::mojom::CameraFacing::CAMERA_FACING_FRONT;
@@ -195,23 +242,17 @@ class CameraDeviceDelegateTest : public ::testing::Test {
base::OnceCallback<void(int32_t,
cros::mojom::Camera3StreamConfigurationPtr)>&
callback) {
- ASSERT_EQ(1u, config->streams.size());
- ASSERT_EQ(static_cast<uint32_t>(kDefaultCaptureFormat.frame_size.width()),
- config->streams[0]->width);
- ASSERT_EQ(static_cast<uint32_t>(kDefaultCaptureFormat.frame_size.height()),
- config->streams[0]->height);
- ASSERT_EQ(cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888,
- config->streams[0]->format);
- config->streams[0]->usage = 0;
- config->streams[0]->max_buffers = 1;
+ ASSERT_EQ(2u, config->streams.size());
+ for (size_t i = 0; i < config->streams.size(); ++i) {
+ config->streams[i]->usage = 0;
+ config->streams[i]->max_buffers = 1;
+ }
std::move(callback).Run(0, std::move(config));
}
void ConstructFakeRequestSettings(
cros::mojom::Camera3RequestTemplate type,
base::OnceCallback<void(cros::mojom::CameraMetadataPtr)>& callback) {
- ASSERT_EQ(cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW,
- type);
cros::mojom::CameraMetadataPtr fake_settings =
cros::mojom::CameraMetadata::New();
fake_settings->entry_count = 1;
@@ -289,18 +330,33 @@ class CameraDeviceDelegateTest : public ::testing::Test {
gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
gpu::kNullSurfaceHandle))
.Times(1)
- .WillOnce(Invoke(
- &mock_gpu_memory_buffer_manager_,
- &unittest_internal::MockGpuMemoryBufferManager::ReturnValidBuffer));
+ .WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
+ CreateFakeGpuMemoryBuffer));
+ EXPECT_CALL(
+ mock_gpu_memory_buffer_manager_,
+ CreateGpuMemoryBuffer(_, gfx::BufferFormat::R_8,
+ gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle))
+ .Times(1)
+ .WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
+ CreateFakeGpuMemoryBuffer));
+ EXPECT_CALL(
+ mock_gpu_memory_buffer_manager_,
+ CreateGpuMemoryBuffer(gfx::Size(kDefaultWidth, kDefaultHeight),
+ gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
+ gpu::kNullSurfaceHandle))
+ .Times(1)
+ .WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
+ CreateFakeGpuMemoryBuffer));
EXPECT_CALL(mock_gpu_memory_buffer_manager_,
CreateGpuMemoryBuffer(
- gfx::Size(1280, 720), gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
+ gfx::Size(kJpegMaxBufferSize, 1), gfx::BufferFormat::R_8,
+ gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE,
gpu::kNullSurfaceHandle))
.Times(1)
- .WillOnce(Invoke(
- &mock_gpu_memory_buffer_manager_,
- &unittest_internal::MockGpuMemoryBufferManager::ReturnValidBuffer));
+ .WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
+ CreateFakeGpuMemoryBuffer));
}
void SetUpExpectationUntilCapturing(
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index 708e62f4872..3a33d9b8f89 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -67,14 +67,14 @@ void CameraHalDelegate::RegisterCameraClient() {
void CameraHalDelegate::SetCameraModule(
cros::mojom::CameraModulePtrInfo camera_module_ptr_info) {
ipc_task_runner_->PostTask(
- FROM_HERE, base::Bind(&CameraHalDelegate::SetCameraModuleOnIpcThread,
- this, base::Passed(&camera_module_ptr_info)));
+ FROM_HERE, base::BindOnce(&CameraHalDelegate::SetCameraModuleOnIpcThread,
+ this, base::Passed(&camera_module_ptr_info)));
}
void CameraHalDelegate::Reset() {
ipc_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&CameraHalDelegate::ResetMojoInterfaceOnIpcThread, this));
+ base::BindOnce(&CameraHalDelegate::ResetMojoInterfaceOnIpcThread, this));
}
std::unique_ptr<VideoCaptureDevice> CameraHalDelegate::CreateDevice(
@@ -130,21 +130,25 @@ void CameraHalDelegate::GetSupportedFormats(
reinterpret_cast<int64_t*>((*min_frame_durations)->data.data());
for (size_t i = 0; i < (*min_frame_durations)->count;
i += kStreamDurationSize) {
- int32_t format = base::checked_cast<int32_t>(iter[kStreamFormatOffset]);
+ auto hal_format =
+ static_cast<cros::mojom::HalPixelFormat>(iter[kStreamFormatOffset]);
int32_t width = base::checked_cast<int32_t>(iter[kStreamWidthOffset]);
int32_t height = base::checked_cast<int32_t>(iter[kStreamHeightOffset]);
int64_t duration = iter[kStreamDurationOffset];
iter += kStreamDurationSize;
+ if (hal_format == cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB) {
+ // Skip BLOB formats and use it only for TakePicture() since it's
+ // inefficient to stream JPEG frames for CrOS camera HAL.
+ continue;
+ }
+
if (duration <= 0) {
LOG(ERROR) << "Ignoring invalid frame duration: " << duration;
continue;
}
float max_fps = 1.0 * 1000000000LL / duration;
- DVLOG(1) << "[" << std::hex << format << " " << std::dec << width << " "
- << height << " " << duration << "]";
- auto hal_format = static_cast<cros::mojom::HalPixelFormat>(format);
const ChromiumPixelFormat cr_format =
camera_buffer_factory_->ResolveStreamBufferFormat(hal_format);
if (cr_format.video_format == PIXEL_FORMAT_UNKNOWN) {
@@ -198,20 +202,20 @@ void CameraHalDelegate::GetDeviceDescriptors(
}
void CameraHalDelegate::GetCameraInfo(int32_t camera_id,
- const GetCameraInfoCallback& callback) {
+ GetCameraInfoCallback callback) {
DCHECK(!ipc_task_runner_->BelongsToCurrentThread());
// This method may be called on any thread except |ipc_task_runner_|.
// Currently this method is used by CameraDeviceDelegate to query camera info.
camera_module_has_been_set_.Wait();
ipc_task_runner_->PostTask(
- FROM_HERE, base::Bind(&CameraHalDelegate::GetCameraInfoOnIpcThread, this,
- camera_id, callback));
+ FROM_HERE, base::BindOnce(&CameraHalDelegate::GetCameraInfoOnIpcThread,
+ this, camera_id, std::move(callback)));
}
void CameraHalDelegate::OpenDevice(
int32_t camera_id,
cros::mojom::Camera3DeviceOpsRequest device_ops_request,
- const OpenDeviceCallback& callback) {
+ OpenDeviceCallback callback) {
DCHECK(!ipc_task_runner_->BelongsToCurrentThread());
// This method may be called on any thread except |ipc_task_runner_|.
// Currently this method is used by CameraDeviceDelegate to open a camera
@@ -219,8 +223,8 @@ void CameraHalDelegate::OpenDevice(
camera_module_has_been_set_.Wait();
ipc_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&CameraHalDelegate::OpenDeviceOnIpcThread, this, camera_id,
- base::Passed(&device_ops_request), callback));
+ base::BindOnce(&CameraHalDelegate::OpenDeviceOnIpcThread, this, camera_id,
+ base::Passed(&device_ops_request), std::move(callback)));
}
void CameraHalDelegate::SetCameraModuleOnIpcThread(
@@ -232,7 +236,7 @@ void CameraHalDelegate::SetCameraModuleOnIpcThread(
}
camera_module_ = mojo::MakeProxy(std::move(camera_module_ptr_info));
camera_module_.set_connection_error_handler(
- base::Bind(&CameraHalDelegate::ResetMojoInterfaceOnIpcThread, this));
+ base::BindOnce(&CameraHalDelegate::ResetMojoInterfaceOnIpcThread, this));
camera_module_has_been_set_.Signal();
}
@@ -258,7 +262,8 @@ bool CameraHalDelegate::UpdateBuiltInCameraInfo() {
// v3 specification. We only update the built-in camera info once.
ipc_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&CameraHalDelegate::UpdateBuiltInCameraInfoOnIpcThread, this));
+ base::BindOnce(&CameraHalDelegate::UpdateBuiltInCameraInfoOnIpcThread,
+ this));
if (!builtin_camera_info_updated_.TimedWait(kEventWaitTimeoutMs)) {
LOG(ERROR) << "Timed out getting camera info";
return false;
@@ -268,8 +273,8 @@ bool CameraHalDelegate::UpdateBuiltInCameraInfo() {
void CameraHalDelegate::UpdateBuiltInCameraInfoOnIpcThread() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- camera_module_->GetNumberOfCameras(
- base::Bind(&CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread, this));
+ camera_module_->GetNumberOfCameras(base::BindOnce(
+ &CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread, this));
}
void CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras) {
@@ -290,7 +295,7 @@ void CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras) {
camera_module_callbacks_.Bind(std::move(camera_module_callbacks_request));
camera_module_->SetCallbacks(
std::move(camera_module_callbacks_ptr),
- base::Bind(&CameraHalDelegate::OnSetCallbacksOnIpcThread, this));
+ base::BindOnce(&CameraHalDelegate::OnSetCallbacksOnIpcThread, this));
}
void CameraHalDelegate::OnSetCallbacksOnIpcThread(int32_t result) {
@@ -303,16 +308,17 @@ void CameraHalDelegate::OnSetCallbacksOnIpcThread(int32_t result) {
}
for (size_t camera_id = 0; camera_id < num_builtin_cameras_; ++camera_id) {
GetCameraInfoOnIpcThread(
- camera_id, base::Bind(&CameraHalDelegate::OnGotCameraInfoOnIpcThread,
- this, camera_id));
+ camera_id,
+ base::BindOnce(&CameraHalDelegate::OnGotCameraInfoOnIpcThread, this,
+ camera_id));
}
}
void CameraHalDelegate::GetCameraInfoOnIpcThread(
int32_t camera_id,
- const GetCameraInfoCallback& callback) {
+ GetCameraInfoCallback callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- camera_module_->GetCameraInfo(camera_id, callback);
+ camera_module_->GetCameraInfo(camera_id, std::move(callback));
}
void CameraHalDelegate::OnGotCameraInfoOnIpcThread(
@@ -325,6 +331,7 @@ void CameraHalDelegate::OnGotCameraInfoOnIpcThread(
LOG(ERROR) << "Failed to get camera info. Camera id: " << camera_id;
}
// In case of error |camera_info| is empty.
+ SortCameraMetadata(&camera_info->static_camera_characteristics);
camera_info_[std::to_string(camera_id)] = std::move(camera_info);
if (camera_info_.size() == num_builtin_cameras_) {
builtin_camera_info_updated_.Signal();
@@ -334,10 +341,10 @@ void CameraHalDelegate::OnGotCameraInfoOnIpcThread(
void CameraHalDelegate::OpenDeviceOnIpcThread(
int32_t camera_id,
cros::mojom::Camera3DeviceOpsRequest device_ops_request,
- const OpenDeviceCallback& callback) {
+ OpenDeviceCallback callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
camera_module_->OpenDevice(camera_id, std::move(device_ops_request),
- callback);
+ std::move(callback));
}
// CameraModuleCallbacks implementations.
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.h b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
index a600e816794..b8c0745e063 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
@@ -11,6 +11,7 @@
#include "base/macros.h"
#include "base/sequence_checker.h"
+#include "base/single_thread_task_runner.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread.h"
#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
@@ -63,16 +64,16 @@ class CAPTURE_EXPORT CameraHalDelegate final
// Asynchronous method to get the camera info of |camera_id|. This method may
// be called on any thread.
using GetCameraInfoCallback =
- base::Callback<void(int32_t, cros::mojom::CameraInfoPtr)>;
- void GetCameraInfo(int32_t camera_id, const GetCameraInfoCallback& callback);
+ base::OnceCallback<void(int32_t, cros::mojom::CameraInfoPtr)>;
+ void GetCameraInfo(int32_t camera_id, GetCameraInfoCallback callback);
// Asynchronous method to open the camera device designated by |camera_id|.
// This method may be called on any thread; |callback| will run on
// |ipc_task_runner_|.
- using OpenDeviceCallback = base::Callback<void(int32_t)>;
+ using OpenDeviceCallback = base::OnceCallback<void(int32_t)>;
void OpenDevice(int32_t camera_id,
cros::mojom::Camera3DeviceOpsRequest device_ops_request,
- const OpenDeviceCallback& callback);
+ OpenDeviceCallback callback);
private:
friend class base::RefCountedThreadSafe<CameraHalDelegate>;
@@ -98,7 +99,7 @@ class CAPTURE_EXPORT CameraHalDelegate final
// to |camera_module_|.
void OnSetCallbacksOnIpcThread(int32_t result);
void GetCameraInfoOnIpcThread(int32_t camera_id,
- const GetCameraInfoCallback& callback);
+ GetCameraInfoCallback callback);
void OnGotCameraInfoOnIpcThread(int32_t camera_id,
int32_t result,
cros::mojom::CameraInfoPtr camera_info);
@@ -108,7 +109,7 @@ class CAPTURE_EXPORT CameraHalDelegate final
void OpenDeviceOnIpcThread(
int32_t camera_id,
cros::mojom::Camera3DeviceOpsRequest device_ops_request,
- const OpenDeviceCallback& callback);
+ OpenDeviceCallback callback);
// CameraModuleCallbacks implementation. Operates on |ipc_task_runner_|.
void CameraDeviceStatusChange(
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
index ab63cc3f26c..33349860f1c 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
@@ -13,8 +13,8 @@
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "media/capture/video/chromeos/mock_camera_module.h"
-#include "media/capture/video/chromeos/mock_gpu_memory_buffer_manager.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
+#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -153,9 +153,8 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
gpu::kNullSurfaceHandle))
.Times(1)
- .WillOnce(Invoke(
- &mock_gpu_memory_buffer_manager_,
- &unittest_internal::MockGpuMemoryBufferManager::ReturnValidBuffer));
+ .WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
+ CreateFakeGpuMemoryBuffer));
VideoCaptureFormats supported_formats;
camera_hal_delegate_->GetSupportedFormats(descriptors[0], &supported_formats);
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
index 3008d574737..50eb9cb16a8 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
@@ -116,6 +116,7 @@ bool CameraHalDispatcherImpl::StartThreads() {
bool CameraHalDispatcherImpl::Start(
MojoJpegDecodeAcceleratorFactoryCB jda_factory,
MojoJpegEncodeAcceleratorFactoryCB jea_factory) {
+ DCHECK(!IsStarted());
if (!StartThreads()) {
return false;
}
@@ -125,8 +126,8 @@ bool CameraHalDispatcherImpl::Start(
base::WaitableEvent::InitialState::NOT_SIGNALED);
blocking_io_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&CameraHalDispatcherImpl::CreateSocket, base::Unretained(this),
- base::Unretained(&started)));
+ base::BindOnce(&CameraHalDispatcherImpl::CreateSocket,
+ base::Unretained(this), base::Unretained(&started)));
started.Wait();
return IsStarted();
}
@@ -155,8 +156,8 @@ CameraHalDispatcherImpl::~CameraHalDispatcherImpl() {
VLOG(1) << "Stopping CameraHalDispatcherImpl...";
if (proxy_thread_.IsRunning()) {
proxy_thread_.task_runner()->PostTask(
- FROM_HERE, base::Bind(&CameraHalDispatcherImpl::StopOnProxyThread,
- base::Unretained(this)));
+ FROM_HERE, base::BindOnce(&CameraHalDispatcherImpl::StopOnProxyThread,
+ base::Unretained(this)));
proxy_thread_.Stop();
}
blocking_io_thread_.Stop();
@@ -172,8 +173,8 @@ void CameraHalDispatcherImpl::RegisterServer(
return;
}
camera_hal_server.set_connection_error_handler(
- base::Bind(&CameraHalDispatcherImpl::OnCameraHalServerConnectionError,
- base::Unretained(this)));
+ base::BindOnce(&CameraHalDispatcherImpl::OnCameraHalServerConnectionError,
+ base::Unretained(this)));
camera_hal_server_ = std::move(camera_hal_server);
VLOG(1) << "Camera HAL server registered";
@@ -189,7 +190,7 @@ void CameraHalDispatcherImpl::RegisterClient(
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
auto client_observer =
std::make_unique<MojoCameraClientObserver>(std::move(client));
- client_observer->client().set_connection_error_handler(base::Bind(
+ client_observer->client().set_connection_error_handler(base::BindOnce(
&CameraHalDispatcherImpl::OnCameraHalClientConnectionError,
base::Unretained(this), base::Unretained(client_observer.get())));
AddClientObserver(std::move(client_observer));
@@ -210,8 +211,9 @@ void CameraHalDispatcherImpl::CreateSocket(base::WaitableEvent* started) {
DCHECK(blocking_io_task_runner_->BelongsToCurrentThread());
base::FilePath socket_path(kArcCamera3SocketPath);
- mojo::edk::ScopedPlatformHandle socket_fd = mojo::edk::CreateServerHandle(
- mojo::edk::NamedPlatformHandle(socket_path.value()));
+ mojo::edk::ScopedInternalPlatformHandle socket_fd =
+ mojo::edk::CreateServerHandle(
+ mojo::edk::NamedPlatformHandle(socket_path.value()));
if (!socket_fd.is_valid()) {
LOG(ERROR) << "Failed to create the socket file: " << kArcCamera3SocketPath;
started->Signal();
@@ -249,13 +251,14 @@ void CameraHalDispatcherImpl::CreateSocket(base::WaitableEvent* started) {
}
blocking_io_task_runner_->PostTask(
- FROM_HERE, base::Bind(&CameraHalDispatcherImpl::StartServiceLoop,
- base::Unretained(this), base::Passed(&socket_fd),
- base::Unretained(started)));
+ FROM_HERE,
+ base::BindOnce(&CameraHalDispatcherImpl::StartServiceLoop,
+ base::Unretained(this), base::Passed(&socket_fd),
+ base::Unretained(started)));
}
void CameraHalDispatcherImpl::StartServiceLoop(
- mojo::edk::ScopedPlatformHandle socket_fd,
+ mojo::edk::ScopedInternalPlatformHandle socket_fd,
base::WaitableEvent* started) {
DCHECK(blocking_io_task_runner_->BelongsToCurrentThread());
DCHECK(!proxy_fd_.is_valid());
@@ -279,7 +282,7 @@ void CameraHalDispatcherImpl::StartServiceLoop(
return;
}
- mojo::edk::ScopedPlatformHandle accepted_fd;
+ mojo::edk::ScopedInternalPlatformHandle accepted_fd;
if (mojo::edk::ServerAcceptConnection(proxy_fd_, &accepted_fd, false) &&
accepted_fd.is_valid()) {
VLOG(1) << "Accepted a connection";
@@ -296,7 +299,7 @@ void CameraHalDispatcherImpl::StartServiceLoop(
mojo::edk::ConnectionParams(mojo::edk::TransportProtocol::kLegacy,
channel_pair.PassServerHandle()));
- std::vector<mojo::edk::ScopedPlatformHandle> handles;
+ std::vector<mojo::edk::ScopedInternalPlatformHandle> handles;
handles.emplace_back(channel_pair.PassClientHandle());
struct iovec iov = {const_cast<char*>(token.c_str()), token.length()};
@@ -306,8 +309,9 @@ void CameraHalDispatcherImpl::StartServiceLoop(
PLOG(ERROR) << "sendmsg()";
} else {
proxy_task_runner_->PostTask(
- FROM_HERE, base::Bind(&CameraHalDispatcherImpl::OnPeerConnected,
- base::Unretained(this), base::Passed(&pipe)));
+ FROM_HERE,
+ base::BindOnce(&CameraHalDispatcherImpl::OnPeerConnected,
+ base::Unretained(this), base::Passed(&pipe)));
}
}
}
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
index 7ab1d66bfe1..7470f777b4b 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
@@ -78,7 +78,7 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
// Waits for incoming connections (from HAL process or from client processes).
// Runs on |blocking_io_thread_|.
- void StartServiceLoop(mojo::edk::ScopedPlatformHandle socket_fd,
+ void StartServiceLoop(mojo::edk::ScopedInternalPlatformHandle socket_fd,
base::WaitableEvent* started);
void AddClientObserverOnProxyThread(
@@ -95,7 +95,7 @@ class CAPTURE_EXPORT CameraHalDispatcherImpl final
void StopOnProxyThread();
- mojo::edk::ScopedPlatformHandle proxy_fd_;
+ mojo::edk::ScopedInternalPlatformHandle proxy_fd_;
base::ScopedFD cancel_pipe_;
base::Thread proxy_thread_;
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
index e5dedcfaeab..f552085cfd8 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
@@ -8,6 +8,7 @@
#include <utility>
#include "base/run_loop.h"
+#include "base/single_thread_task_runner.h"
#include "base/test/scoped_task_environment.h"
#include "media/capture/video/chromeos/mojo/cros_camera_service.mojom.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
index 8f3d3a461d6..c02241e49f3 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
@@ -4,22 +4,62 @@
#include "media/capture/video/chromeos/camera_metadata_utils.h"
-#include <set>
+#include <algorithm>
+#include <unordered_set>
namespace media {
-const cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
+cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
const cros::mojom::CameraMetadataPtr& camera_metadata,
cros::mojom::CameraMetadataTag tag) {
- if (!camera_metadata->entries.has_value()) {
+ if (!camera_metadata || !camera_metadata->entries.has_value()) {
return nullptr;
}
- for (const auto& entry : camera_metadata->entries.value()) {
- if (entry->tag == tag) {
- return &entry;
- }
+ // We assume the metadata entries are sorted.
+ auto iter = std::find_if(camera_metadata->entries.value().begin(),
+ camera_metadata->entries.value().end(),
+ [tag](const cros::mojom::CameraMetadataEntryPtr& e) {
+ return e->tag == tag;
+ });
+ if (iter == camera_metadata->entries.value().end()) {
+ return nullptr;
+ }
+ return &(camera_metadata->entries.value()[(*iter)->index]);
+}
+
+void AddOrUpdateMetadataEntry(cros::mojom::CameraMetadataPtr* to,
+ cros::mojom::CameraMetadataEntryPtr entry) {
+ auto* e = GetMetadataEntry(*to, entry->tag);
+ if (e) {
+ (*to)->data_count += entry->data.size() - (*e)->data.size();
+ (*to)->data_capacity = std::max((*to)->data_capacity, (*to)->data_count);
+ (*e)->count = entry->count;
+ (*e)->data = std::move(entry->data);
+ } else {
+ entry->index = (*to)->entries->size();
+ (*to)->entry_count += 1;
+ (*to)->entry_capacity = std::max((*to)->entry_capacity, (*to)->entry_count);
+ (*to)->data_count += entry->data.size();
+ (*to)->data_capacity = std::max((*to)->data_capacity, (*to)->data_count);
+ (*to)->entries->push_back(std::move(entry));
+ SortCameraMetadata(to);
+ }
+}
+
+void SortCameraMetadata(cros::mojom::CameraMetadataPtr* camera_metadata) {
+ if (!camera_metadata || !(*camera_metadata) ||
+ !(*camera_metadata)->entries.has_value()) {
+ return;
+ }
+ std::sort((*camera_metadata)->entries.value().begin(),
+ (*camera_metadata)->entries.value().end(),
+ [](const cros::mojom::CameraMetadataEntryPtr& a,
+ const cros::mojom::CameraMetadataEntryPtr& b) {
+ return a->tag < b->tag;
+ });
+ for (size_t i = 0; i < (*camera_metadata)->entries.value().size(); ++i) {
+ (*camera_metadata)->entries.value()[i]->index = i;
}
- return nullptr;
}
void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
@@ -34,7 +74,7 @@ void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
return;
}
- std::set<cros::mojom::CameraMetadataTag> tags;
+ std::unordered_set<cros::mojom::CameraMetadataTag> tags;
if ((*to)->entries) {
for (const auto& entry : (*to)->entries.value()) {
tags.insert(entry->tag);
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.h b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
index 8dbb1084d75..3ff02d638ee 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.h
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
@@ -5,16 +5,25 @@
#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_METADATA_UTILS_H_
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_METADATA_UTILS_H_
+#include "media/capture/capture_export.h"
#include "media/capture/video/chromeos/mojo/camera_metadata.mojom.h"
namespace media {
-const cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
+CAPTURE_EXPORT cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
const cros::mojom::CameraMetadataPtr& camera_metadata,
cros::mojom::CameraMetadataTag tag);
-void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
- const cros::mojom::CameraMetadataPtr& from);
+CAPTURE_EXPORT void AddOrUpdateMetadataEntry(
+ cros::mojom::CameraMetadataPtr* to,
+ cros::mojom::CameraMetadataEntryPtr entry);
+
+// Sort the camera metadata entries using the metadata tags.
+CAPTURE_EXPORT void SortCameraMetadata(
+ cros::mojom::CameraMetadataPtr* camera_metadata);
+
+CAPTURE_EXPORT void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
+ const cros::mojom::CameraMetadataPtr& from);
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/display_rotation_observer.cc b/chromium/media/capture/video/chromeos/display_rotation_observer.cc
index e1d5de3760f..42454811d20 100644
--- a/chromium/media/capture/video/chromeos/display_rotation_observer.cc
+++ b/chromium/media/capture/video/chromeos/display_rotation_observer.cc
@@ -19,7 +19,8 @@ ScreenObserverDelegate::ScreenObserverDelegate(
delegate_task_runner_(base::ThreadTaskRunnerHandle::Get()) {
display_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&ScreenObserverDelegate::AddObserverOnDisplayThread, this));
+ base::BindOnce(&ScreenObserverDelegate::AddObserverOnDisplayThread,
+ this));
}
void ScreenObserverDelegate::RemoveObserver() {
@@ -27,7 +28,8 @@ void ScreenObserverDelegate::RemoveObserver() {
observer_ = NULL;
display_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&ScreenObserverDelegate::RemoveObserverOnDisplayThread, this));
+ base::BindOnce(&ScreenObserverDelegate::RemoveObserverOnDisplayThread,
+ this));
}
ScreenObserverDelegate::~ScreenObserverDelegate() {
@@ -72,8 +74,9 @@ void ScreenObserverDelegate::SendDisplayRotation(
DCHECK(display_task_runner_->BelongsToCurrentThread());
delegate_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&ScreenObserverDelegate::SendDisplayRotationOnCaptureThread,
- this, display));
+ base::BindOnce(
+ &ScreenObserverDelegate::SendDisplayRotationOnCaptureThread, this,
+ display));
}
void ScreenObserverDelegate::SendDisplayRotationOnCaptureThread(
diff --git a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
index 2396d59c7d3..020ca137016 100644
--- a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
@@ -51,6 +51,8 @@ gbm_device* CreateGbmDevice() {
uint32_t GetDrmFormat(gfx::BufferFormat gfx_format) {
switch (gfx_format) {
+ case gfx::BufferFormat::R_8:
+ return DRM_FORMAT_R8;
case gfx::BufferFormat::YUV_420_BIPLANAR:
return DRM_FORMAT_NV12;
// Add more formats when needed.
@@ -186,7 +188,8 @@ LocalGpuMemoryBufferManager::CreateGpuMemoryBuffer(
gfx::BufferFormat format,
gfx::BufferUsage usage,
gpu::SurfaceHandle surface_handle) {
- if (usage != gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE) {
+ if (usage != gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE &&
+ usage != gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE) {
LOG(ERROR) << "Unsupported gfx::BufferUsage" << static_cast<int>(usage);
return std::unique_ptr<gfx::GpuMemoryBuffer>();
}
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.cc b/chromium/media/capture/video/chromeos/mock_camera_module.cc
index b36dab0c4cc..3a60bd7e477 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.cc
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.cc
@@ -17,8 +17,8 @@ MockCameraModule::MockCameraModule()
MockCameraModule::~MockCameraModule() {
mock_module_thread_.task_runner()->PostTask(
- FROM_HERE, base::Bind(&MockCameraModule::CloseBindingOnThread,
- base::Unretained(this)));
+ FROM_HERE, base::BindOnce(&MockCameraModule::CloseBindingOnThread,
+ base::Unretained(this)));
mock_module_thread_.Stop();
}
@@ -64,8 +64,8 @@ cros::mojom::CameraModulePtrInfo MockCameraModule::GetInterfacePtrInfo() {
cros::mojom::CameraModulePtrInfo ptr_info;
mock_module_thread_.task_runner()->PostTask(
FROM_HERE,
- base::Bind(&MockCameraModule::BindOnThread, base::Unretained(this),
- base::Unretained(&done), base::Unretained(&ptr_info)));
+ base::BindOnce(&MockCameraModule::BindOnThread, base::Unretained(this),
+ base::Unretained(&done), base::Unretained(&ptr_info)));
done.Wait();
return ptr_info;
}
diff --git a/chromium/media/capture/video/chromeos/mock_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/chromeos/mock_gpu_memory_buffer_manager.cc
deleted file mode 100644
index 9a45b69f3ce..00000000000
--- a/chromium/media/capture/video/chromeos/mock_gpu_memory_buffer_manager.cc
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/chromeos/mock_gpu_memory_buffer_manager.h"
-
-#include <memory>
-
-using ::testing::Return;
-
-namespace media {
-namespace unittest_internal {
-
-MockGpuMemoryBuffer::MockGpuMemoryBuffer() = default;
-
-MockGpuMemoryBuffer::~MockGpuMemoryBuffer() = default;
-
-MockGpuMemoryBufferManager::MockGpuMemoryBufferManager() = default;
-
-MockGpuMemoryBufferManager::~MockGpuMemoryBufferManager() = default;
-
-std::unique_ptr<gfx::GpuMemoryBuffer>
-MockGpuMemoryBufferManager::ReturnValidBuffer(
- const gfx::Size& size,
- gfx::BufferFormat format,
- gfx::BufferUsage usage,
- gpu::SurfaceHandle surface_handle) {
- // We use only NV12 in unit tests.
- EXPECT_EQ(gfx::BufferFormat::YUV_420_BIPLANAR, format);
-
- gfx::GpuMemoryBufferHandle handle;
- handle.type = gfx::NATIVE_PIXMAP;
- // Set a dummy id since this is for testing only.
- handle.id = gfx::GpuMemoryBufferId(0);
- // Set a dummy fd since this is for testing only.
- handle.native_pixmap_handle.fds.push_back(base::FileDescriptor(0, false));
- handle.native_pixmap_handle.planes.push_back(
- gfx::NativePixmapPlane(size.width(), 0, size.width() * size.height()));
- handle.native_pixmap_handle.planes.push_back(gfx::NativePixmapPlane(
- size.width(), handle.native_pixmap_handle.planes[0].size,
- size.width() * size.height() / 2));
-
- auto mock_buffer = std::make_unique<MockGpuMemoryBuffer>();
- ON_CALL(*mock_buffer, Map()).WillByDefault(Return(true));
- ON_CALL(*mock_buffer, memory(0))
- .WillByDefault(Return(reinterpret_cast<void*>(0xdeafbeef)));
- ON_CALL(*mock_buffer, GetHandle()).WillByDefault(Return(handle));
-
- return mock_buffer;
-}
-
-} // namespace unittest_internal
-} // namespace media
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
index 7c22b95f77c..29d57c50a45 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
@@ -52,11 +52,24 @@ void MockVideoCaptureClient::OnIncomingCapturedData(
}
}
+void MockVideoCaptureClient::OnIncomingCapturedGfxBuffer(
+ gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id) {
+ ASSERT_TRUE(buffer);
+ ASSERT_GT(buffer->GetSize().width() * buffer->GetSize().height(), 0);
+ if (frame_cb_) {
+ std::move(frame_cb_).Run();
+ }
+}
+
// Trampoline methods to workaround GMOCK problems with std::unique_ptr<>.
VideoCaptureDevice::Client::Buffer MockVideoCaptureClient::ReserveOutputBuffer(
const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) {
DoReserveOutputBuffer();
NOTREACHED() << "This should never be called";
@@ -84,7 +97,6 @@ void MockVideoCaptureClient::OnIncomingCapturedBufferExt(
VideoCaptureDevice::Client::Buffer
MockVideoCaptureClient::ResurrectLastOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) {
DoResurrectLastOutputBuffer();
NOTREACHED() << "This should never be called";
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.h b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
index 897be990101..cd3452d839e 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.h
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
@@ -8,6 +8,9 @@
#include "media/capture/video/video_capture_device.h"
#include "testing/gmock/include/gmock/gmock.h"
+// TODO(crbug.com/838774):
+// Consolidate the MockVideoCaptureClient implementations
+
namespace media {
namespace unittest_internal {
@@ -40,10 +43,15 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
base::TimeTicks reference_time,
base::TimeDelta timestamp,
int frame_feedback_id) override;
+ void OnIncomingCapturedGfxBuffer(gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) override;
// Trampoline methods to workaround GMOCK problems with std::unique_ptr<>.
Buffer ReserveOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) override;
void OnIncomingCapturedBuffer(Buffer buffer,
const VideoCaptureFormat& format,
@@ -58,7 +66,6 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
const VideoFrameMetadata& additional_metadata) override;
Buffer ResurrectLastOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) override;
private:
diff --git a/chromium/media/capture/video/chromeos/pixel_format_utils.cc b/chromium/media/capture/video/chromeos/pixel_format_utils.cc
index 9e1449ab419..6f9d4e3645c 100644
--- a/chromium/media/capture/video/chromeos/pixel_format_utils.cc
+++ b/chromium/media/capture/video/chromeos/pixel_format_utils.cc
@@ -31,6 +31,9 @@ struct SupportedFormat {
// support YUV flexbile format video streams.
{cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888,
{PIXEL_FORMAT_NV12, gfx::BufferFormat::YUV_420_BIPLANAR}},
+ // FIXME(jcliang): MJPEG is not accurate; we should have BLOB or JPEG
+ {cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB,
+ {PIXEL_FORMAT_MJPEG, gfx::BufferFormat::R_8}},
// Add more mappings when we have more devices.
};
@@ -51,6 +54,8 @@ uint32_t PixFormatVideoToDrm(VideoPixelFormat from) {
switch (from) {
case PIXEL_FORMAT_NV12:
return DRM_FORMAT_NV12;
+ case PIXEL_FORMAT_MJPEG:
+ return DRM_FORMAT_R8;
default:
// Unsupported format.
return 0;
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
index 20c9cd53377..2cd54bb1114 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
@@ -15,16 +15,40 @@
namespace media {
+namespace {
+
+size_t GetBufferIndex(uint64_t buffer_id) {
+ return buffer_id & 0xFFFFFFFF;
+}
+
+StreamType StreamIdToStreamType(uint64_t stream_id) {
+ switch (stream_id) {
+ case 0:
+ return StreamType::kPreview;
+ case 1:
+ return StreamType::kStillCapture;
+ default:
+ return StreamType::kUnknown;
+ }
+}
+
+} // namespace
+
StreamBufferManager::StreamBufferManager(
cros::mojom::Camera3CallbackOpsRequest callback_ops_request,
std::unique_ptr<StreamCaptureInterface> capture_interface,
CameraDeviceContext* device_context,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
+ base::RepeatingCallback<mojom::BlobPtr(
+ const uint8_t* buffer,
+ const uint32_t bytesused,
+ const VideoCaptureFormat& capture_format)> blobify_callback,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
: callback_ops_(this, std::move(callback_ops_request)),
capture_interface_(std::move(capture_interface)),
device_context_(device_context),
camera_buffer_factory_(std::move(camera_buffer_factory)),
+ blobify_callback_(std::move(blobify_callback)),
ipc_task_runner_(std::move(ipc_task_runner)),
capturing_(false),
frame_number_(0),
@@ -38,107 +62,247 @@ StreamBufferManager::StreamBufferManager(
StreamBufferManager::~StreamBufferManager() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- if (stream_context_) {
- for (const auto& buf : stream_context_->buffers) {
- if (buf) {
- buf->Unmap();
+ for (const auto& iter : stream_context_) {
+ if (iter.second) {
+ for (const auto& buf : iter.second->buffers) {
+ if (buf) {
+ buf->Unmap();
+ }
}
}
}
}
-void StreamBufferManager::SetUpStreamAndBuffers(
+void StreamBufferManager::SetUpStreamsAndBuffers(
VideoCaptureFormat capture_format,
- uint32_t partial_result_count,
- cros::mojom::Camera3StreamPtr stream) {
+ const cros::mojom::CameraMetadataPtr& static_metadata,
+ std::vector<cros::mojom::Camera3StreamPtr> streams) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- DCHECK(!stream_context_);
-
- VLOG(2) << "Stream " << stream->id << " configured: usage=" << stream->usage
- << " max_buffers=" << stream->max_buffers;
-
- const size_t kMaximumAllowedBuffers = 15;
- if (stream->max_buffers > kMaximumAllowedBuffers) {
- device_context_->SetErrorState(
- FROM_HERE, std::string("Camera HAL requested ") +
- std::to_string(stream->max_buffers) +
- std::string(" buffers which exceeds the allowed maximum "
- "number of buffers"));
- return;
+ DCHECK(!stream_context_[StreamType::kPreview]);
+
+ // The partial result count metadata is optional; defaults to 1 in case it
+ // is not set in the static metadata.
+ const cros::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
+ static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
+ if (partial_count) {
+ partial_result_count_ =
+ *reinterpret_cast<int32_t*>((*partial_count)->data.data());
}
- partial_result_count_ = partial_result_count;
- stream_context_ = std::make_unique<StreamContext>();
- stream_context_->capture_format = capture_format;
- stream_context_->stream = std::move(stream);
-
- const ChromiumPixelFormat stream_format =
- camera_buffer_factory_->ResolveStreamBufferFormat(
- stream_context_->stream->format);
- stream_context_->capture_format.pixel_format = stream_format.video_format;
-
- // Allocate buffers.
- size_t num_buffers = stream_context_->stream->max_buffers;
- stream_context_->buffers.resize(num_buffers);
- for (size_t j = 0; j < num_buffers; ++j) {
- auto buffer = camera_buffer_factory_->CreateGpuMemoryBuffer(
- gfx::Size(stream_context_->stream->width,
- stream_context_->stream->height),
- stream_format.gfx_format);
- if (!buffer) {
- device_context_->SetErrorState(FROM_HERE,
- "Failed to create GpuMemoryBuffer");
+ for (auto& stream : streams) {
+ DVLOG(2) << "Stream " << stream->id
+ << " configured: usage=" << stream->usage
+ << " max_buffers=" << stream->max_buffers;
+
+ const size_t kMaximumAllowedBuffers = 15;
+ if (stream->max_buffers > kMaximumAllowedBuffers) {
+ device_context_->SetErrorState(
+ FROM_HERE,
+ std::string("Camera HAL requested ") +
+ std::to_string(stream->max_buffers) +
+ std::string(" buffers which exceeds the allowed maximum "
+ "number of buffers"));
return;
}
- bool ret = buffer->Map();
- if (!ret) {
- device_context_->SetErrorState(FROM_HERE,
- "Failed to map GpuMemoryBuffer");
- return;
+
+ // A better way to tell the stream type here would be to check on the usage
+ // flags of the stream.
+ StreamType stream_type;
+ if (stream->format ==
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ stream_type = StreamType::kPreview;
+ } else { // stream->format ==
+ // cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB
+ stream_type = StreamType::kStillCapture;
+ }
+ stream_context_[stream_type] = std::make_unique<StreamContext>();
+ stream_context_[stream_type]->capture_format = capture_format;
+ stream_context_[stream_type]->stream = std::move(stream);
+
+ const ChromiumPixelFormat stream_format =
+ camera_buffer_factory_->ResolveStreamBufferFormat(
+ stream_context_[stream_type]->stream->format);
+ stream_context_[stream_type]->capture_format.pixel_format =
+ stream_format.video_format;
+
+ // Allocate buffers.
+ size_t num_buffers = stream_context_[stream_type]->stream->max_buffers;
+ stream_context_[stream_type]->buffers.resize(num_buffers);
+ int32_t buffer_width, buffer_height;
+ if (stream_type == StreamType::kPreview) {
+ buffer_width = stream_context_[stream_type]->stream->width;
+ buffer_height = stream_context_[stream_type]->stream->height;
+ } else { // StreamType::kStillCapture
+ const cros::mojom::CameraMetadataEntryPtr* jpeg_max_size =
+ GetMetadataEntry(
+ static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_JPEG_MAX_SIZE);
+ buffer_width = *reinterpret_cast<int32_t*>((*jpeg_max_size)->data.data());
+ buffer_height = 1;
+ }
+ for (size_t j = 0; j < num_buffers; ++j) {
+ auto buffer = camera_buffer_factory_->CreateGpuMemoryBuffer(
+ gfx::Size(buffer_width, buffer_height), stream_format.gfx_format);
+ if (!buffer) {
+ device_context_->SetErrorState(FROM_HERE,
+ "Failed to create GpuMemoryBuffer");
+ return;
+ }
+ bool ret = buffer->Map();
+ if (!ret) {
+ device_context_->SetErrorState(FROM_HERE,
+ "Failed to map GpuMemoryBuffer");
+ return;
+ }
+ stream_context_[stream_type]->buffers[j] = std::move(buffer);
+ stream_context_[stream_type]->free_buffers.push(
+ GetBufferIpcId(stream_type, j));
}
- stream_context_->buffers[j] = std::move(buffer);
- stream_context_->free_buffers.push(j);
+ DVLOG(2) << "Allocated "
+ << stream_context_[stream_type]->stream->max_buffers << " buffers";
}
- VLOG(2) << "Allocated " << stream_context_->stream->max_buffers << " buffers";
}
-void StreamBufferManager::StartCapture(
- cros::mojom::CameraMetadataPtr settings) {
+void StreamBufferManager::StartPreview(
+ cros::mojom::CameraMetadataPtr preview_settings) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- DCHECK(stream_context_);
- DCHECK(stream_context_->request_settings.is_null());
+ DCHECK(stream_context_[StreamType::kPreview]);
+ DCHECK(repeating_request_settings_.is_null());
capturing_ = true;
- stream_context_->request_settings = std::move(settings);
+ repeating_request_settings_ = std::move(preview_settings);
// We cannot use a loop to register all the free buffers in one shot here
// because the camera HAL v3 API specifies that the client cannot call
// ProcessCaptureRequest before the previous one returns.
- RegisterBuffer();
+ RegisterBuffer(StreamType::kPreview);
}
-void StreamBufferManager::StopCapture() {
+void StreamBufferManager::StopPreview() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
capturing_ = false;
}
-void StreamBufferManager::RegisterBuffer() {
+cros::mojom::Camera3StreamPtr StreamBufferManager::GetStreamConfiguration(
+ StreamType stream_type) {
+ if (!stream_context_.count(stream_type)) {
+ return cros::mojom::Camera3Stream::New();
+ }
+ return stream_context_[stream_type]->stream.Clone();
+}
+
+void StreamBufferManager::TakePhoto(
+ cros::mojom::CameraMetadataPtr settings,
+ VideoCaptureDevice::TakePhotoCallback callback) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ DCHECK(stream_context_[StreamType::kStillCapture]);
+
+ still_capture_callbacks_yet_to_be_processed_.push(std::move(callback));
+
+ std::vector<uint8_t> frame_orientation(sizeof(int32_t));
+ *reinterpret_cast<int32_t*>(frame_orientation.data()) =
+ base::checked_cast<int32_t>(device_context_->GetCameraFrameOrientation());
+ cros::mojom::CameraMetadataEntryPtr e =
+ cros::mojom::CameraMetadataEntry::New();
+ e->tag = cros::mojom::CameraMetadataTag::ANDROID_JPEG_ORIENTATION;
+ e->type = cros::mojom::EntryType::TYPE_INT32;
+ e->count = 1;
+ e->data = std::move(frame_orientation);
+ AddOrUpdateMetadataEntry(&settings, std::move(e));
+
+ oneshot_request_settings_.push(std::move(settings));
+ RegisterBuffer(StreamType::kStillCapture);
+}
+
+void StreamBufferManager::AddResultMetadataObserver(
+ ResultMetadataObserver* observer) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- DCHECK(stream_context_);
+ DCHECK(!result_metadata_observers_.count(observer));
+
+ result_metadata_observers_.insert(observer);
+}
+
+void StreamBufferManager::RemoveResultMetadataObserver(
+ ResultMetadataObserver* observer) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ DCHECK(result_metadata_observers_.count(observer));
+
+ result_metadata_observers_.erase(observer);
+}
+
+void StreamBufferManager::SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
+ cros::mojom::EntryType type,
+ size_t count,
+ std::vector<uint8_t> value) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ cros::mojom::CameraMetadataEntryPtr setting =
+ cros::mojom::CameraMetadataEntry::New();
+
+ setting->tag = tag;
+ setting->type = type;
+ setting->count = count;
+ setting->data = std::move(value);
+
+ capture_settings_override_.push_back(std::move(setting));
+}
+
+// static
+uint64_t StreamBufferManager::GetBufferIpcId(StreamType stream_type,
+ size_t index) {
+ uint64_t id = 0;
+ id |= static_cast<uint64_t>(stream_type) << 32;
+ id |= index;
+ return id;
+}
+
+void StreamBufferManager::ApplyCaptureSettings(
+ cros::mojom::CameraMetadataPtr* capture_settings) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ if (capture_settings_override_.empty()) {
+ return;
+ }
+ for (auto& s : capture_settings_override_) {
+ auto* entry = GetMetadataEntry(*capture_settings, s->tag);
+ if (entry) {
+ DCHECK_EQ((*entry)->type, s->type);
+ (*entry).Swap(&s);
+ } else {
+ (*capture_settings)->entry_count += 1;
+ (*capture_settings)->entry_capacity += 1;
+ (*capture_settings)->data_count += s->data.size();
+ (*capture_settings)->data_capacity += s->data.size();
+ if (!(*capture_settings)->entries) {
+ (*capture_settings)->entries =
+ std::vector<cros::mojom::CameraMetadataEntryPtr>();
+ }
+ (*capture_settings)->entries.value().push_back(std::move(s));
+ }
+ }
+ capture_settings_override_.clear();
+ SortCameraMetadata(capture_settings);
+}
+
+void StreamBufferManager::RegisterBuffer(StreamType stream_type) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ DCHECK(stream_context_[stream_type]);
if (!capturing_) {
return;
}
- if (stream_context_->free_buffers.empty()) {
+ if (stream_context_[stream_type]->free_buffers.empty()) {
return;
}
- size_t buffer_id = stream_context_->free_buffers.front();
- stream_context_->free_buffers.pop();
+ uint64_t buffer_id = stream_context_[stream_type]->free_buffers.front();
+ stream_context_[stream_type]->free_buffers.pop();
const gfx::GpuMemoryBuffer* buffer =
- stream_context_->buffers[buffer_id].get();
+ stream_context_[stream_type]->buffers[GetBufferIndex(buffer_id)].get();
- VideoPixelFormat buffer_format = stream_context_->capture_format.pixel_format;
+ VideoPixelFormat buffer_format =
+ stream_context_[stream_type]->capture_format.pixel_format;
uint32_t drm_format = PixFormatVideoToDrm(buffer_format);
if (!drm_format) {
device_context_->SetErrorState(
@@ -147,7 +311,7 @@ void StreamBufferManager::RegisterBuffer() {
return;
}
cros::mojom::HalPixelFormat hal_pixel_format =
- stream_context_->stream->format;
+ stream_context_[stream_type]->stream->format;
gfx::NativePixmapHandle buffer_handle =
buffer->GetHandle().native_pixmap_handle;
@@ -162,8 +326,9 @@ void StreamBufferManager::RegisterBuffer() {
device_context_->SetErrorState(FROM_HERE, "Failed to dup fd");
return;
}
- MojoResult result = mojo::edk::CreatePlatformHandleWrapper(
- mojo::edk::ScopedPlatformHandle(mojo::edk::PlatformHandle(dup_fd)),
+ MojoResult result = mojo::edk::CreateInternalPlatformHandleWrapper(
+ mojo::edk::ScopedInternalPlatformHandle(
+ mojo::edk::InternalPlatformHandle(dup_fd)),
&wrapped_handle);
if (result != MOJO_RESULT_OK) {
device_context_->SetErrorState(FROM_HERE,
@@ -174,19 +339,27 @@ void StreamBufferManager::RegisterBuffer() {
planes[i].stride = buffer_handle.planes[i].stride;
planes[i].offset = buffer_handle.planes[i].offset;
}
+ if (stream_type == StreamType::kStillCapture) {
+ still_capture_callbacks_currently_processing_.push(
+ std::move(still_capture_callbacks_yet_to_be_processed_.front()));
+ still_capture_callbacks_yet_to_be_processed_.pop();
+ }
// We reuse BufferType::GRALLOC here since on ARC++ we are using DMA-buf-based
// gralloc buffers.
capture_interface_->RegisterBuffer(
buffer_id, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, drm_format,
- hal_pixel_format, stream_context_->stream->width,
- stream_context_->stream->height, std::move(planes),
- base::Bind(&StreamBufferManager::OnRegisteredBuffer,
- weak_ptr_factory_.GetWeakPtr(), buffer_id));
- VLOG(2) << "Registered buffer " << buffer_id;
+ hal_pixel_format, buffer->GetSize().width(), buffer->GetSize().height(),
+ std::move(planes),
+ base::BindOnce(&StreamBufferManager::OnRegisteredBuffer,
+ weak_ptr_factory_.GetWeakPtr(), stream_type, buffer_id));
+ DVLOG(2) << "Registered buffer " << buffer_id;
}
-void StreamBufferManager::OnRegisteredBuffer(size_t buffer_id, int32_t result) {
+void StreamBufferManager::OnRegisteredBuffer(StreamType stream_type,
+ uint64_t buffer_id,
+ int32_t result) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ DCHECK(stream_context_[stream_type]);
if (!capturing_) {
return;
@@ -197,32 +370,64 @@ void StreamBufferManager::OnRegisteredBuffer(size_t buffer_id, int32_t result) {
std::string(strerror(result)));
return;
}
- ProcessCaptureRequest(buffer_id);
+ stream_context_[stream_type]->registered_buffers.push(buffer_id);
+ ProcessCaptureRequest();
}
-void StreamBufferManager::ProcessCaptureRequest(size_t buffer_id) {
+void StreamBufferManager::ProcessCaptureRequest() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- DCHECK(stream_context_);
-
- cros::mojom::Camera3StreamBufferPtr buffer =
- cros::mojom::Camera3StreamBuffer::New();
- buffer->stream_id = static_cast<uint64_t>(
- cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW);
- buffer->buffer_id = buffer_id;
- buffer->status = cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
+ DCHECK(stream_context_[StreamType::kPreview]);
+ DCHECK(stream_context_[StreamType::kStillCapture]);
cros::mojom::Camera3CaptureRequestPtr request =
cros::mojom::Camera3CaptureRequest::New();
request->frame_number = frame_number_;
- request->settings = stream_context_->request_settings.Clone();
- request->output_buffers.push_back(std::move(buffer));
+ CaptureResult& pending_result = pending_results_[frame_number_];
+
+ if (!stream_context_[StreamType::kPreview]->registered_buffers.empty()) {
+ cros::mojom::Camera3StreamBufferPtr buffer =
+ cros::mojom::Camera3StreamBuffer::New();
+ buffer->stream_id = static_cast<uint64_t>(StreamType::kPreview);
+ buffer->buffer_id =
+ stream_context_[StreamType::kPreview]->registered_buffers.front();
+ stream_context_[StreamType::kPreview]->registered_buffers.pop();
+ buffer->status = cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
+
+ DVLOG(2) << "Requested capture for stream " << StreamType::kPreview
+ << " in frame " << frame_number_;
+ request->settings = repeating_request_settings_.Clone();
+ request->output_buffers.push_back(std::move(buffer));
+ }
+
+ if (!stream_context_[StreamType::kStillCapture]->registered_buffers.empty()) {
+ DCHECK(!still_capture_callbacks_currently_processing_.empty());
+ cros::mojom::Camera3StreamBufferPtr buffer =
+ cros::mojom::Camera3StreamBuffer::New();
+ buffer->stream_id = static_cast<uint64_t>(StreamType::kStillCapture);
+ buffer->buffer_id =
+ stream_context_[StreamType::kStillCapture]->registered_buffers.front();
+ stream_context_[StreamType::kStillCapture]->registered_buffers.pop();
+ buffer->status = cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK;
+
+ DVLOG(2) << "Requested capture for stream " << StreamType::kStillCapture
+ << " in frame " << frame_number_;
+ // Use the still capture settings and override the preview ones.
+ request->settings = std::move(oneshot_request_settings_.front());
+ oneshot_request_settings_.pop();
+ pending_result.still_capture_callback =
+ std::move(still_capture_callbacks_currently_processing_.front());
+ still_capture_callbacks_currently_processing_.pop();
+ request->output_buffers.push_back(std::move(buffer));
+ }
+
+ pending_result.unsubmitted_buffer_count = request->output_buffers.size();
+
+ ApplyCaptureSettings(&request->settings);
capture_interface_->ProcessCaptureRequest(
std::move(request),
- base::Bind(&StreamBufferManager::OnProcessedCaptureRequest,
- weak_ptr_factory_.GetWeakPtr()));
- VLOG(2) << "Requested capture for frame " << frame_number_ << " with buffer "
- << buffer_id;
+ base::BindOnce(&StreamBufferManager::OnProcessedCaptureRequest,
+ weak_ptr_factory_.GetWeakPtr()));
frame_number_++;
}
@@ -238,7 +443,8 @@ void StreamBufferManager::OnProcessedCaptureRequest(int32_t result) {
std::string(strerror(result)));
return;
}
- RegisterBuffer();
+ // Keeps the preview stream going.
+ RegisterBuffer(StreamType::kPreview);
}
void StreamBufferManager::ProcessCaptureResult(
@@ -251,70 +457,78 @@ void StreamBufferManager::ProcessCaptureResult(
uint32_t frame_number = result->frame_number;
// A new partial result may be created in either ProcessCaptureResult or
// Notify.
- CaptureResult& partial_result = partial_results_[frame_number];
- if (partial_results_.size() > stream_context_->stream->max_buffers) {
- device_context_->SetErrorState(
- FROM_HERE,
- "Received more capture results than the maximum number of buffers");
- return;
- }
- if (result->output_buffers) {
- if (result->output_buffers->size() != 1) {
- device_context_->SetErrorState(
- FROM_HERE,
- std::string("Incorrect number of output buffers received: ") +
- std::to_string(result->output_buffers->size()));
- return;
- }
- cros::mojom::Camera3StreamBufferPtr& stream_buffer =
- result->output_buffers.value()[0];
- VLOG(2) << "Received capture result for frame " << frame_number
- << " stream_id: " << stream_buffer->stream_id;
- // The camera HAL v3 API specifies that only one capture result can carry
- // the result buffer for any given frame number.
- if (!partial_result.buffer.is_null()) {
- device_context_->SetErrorState(
- FROM_HERE,
- std::string("Received multiple result buffers for frame ") +
- std::to_string(frame_number));
- return;
- } else {
- partial_result.buffer = std::move(stream_buffer);
- // If the buffer is marked as error it is due to either a request or a
- // buffer error. In either case the content of the buffer must be dropped
- // and the buffer can be reused. We simply submit the buffer here and
- // don't wait for any partial results. SubmitCaptureResult() will drop
- // and reuse the buffer.
- if (partial_result.buffer->status ==
- cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
- SubmitCaptureResult(frame_number);
- return;
- }
- }
- }
+ CaptureResult& pending_result = pending_results_[frame_number];
- // |result->partial_result| is set to 0 if the capture result contains only
+ // |result->pending_result| is set to 0 if the capture result contains only
// the result buffer handles and no result metadata.
if (result->partial_result) {
uint32_t result_id = result->partial_result;
if (result_id > partial_result_count_) {
device_context_->SetErrorState(
- FROM_HERE, std::string("Invalid partial_result id: ") +
+ FROM_HERE, std::string("Invalid pending_result id: ") +
std::to_string(result_id));
return;
}
- if (partial_result.partial_metadata_received.find(result_id) !=
- partial_result.partial_metadata_received.end()) {
+ if (pending_result.partial_metadata_received.count(result_id)) {
device_context_->SetErrorState(
FROM_HERE, std::string("Received duplicated partial metadata: ") +
std::to_string(result_id));
return;
}
- partial_result.partial_metadata_received.insert(result_id);
- MergeMetadata(&partial_result.metadata, result->result);
+ DVLOG(2) << "Received partial result " << result_id << " for frame "
+ << frame_number;
+ pending_result.partial_metadata_received.insert(result_id);
+ MergeMetadata(&pending_result.metadata, result->result);
}
- SubmitCaptureResultIfComplete(frame_number);
+ if (result->output_buffers) {
+ if (result->output_buffers->size() > kMaxConfiguredStreams) {
+ device_context_->SetErrorState(
+ FROM_HERE,
+ std::string("Incorrect number of output buffers received: ") +
+ std::to_string(result->output_buffers->size()));
+ return;
+ }
+ for (auto& stream_buffer : result->output_buffers.value()) {
+ DVLOG(2) << "Received capture result for frame " << frame_number
+ << " stream_id: " << stream_buffer->stream_id;
+ StreamType stream_type = StreamIdToStreamType(stream_buffer->stream_id);
+ if (stream_type == StreamType::kUnknown) {
+ device_context_->SetErrorState(
+ FROM_HERE,
+ std::string("Invalid type of output buffers received: ") +
+ std::to_string(stream_buffer->stream_id));
+ return;
+ }
+
+ // The camera HAL v3 API specifies that only one capture result can carry
+ // the result buffer for any given frame number.
+ if (stream_context_[stream_type]->capture_results_with_buffer.count(
+ frame_number)) {
+ device_context_->SetErrorState(
+ FROM_HERE,
+ std::string("Received multiple result buffers for frame ") +
+ std::to_string(frame_number) + std::string(" for stream ") +
+ std::to_string(stream_buffer->stream_id));
+ return;
+ }
+
+ pending_result.buffers[stream_type] = std::move(stream_buffer);
+ stream_context_[stream_type]->capture_results_with_buffer[frame_number] =
+ &pending_result;
+ if (pending_result.buffers[stream_type]->status ==
+ cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
+ // If the buffer is marked as error, its content is discarded for this
+ // frame. Send the buffer to the free list directly through
+ // SubmitCaptureResult.
+ SubmitCaptureResult(frame_number, stream_type);
+ }
+ }
+ }
+
+ for (const auto& iter : stream_context_) {
+ SubmitCaptureResultIfComplete(frame_number, iter.first);
+ }
}
void StreamBufferManager::Notify(cros::mojom::Camera3NotifyMsgPtr message) {
@@ -326,46 +540,49 @@ void StreamBufferManager::Notify(cros::mojom::Camera3NotifyMsgPtr message) {
if (message->type == cros::mojom::Camera3MsgType::CAMERA3_MSG_ERROR) {
uint32_t frame_number = message->message->get_error()->frame_number;
uint64_t error_stream_id = message->message->get_error()->error_stream_id;
+ StreamType stream_type = StreamIdToStreamType(error_stream_id);
+ if (stream_type == StreamType::kUnknown) {
+ device_context_->SetErrorState(
+ FROM_HERE, std::string("Unknown stream in Camera3NotifyMsg: ") +
+ std::to_string(error_stream_id));
+ return;
+ }
cros::mojom::Camera3ErrorMsgCode error_code =
message->message->get_error()->error_code;
- HandleNotifyError(frame_number, error_stream_id, error_code);
+ HandleNotifyError(frame_number, stream_type, error_code);
} else { // cros::mojom::Camera3MsgType::CAMERA3_MSG_SHUTTER
uint32_t frame_number = message->message->get_shutter()->frame_number;
uint64_t shutter_time = message->message->get_shutter()->timestamp;
- // A new partial result may be created in either ProcessCaptureResult or
- // Notify.
- VLOG(2) << "Received shutter time for frame " << frame_number;
+ DVLOG(2) << "Received shutter time for frame " << frame_number;
if (!shutter_time) {
device_context_->SetErrorState(
FROM_HERE, std::string("Received invalid shutter time: ") +
std::to_string(shutter_time));
return;
}
- CaptureResult& partial_result = partial_results_[frame_number];
- if (partial_results_.size() > stream_context_->stream->max_buffers) {
- device_context_->SetErrorState(
- FROM_HERE,
- "Received more capture results than the maximum number of buffers");
- return;
- }
+ CaptureResult& pending_result = pending_results_[frame_number];
// Shutter timestamp is in ns.
base::TimeTicks reference_time =
base::TimeTicks::FromInternalValue(shutter_time / 1000);
- partial_result.reference_time = reference_time;
+ pending_result.reference_time = reference_time;
if (first_frame_shutter_time_.is_null()) {
// Record the shutter time of the first frame for calculating the
// timestamp.
first_frame_shutter_time_ = reference_time;
}
- partial_result.timestamp = reference_time - first_frame_shutter_time_;
- SubmitCaptureResultIfComplete(frame_number);
+ pending_result.timestamp = reference_time - first_frame_shutter_time_;
+ for (const auto& iter : stream_context_) {
+ SubmitCaptureResultIfComplete(frame_number, iter.first);
+ }
}
}
void StreamBufferManager::HandleNotifyError(
uint32_t frame_number,
- uint64_t error_stream_id,
+ StreamType stream_type,
cros::mojom::Camera3ErrorMsgCode error_code) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
std::string warning_msg;
switch (error_code) {
@@ -400,7 +617,7 @@ void StreamBufferManager::HandleNotifyError(
case cros::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_BUFFER:
// An error has occurred in placing the output buffer into a stream for
// a request. |frame_number| specifies the request for which the buffer
- // was dropped, and |error_stream_id| specifies the stream that dropped
+ // was dropped, and |stream_type| specifies the stream that dropped
// the buffer.
//
// The HAL will call ProcessCaptureResult with the buffer's state set to
@@ -409,7 +626,7 @@ void StreamBufferManager::HandleNotifyError(
warning_msg =
std::string(
"An error occurred while filling output buffer of stream ") +
- std::to_string(error_stream_id) + std::string(" in frame ") +
+ StreamTypeToString(stream_type) + std::string(" in frame ") +
std::to_string(frame_number);
break;
@@ -418,55 +635,76 @@ void StreamBufferManager::HandleNotifyError(
break;
}
- LOG(WARNING) << warning_msg;
+ LOG(WARNING) << warning_msg << stream_type;
device_context_->LogToClient(warning_msg);
// If the buffer is already returned by the HAL, submit it and we're done.
- auto partial_result = partial_results_.find(frame_number);
- if (partial_result != partial_results_.end() &&
- !partial_result->second.buffer.is_null()) {
- SubmitCaptureResult(frame_number);
+ if (pending_results_.count(frame_number) &&
+ pending_results_[frame_number].buffers.count(stream_type)) {
+ SubmitCaptureResult(frame_number, stream_type);
}
}
-void StreamBufferManager::SubmitCaptureResultIfComplete(uint32_t frame_number) {
+void StreamBufferManager::SubmitCaptureResultIfComplete(
+ uint32_t frame_number,
+ StreamType stream_type) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- DCHECK(partial_results_.find(frame_number) != partial_results_.end());
-
- CaptureResult& partial_result = partial_results_[frame_number];
- if (partial_result.partial_metadata_received.size() < partial_result_count_ ||
- partial_result.buffer.is_null() ||
- partial_result.reference_time == base::TimeTicks()) {
- // We can only submit the result buffer when:
- // 1. All the result metadata are received, and
- // 2. The result buffer is received, and
- // 3. The the shutter time is received.
+
+ if (!pending_results_.count(frame_number)) {
+ // The capture result may be discarded in case of error.
return;
}
- SubmitCaptureResult(frame_number);
+
+ CaptureResult& pending_result = pending_results_[frame_number];
+ if (!stream_context_[stream_type]->capture_results_with_buffer.count(
+ frame_number) ||
+ pending_result.partial_metadata_received.size() < partial_result_count_ ||
+ pending_result.reference_time == base::TimeTicks()) {
+ // We can only submit the result buffer of |frame_number| for |stream_type|
+ // when:
+ // 1. The result buffer for |stream_type| is received, and
+ // 2. All the result metadata are received, and
+ // 3. The shutter time is received.
+ return;
+ }
+ SubmitCaptureResult(frame_number, stream_type);
}
-void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number) {
+void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number,
+ StreamType stream_type) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- DCHECK(partial_results_.find(frame_number) != partial_results_.end());
-
- CaptureResult& partial_result = partial_results_[frame_number];
- if (partial_results_.begin()->first != frame_number) {
+ DCHECK(pending_results_.count(frame_number));
+ DCHECK(stream_context_[stream_type]->capture_results_with_buffer.count(
+ frame_number));
+
+ CaptureResult& pending_result =
+ *stream_context_[stream_type]->capture_results_with_buffer[frame_number];
+ if (stream_context_[stream_type]
+ ->capture_results_with_buffer.begin()
+ ->first != frame_number) {
device_context_->SetErrorState(
FROM_HERE, std::string("Received frame is out-of-order; expect ") +
- std::to_string(partial_results_.begin()->first) +
+ std::to_string(pending_results_.begin()->first) +
std::string(" but got ") + std::to_string(frame_number));
return;
}
- VLOG(2) << "Submit capture result of frame " << frame_number;
- uint32_t buffer_id = partial_result.buffer->buffer_id;
+ DVLOG(2) << "Submit capture result of frame " << frame_number
+ << " for stream " << static_cast<int>(stream_type);
+ for (auto* iter : result_metadata_observers_) {
+ iter->OnResultMetadataAvailable(pending_result.metadata);
+ }
+
+ DCHECK(pending_result.buffers[stream_type]);
+ const cros::mojom::Camera3StreamBufferPtr& stream_buffer =
+ pending_result.buffers[stream_type];
+ uint64_t buffer_id = stream_buffer->buffer_id;
// Wait on release fence before delivering the result buffer to client.
- if (partial_result.buffer->release_fence.is_valid()) {
+ if (stream_buffer->release_fence.is_valid()) {
const int kSyncWaitTimeoutMs = 1000;
- mojo::edk::ScopedPlatformHandle fence;
- MojoResult result = mojo::edk::PassWrappedPlatformHandle(
- partial_result.buffer->release_fence.release().value(), &fence);
+ mojo::edk::ScopedInternalPlatformHandle fence;
+ MojoResult result = mojo::edk::PassWrappedInternalPlatformHandle(
+ stream_buffer->release_fence.release().value(), &fence);
if (result != MOJO_RESULT_OK) {
device_context_->SetErrorState(FROM_HERE,
"Failed to unwrap release fence fd");
@@ -479,25 +717,55 @@ void StreamBufferManager::SubmitCaptureResult(uint32_t frame_number) {
}
}
- // Deliver the captured data to client and then re-queue the buffer.
- if (partial_result.buffer->status !=
+ // Deliver the captured data to client.
+ if (stream_buffer->status !=
cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_ERROR) {
- gfx::GpuMemoryBuffer* buffer = stream_context_->buffers[buffer_id].get();
- auto buffer_handle = buffer->GetHandle();
- size_t mapped_size = 0;
- for (const auto& plane : buffer_handle.native_pixmap_handle.planes) {
- mapped_size += plane.size;
+ size_t buffer_index = GetBufferIndex(buffer_id);
+ gfx::GpuMemoryBuffer* buffer =
+ stream_context_[stream_type]->buffers[buffer_index].get();
+ if (stream_type == StreamType::kPreview) {
+ device_context_->SubmitCapturedData(
+ buffer, stream_context_[StreamType::kPreview]->capture_format,
+ pending_result.reference_time, pending_result.timestamp);
+ ipc_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&StreamBufferManager::RegisterBuffer,
+ weak_ptr_factory_.GetWeakPtr(), StreamType::kPreview));
+ } else { // StreamType::kStillCapture
+ DCHECK(pending_result.still_capture_callback);
+ const Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
+ reinterpret_cast<uintptr_t>(buffer->memory(0)) +
+ buffer->GetSize().width() - sizeof(Camera3JpegBlob));
+ if (header->jpeg_blob_id != kCamera3JpegBlobId) {
+ device_context_->SetErrorState(FROM_HERE, "Invalid JPEG blob");
+ return;
+ }
+ mojom::BlobPtr blob = blobify_callback_.Run(
+ reinterpret_cast<uint8_t*>(buffer->memory(0)), header->jpeg_size,
+ stream_context_[stream_type]->capture_format);
+ if (blob) {
+ std::move(pending_result.still_capture_callback).Run(std::move(blob));
+ } else {
+ LOG(ERROR) << "Failed to blobify the captured JPEG image";
+ }
+ }
+ }
+
+ stream_context_[stream_type]->free_buffers.push(buffer_id);
+ stream_context_[stream_type]->capture_results_with_buffer.erase(frame_number);
+ pending_result.unsubmitted_buffer_count--;
+ if (!pending_result.unsubmitted_buffer_count) {
+ pending_results_.erase(frame_number);
+ }
+
+ if (stream_type == StreamType::kPreview) {
+ // Always keep the preview stream running.
+ RegisterBuffer(StreamType::kPreview);
+ } else { // stream_type == StreamType::kStillCapture
+ if (!still_capture_callbacks_yet_to_be_processed_.empty()) {
+ RegisterBuffer(StreamType::kStillCapture);
}
- // We are relying on the GpuMemoryBuffer being mapped contiguously on the
- // virtual memory address space.
- device_context_->SubmitCapturedData(
- reinterpret_cast<uint8_t*>(buffer->memory(0)), mapped_size,
- stream_context_->capture_format, partial_result.reference_time,
- partial_result.timestamp);
- }
- stream_context_->free_buffers.push(buffer_id);
- partial_results_.erase(frame_number);
- RegisterBuffer();
+ }
}
StreamBufferManager::StreamContext::StreamContext() = default;
@@ -505,7 +773,8 @@ StreamBufferManager::StreamContext::StreamContext() = default;
StreamBufferManager::StreamContext::~StreamContext() = default;
StreamBufferManager::CaptureResult::CaptureResult()
- : metadata(cros::mojom::CameraMetadata::New()) {}
+ : metadata(cros::mojom::CameraMetadata::New()),
+ unsubmitted_buffer_count(0) {}
StreamBufferManager::CaptureResult::~CaptureResult() = default;
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.h b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
index a6a94f16b16..76cda4425ab 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.h
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
@@ -5,8 +5,14 @@
#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_STREAM_BUFFER_MANAGER_H_
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_STREAM_BUFFER_MANAGER_H_
+#include <memory>
+#include <queue>
+#include <unordered_map>
+#include <vector>
+
#include "base/containers/queue.h"
#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
#include "media/capture/video_capture_types.h"
@@ -23,29 +29,65 @@ namespace media {
class CameraBufferFactory;
class CameraDeviceContext;
+// One stream for preview, one stream for still capture.
+constexpr size_t kMaxConfiguredStreams = 2;
+
+// The JPEG transport header as defined by Android camera HAL v3 API. The JPEG
+// transport header is at the end of the blob buffer filled by the HAL.
+constexpr uint16_t kCamera3JpegBlobId = 0x00FF;
+struct Camera3JpegBlob {
+ uint16_t jpeg_blob_id;
+ uint32_t jpeg_size;
+};
+
+class CAPTURE_EXPORT CaptureMetadataDispatcher {
+ public:
+ class ResultMetadataObserver {
+ public:
+ virtual ~ResultMetadataObserver() {}
+ virtual void OnResultMetadataAvailable(
+ const cros::mojom::CameraMetadataPtr&) = 0;
+ };
+
+ virtual ~CaptureMetadataDispatcher() {}
+ virtual void AddResultMetadataObserver(ResultMetadataObserver* observer) = 0;
+ virtual void RemoveResultMetadataObserver(
+ ResultMetadataObserver* observer) = 0;
+ virtual void SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
+ cros::mojom::EntryType type,
+ size_t count,
+ std::vector<uint8_t> value) = 0;
+};
+
// StreamBufferManager is responsible for managing the buffers of the
// stream. StreamBufferManager allocates buffers according to the given
// stream configuration, and circulates the buffers along with capture
// requests and results between Chrome and the camera HAL process.
class CAPTURE_EXPORT StreamBufferManager final
- : public cros::mojom::Camera3CallbackOps {
+ : public cros::mojom::Camera3CallbackOps,
+ public CaptureMetadataDispatcher {
public:
StreamBufferManager(
cros::mojom::Camera3CallbackOpsRequest callback_ops_request,
std::unique_ptr<StreamCaptureInterface> capture_interface,
CameraDeviceContext* device_context,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
+ base::RepeatingCallback<mojom::BlobPtr(
+ const uint8_t* buffer,
+ const uint32_t bytesused,
+ const VideoCaptureFormat& capture_format)> blobify_callback,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
- ~StreamBufferManager() final;
+ ~StreamBufferManager() override;
// Sets up the stream context and allocate buffers according to the
// configuration specified in |stream|.
- void SetUpStreamAndBuffers(VideoCaptureFormat capture_format,
- uint32_t partial_result_count,
- cros::mojom::Camera3StreamPtr stream);
+ void SetUpStreamsAndBuffers(
+ VideoCaptureFormat capture_format,
+ const cros::mojom::CameraMetadataPtr& static_metadata,
+ std::vector<cros::mojom::Camera3StreamPtr> streams);
- // StartCapture is the entry point to starting the video capture. The way
+ // StartPreview is the entry point to starting the video capture. The way
// the video capture loop works is:
//
// (1) If there is a free buffer, RegisterBuffer registers the buffer with
@@ -59,24 +101,48 @@ class CAPTURE_EXPORT StreamBufferManager final
// SubmitCaptureResultIfComplete is called to deliver the filled buffer
// to Chrome. After the buffer is consumed by Chrome it is enqueued back
// to the free buffer queue. Goto (1) to start another capture loop.
- void StartCapture(cros::mojom::CameraMetadataPtr settings);
+ //
+ // When TakePhoto() is called, an additional BLOB buffer is queued in step (2)
+ // to let the HAL fill the still capture JPEG image. When the JPEG image is
+ // returned in (4), it's passed to upper layer through the TakePhotoCallback.
+ void StartPreview(cros::mojom::CameraMetadataPtr preview_settings);
- // Stops the capture loop. After StopCapture is called |callback_ops_| is
+ // Stops the capture loop. After StopPreview is called |callback_ops_| is
// unbound, so no new capture request or result will be processed.
- void StopCapture();
+ void StopPreview();
+
+ cros::mojom::Camera3StreamPtr GetStreamConfiguration(StreamType stream_type);
+
+ void TakePhoto(cros::mojom::CameraMetadataPtr settings,
+ VideoCaptureDevice::TakePhotoCallback callback);
+
+ // CaptureMetadataDispatcher implementations.
+ void AddResultMetadataObserver(ResultMetadataObserver* observer) override;
+ void RemoveResultMetadataObserver(ResultMetadataObserver* observer) override;
+ // Queues a capture setting that will be sent along with the earliest next
+ // capture request.
+ void SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
+ cros::mojom::EntryType type,
+ size_t count,
+ std::vector<uint8_t> value) override;
+
+ static uint64_t GetBufferIpcId(StreamType stream_type, size_t index);
private:
friend class StreamBufferManagerTest;
- // Registers a free buffer, if any, to the camera HAL.
- void RegisterBuffer();
+ // Registers a free buffer, if any, for the given |stream_type| to the camera
+ // HAL.
+ void RegisterBuffer(StreamType stream_type);
// Calls ProcessCaptureRequest if the buffer specified by |buffer_id| is
// successfully registered.
- void OnRegisteredBuffer(size_t buffer_id, int32_t result);
+ void OnRegisteredBuffer(StreamType stream_type,
+ uint64_t buffer_id,
+ int32_t result);
- // The capture request contains the buffer handle specified by |buffer_id|.
- void ProcessCaptureRequest(size_t buffer_id);
+ // The capture request contains the buffer handles waiting to be filled.
+ void ProcessCaptureRequest();
// Calls RegisterBuffer to attempt to register any remaining free buffers.
void OnProcessedCaptureRequest(int32_t result);
@@ -85,23 +151,27 @@ class CAPTURE_EXPORT StreamBufferManager final
// ProcessCaptureResult receives the result metadata as well as the filled
// buffer from camera HAL. The result metadata may be divided and delivered
// in several stages. Before all the result metadata is received the
- // partial results are kept in |partial_results_|.
- void ProcessCaptureResult(cros::mojom::Camera3CaptureResultPtr result) final;
+ // partial results are kept in |pending_results_|.
+ void ProcessCaptureResult(
+ cros::mojom::Camera3CaptureResultPtr result) override;
// Notify receives the shutter time of capture requests and various errors
// from camera HAL. The shutter time is used as the timestamp in the video
// frame delivered to Chrome.
- void Notify(cros::mojom::Camera3NotifyMsgPtr message) final;
+ void Notify(cros::mojom::Camera3NotifyMsgPtr message) override;
void HandleNotifyError(uint32_t frame_number,
- uint64_t error_stream_id,
+ StreamType stream_type,
cros::mojom::Camera3ErrorMsgCode error_code);
- // Submits the captured buffer of frame |frame_number_| to Chrome if all the
- // required metadata and the captured buffer are received. After the buffer
- // is submitted the function then enqueues the buffer to free buffer queue for
- // the next capture request.
- void SubmitCaptureResultIfComplete(uint32_t frame_number);
- void SubmitCaptureResult(uint32_t frame_number);
+ // Submits the captured buffer of frame |frame_number| for the given
+ // |stream_type| to Chrome if all the required metadata and the captured
+ // buffer are received. After the buffer is submitted the function then
+ // enqueues the buffer to the free buffer queue for the next capture request.
+ void SubmitCaptureResultIfComplete(uint32_t frame_number,
+ StreamType stream_type);
+ void SubmitCaptureResult(uint32_t frame_number, StreamType stream_type);
+
+ void ApplyCaptureSettings(cros::mojom::CameraMetadataPtr* capture_settings);
mojo::Binding<cros::mojom::Camera3CallbackOps> callback_ops_;
@@ -111,6 +181,12 @@ class CAPTURE_EXPORT StreamBufferManager final
std::unique_ptr<CameraBufferFactory> camera_buffer_factory_;
+ base::RepeatingCallback<mojom::BlobPtr(
+ const uint8_t* buffer,
+ const uint32_t bytesused,
+ const VideoCaptureFormat& capture_format)>
+ blobify_callback_;
+
// Where all the Mojo IPC calls takes place.
const scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner_;
@@ -121,26 +197,7 @@ class CAPTURE_EXPORT StreamBufferManager final
// to zero in AllocateAndStart.
uint32_t frame_number_;
- struct StreamContext {
- StreamContext();
- ~StreamContext();
- // The actual pixel format used in the capture request.
- VideoCaptureFormat capture_format;
- // The camera HAL stream.
- cros::mojom::Camera3StreamPtr stream;
- // The request settings used in the capture request of this stream.
- cros::mojom::CameraMetadataPtr request_settings;
- // The allocated buffers of this stream.
- std::vector<std::unique_ptr<gfx::GpuMemoryBuffer>> buffers;
- // The free buffers of this stream. The queue stores indices into the
- // |buffers| vector.
- base::queue<size_t> free_buffers;
- };
-
- // The stream context of the preview stream.
- std::unique_ptr<StreamContext> stream_context_;
-
- // CaptureResult is used to hold the partial capture results for each frame.
+ // CaptureResult is used to hold the pending capture results for each frame.
struct CaptureResult {
CaptureResult();
~CaptureResult();
@@ -152,14 +209,61 @@ class CAPTURE_EXPORT StreamBufferManager final
// The result metadata. Contains various information about the captured
// frame.
cros::mojom::CameraMetadataPtr metadata;
- // The buffer handle that hold the captured data of this frame.
- cros::mojom::Camera3StreamBufferPtr buffer;
+ // The buffer handles that hold the captured data of this frame.
+ std::unordered_map<StreamType, cros::mojom::Camera3StreamBufferPtr> buffers;
// The set of the partial metadata received. For each capture result, the
// total number of partial metadata should equal to
// |partial_result_count_|.
std::set<uint32_t> partial_metadata_received;
+ // Incremented for every stream buffer requested for the given frame.
+ // StreamBufferManager destructs the CaptureResult when
+ // |unsubmitted_buffer_count| drops to zero.
+ size_t unsubmitted_buffer_count;
+ // The callback used to return the captured still capture JPEG buffer. Set
+ // if and only if the capture request was sent with a still capture buffer.
+ VideoCaptureDevice::TakePhotoCallback still_capture_callback;
+ };
+
+ struct StreamContext {
+ StreamContext();
+ ~StreamContext();
+ // The actual pixel format used in the capture request.
+ VideoCaptureFormat capture_format;
+ // The camera HAL stream.
+ cros::mojom::Camera3StreamPtr stream;
+ // The allocated buffers of this stream.
+ std::vector<std::unique_ptr<gfx::GpuMemoryBuffer>> buffers;
+ // The free buffers of this stream. The queue stores indices into the
+ // |buffers| vector.
+ std::queue<uint64_t> free_buffers;
+ // The buffers that are registered to the HAL, which can be used as the
+ // output buffers for capture requests.
+ std::queue<uint64_t> registered_buffers;
+ // The pointers to the pending capture results that have unsubmitted result
+ // buffers.
+ std::map<uint32_t, CaptureResult*> capture_results_with_buffer;
};
+ // The context for the set of active streams.
+ std::unordered_map<StreamType, std::unique_ptr<StreamContext>>
+ stream_context_;
+
+ // The repeating request settings. The settings come from the default preview
+ // request settings reported by the HAL. |repeating_request_settings_| is the
+ // default settings for each capture request.
+ cros::mojom::CameraMetadataPtr repeating_request_settings_;
+
+ // A queue of oneshot request settings. These are the request settings for
+ // each still capture requests. |oneshot_request_settings_| overrides
+ // |repeating_request_settings_| if present.
+ std::queue<cros::mojom::CameraMetadataPtr> oneshot_request_settings_;
+
+ // The pending callbacks for the TakePhoto requests.
+ std::queue<VideoCaptureDevice::TakePhotoCallback>
+ still_capture_callbacks_yet_to_be_processed_;
+ std::queue<VideoCaptureDevice::TakePhotoCallback>
+ still_capture_callbacks_currently_processing_;
+
// The number of partial stages. |partial_result_count_| is learned by
// querying |static_metadata_|. In case the result count is absent in
// |static_metadata_|, it defaults to one which means all the result
@@ -172,8 +276,15 @@ class CAPTURE_EXPORT StreamBufferManager final
// |first_frame_shutter_time_|.
base::TimeTicks first_frame_shutter_time_;
- // Stores the partial capture results of the current in-flight frames.
- std::map<uint32_t, CaptureResult> partial_results_;
+ // Stores the pending capture results of the current in-flight frames.
+ std::map<uint32_t, CaptureResult> pending_results_;
+
+ // StreamBufferManager does not own the ResultMetadataObservers. The
+ // observers are responsible for removing themselves before self-destruction.
+ std::unordered_set<ResultMetadataObserver*> result_metadata_observers_;
+
+ // The list of settings to set/override in the capture request.
+ std::vector<cros::mojom::CameraMetadataEntryPtr> capture_settings_override_;
base::WeakPtrFactory<StreamBufferManager> weak_ptr_factory_;
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager_unittest.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager_unittest.cc
index 2d850a3300c..dafe3d3c7b7 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager_unittest.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager_unittest.cc
@@ -16,11 +16,12 @@
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
-#include "media/capture/video/chromeos/mock_gpu_memory_buffer_manager.h"
#include "media/capture/video/chromeos/mock_video_capture_client.h"
+#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "media/capture/video/blob_utils.h"
using testing::_;
using testing::A;
using testing::AtLeast;
@@ -68,45 +69,31 @@ const VideoCaptureFormat kDefaultCaptureFormat(gfx::Size(1280, 720),
30.0,
PIXEL_FORMAT_NV12);
-class MockCameraBufferFactory : public CameraBufferFactory {
+class FakeCameraBufferFactory : public CameraBufferFactory {
public:
- MOCK_METHOD2(CreateGpuMemoryBuffer,
- std::unique_ptr<gfx::GpuMemoryBuffer>(const gfx::Size& size,
- gfx::BufferFormat format));
-
- MOCK_METHOD1(ResolveStreamBufferFormat,
- ChromiumPixelFormat(cros::mojom::HalPixelFormat hal_format));
-};
+ FakeCameraBufferFactory() {
+ gpu_memory_buffer_manager_ =
+ std::make_unique<unittest_internal::MockGpuMemoryBufferManager>();
+ }
+ std::unique_ptr<gfx::GpuMemoryBuffer> CreateGpuMemoryBuffer(
+ const gfx::Size& size,
+ gfx::BufferFormat format) override {
+ return unittest_internal::MockGpuMemoryBufferManager::
+ CreateFakeGpuMemoryBuffer(size, format,
+ gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
+ gpu::kNullSurfaceHandle);
+ }
-std::unique_ptr<gfx::GpuMemoryBuffer> CreateMockGpuMemoryBuffer(
- const gfx::Size& size,
- gfx::BufferFormat format) {
- auto mock_buffer = std::make_unique<unittest_internal::MockGpuMemoryBuffer>();
- gfx::GpuMemoryBufferHandle fake_handle;
- fake_handle.native_pixmap_handle.fds.push_back(
- base::FileDescriptor(0, false));
- fake_handle.native_pixmap_handle.planes.push_back(
- gfx::NativePixmapPlane(1280, 0, 1280 * 720));
- fake_handle.native_pixmap_handle.planes.push_back(
- gfx::NativePixmapPlane(1280, 0, 1280 * 720 / 2));
- void* fake_mapped_address = reinterpret_cast<void*>(0xdeadbeef);
-
- EXPECT_CALL(*mock_buffer, Map()).WillRepeatedly(Return(true));
- EXPECT_CALL(*mock_buffer, memory(0))
- .WillRepeatedly(Return(fake_mapped_address));
- EXPECT_CALL(*mock_buffer, GetHandle()).WillRepeatedly(Return(fake_handle));
- return mock_buffer;
-}
+ ChromiumPixelFormat ResolveStreamBufferFormat(
+ cros::mojom::HalPixelFormat hal_format) override {
+ return ChromiumPixelFormat{PIXEL_FORMAT_NV12,
+ gfx::BufferFormat::YUV_420_BIPLANAR};
+ }
-std::unique_ptr<CameraBufferFactory> CreateMockCameraBufferFactory() {
- auto buffer_factory = std::make_unique<MockCameraBufferFactory>();
- EXPECT_CALL(*buffer_factory, CreateGpuMemoryBuffer(_, _))
- .WillRepeatedly(Invoke(CreateMockGpuMemoryBuffer));
- EXPECT_CALL(*buffer_factory, ResolveStreamBufferFormat(_))
- .WillRepeatedly(Return(ChromiumPixelFormat{
- PIXEL_FORMAT_NV12, gfx::BufferFormat::YUV_420_BIPLANAR}));
- return buffer_factory;
-}
+ private:
+ std::unique_ptr<unittest_internal::MockGpuMemoryBufferManager>
+ gpu_memory_buffer_manager_;
+};
} // namespace
@@ -122,7 +109,12 @@ class StreamBufferManagerTest : public ::testing::Test {
stream_buffer_manager_ = std::make_unique<StreamBufferManager>(
std::move(callback_ops_request),
std::make_unique<MockStreamCaptureInterface>(), device_context_.get(),
- CreateMockCameraBufferFactory(), base::ThreadTaskRunnerHandle::Get());
+ std::make_unique<FakeCameraBufferFactory>(),
+ base::BindRepeating([](const uint8_t* buffer, const uint32_t bytesused,
+ const VideoCaptureFormat& capture_format) {
+ return mojom::Blob::New();
+ }),
+ base::ThreadTaskRunnerHandle::Get());
}
void TearDown() override { stream_buffer_manager_.reset(); }
@@ -139,6 +131,39 @@ class StreamBufferManagerTest : public ::testing::Test {
}
}
+ cros::mojom::CameraMetadataPtr GetFakeStaticMetadata(
+ int32_t partial_result_count) {
+ cros::mojom::CameraMetadataPtr static_metadata =
+ cros::mojom::CameraMetadata::New();
+ static_metadata->entry_count = 2;
+ static_metadata->entry_capacity = 2;
+ static_metadata->entries =
+ std::vector<cros::mojom::CameraMetadataEntryPtr>();
+
+ cros::mojom::CameraMetadataEntryPtr entry =
+ cros::mojom::CameraMetadataEntry::New();
+ entry->index = 0;
+ entry->tag =
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PARTIAL_RESULT_COUNT;
+ entry->type = cros::mojom::EntryType::TYPE_INT32;
+ entry->count = 1;
+ uint8_t* as_int8 = reinterpret_cast<uint8_t*>(&partial_result_count);
+ entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
+ static_metadata->entries->push_back(std::move(entry));
+
+ entry = cros::mojom::CameraMetadataEntry::New();
+ entry->index = 1;
+ entry->tag = cros::mojom::CameraMetadataTag::ANDROID_JPEG_MAX_SIZE;
+ entry->type = cros::mojom::EntryType::TYPE_INT32;
+ entry->count = 1;
+ int32_t jpeg_max_size = 65535;
+ as_int8 = reinterpret_cast<uint8_t*>(&jpeg_max_size);
+ entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
+ static_metadata->entries->push_back(std::move(entry));
+
+ return static_metadata;
+ }
+
void RegisterBuffer(uint64_t buffer_id,
cros::mojom::Camera3DeviceOps::BufferType type,
uint32_t drm_format,
@@ -178,25 +203,46 @@ class StreamBufferManagerTest : public ::testing::Test {
device_context_->client_.get());
}
- std::map<uint32_t, StreamBufferManager::CaptureResult>& GetPartialResults() {
+ std::map<uint32_t, StreamBufferManager::CaptureResult>& GetPendingResults() {
EXPECT_NE(nullptr, stream_buffer_manager_.get());
- return stream_buffer_manager_->partial_results_;
+ return stream_buffer_manager_->pending_results_;
}
- cros::mojom::Camera3StreamPtr PrepareCaptureStream(uint32_t max_buffers) {
- auto stream = cros::mojom::Camera3Stream::New();
- stream->id = 0;
- stream->stream_type = cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
- stream->width = kDefaultCaptureFormat.frame_size.width();
- stream->height = kDefaultCaptureFormat.frame_size.height();
- stream->format =
+ std::vector<cros::mojom::Camera3StreamPtr> PrepareCaptureStream(
+ uint32_t max_buffers) {
+ std::vector<cros::mojom::Camera3StreamPtr> streams;
+
+ auto preview_stream = cros::mojom::Camera3Stream::New();
+ preview_stream->id = static_cast<uint64_t>(StreamType::kPreview);
+ preview_stream->stream_type =
+ cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
+ preview_stream->width = kDefaultCaptureFormat.frame_size.width();
+ preview_stream->height = kDefaultCaptureFormat.frame_size.height();
+ preview_stream->format =
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
- stream->usage = 0;
- stream->max_buffers = max_buffers;
- stream->data_space = 0;
- stream->rotation =
+ preview_stream->usage = 0;
+ preview_stream->max_buffers = max_buffers;
+ preview_stream->data_space = 0;
+ preview_stream->rotation =
cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
- return stream;
+ streams.push_back(std::move(preview_stream));
+
+ auto still_capture_stream = cros::mojom::Camera3Stream::New();
+ still_capture_stream->id = static_cast<uint64_t>(StreamType::kStillCapture);
+ still_capture_stream->stream_type =
+ cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
+ still_capture_stream->width = kDefaultCaptureFormat.frame_size.width();
+ still_capture_stream->height = kDefaultCaptureFormat.frame_size.height();
+ still_capture_stream->format =
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB;
+ still_capture_stream->usage = 0;
+ still_capture_stream->max_buffers = max_buffers;
+ still_capture_stream->data_space = 0;
+ still_capture_stream->rotation =
+ cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
+ streams.push_back(std::move(still_capture_stream));
+
+ return streams;
}
cros::mojom::Camera3NotifyMsgPtr PrepareErrorNotifyMessage(
@@ -205,7 +251,7 @@ class StreamBufferManagerTest : public ::testing::Test {
auto error_msg = cros::mojom::Camera3ErrorMsg::New();
error_msg->frame_number = frame_number;
// There is only the preview stream.
- error_msg->error_stream_id = 1;
+ error_msg->error_stream_id = static_cast<uint64_t>(StreamType::kPreview);
error_msg->error_code = error_code;
auto notify_msg = cros::mojom::Camera3NotifyMsg::New();
notify_msg->message = cros::mojom::Camera3NotifyMsgMessage::New();
@@ -260,18 +306,20 @@ TEST_F(StreamBufferManagerTest, SimpleCaptureTest) {
&StreamBufferManagerTest::QuitCaptureLoop, base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
- DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
- _, _, _, _, _))
+ DoRegisterBuffer(
+ StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
+ cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(1))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
.Times(1)
.WillOnce(Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
- stream_buffer_manager_->SetUpStreamAndBuffers(
- kDefaultCaptureFormat, /* partial_result_count */ 1,
+ stream_buffer_manager_->SetUpStreamsAndBuffers(
+ kDefaultCaptureFormat,
+ GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
- stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
+ stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until a captured frame is received by MockVideoCaptureClient.
DoLoop();
@@ -282,18 +330,19 @@ TEST_F(StreamBufferManagerTest, SimpleCaptureTest) {
TEST_F(StreamBufferManagerTest, PartialResultTest) {
GetMockVideoCaptureClient()->SetFrameCb(base::BindOnce(
[](StreamBufferManagerTest* test) {
- EXPECT_EQ(1u, test->GetPartialResults().size());
+ EXPECT_EQ(1u, test->GetPendingResults().size());
// Make sure all the three partial metadata are received before the
// captured result is submitted.
EXPECT_EQ(
- 3u, test->GetPartialResults()[0].partial_metadata_received.size());
+ 3u, test->GetPendingResults()[0].partial_metadata_received.size());
test->QuitCaptureLoop();
},
base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
- DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
- _, _, _, _, _))
+ DoRegisterBuffer(
+ StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
+ cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(1))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
@@ -306,20 +355,19 @@ TEST_F(StreamBufferManagerTest, PartialResultTest) {
mock_callback_ops_->ProcessCaptureResult(PrepareCapturedResult(
request->frame_number, cros::mojom::CameraMetadata::New(), 1,
std::move(request->output_buffers)));
-
mock_callback_ops_->ProcessCaptureResult(PrepareCapturedResult(
request->frame_number, cros::mojom::CameraMetadata::New(), 2,
std::vector<cros::mojom::Camera3StreamBufferPtr>()));
-
mock_callback_ops_->ProcessCaptureResult(PrepareCapturedResult(
request->frame_number, cros::mojom::CameraMetadata::New(), 3,
std::vector<cros::mojom::Camera3StreamBufferPtr>()));
}));
- stream_buffer_manager_->SetUpStreamAndBuffers(
- kDefaultCaptureFormat, /* partial_result_count */ 3,
+ stream_buffer_manager_->SetUpStreamsAndBuffers(
+ kDefaultCaptureFormat,
+ GetFakeStaticMetadata(/* partial_result_count */ 3),
PrepareCaptureStream(/* max_buffers */ 1));
- stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
+ stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until a captured frame is received by MockVideoCaptureClient.
DoLoop();
@@ -340,8 +388,9 @@ TEST_F(StreamBufferManagerTest, DeviceErrorTest) {
InvokeWithoutArgs(this, &StreamBufferManagerTest::QuitCaptureLoop));
EXPECT_CALL(
*GetMockCaptureInterface(),
- DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
- _, _, _, _, _))
+ DoRegisterBuffer(
+ StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
+ cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(1)
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
@@ -354,10 +403,11 @@ TEST_F(StreamBufferManagerTest, DeviceErrorTest) {
cros::mojom::Camera3ErrorMsgCode::CAMERA3_MSG_ERROR_DEVICE));
}));
- stream_buffer_manager_->SetUpStreamAndBuffers(
- kDefaultCaptureFormat, /* partial_result_count */ 1,
+ stream_buffer_manager_->SetUpStreamsAndBuffers(
+ kDefaultCaptureFormat,
+ GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
- stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
+ stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until the MockVideoCaptureClient is deleted.
DoLoop();
@@ -370,17 +420,18 @@ TEST_F(StreamBufferManagerTest, RequestErrorTest) {
[](StreamBufferManagerTest* test) {
// Frame 0 should be dropped, and the frame callback should be called
// with frame 1.
- EXPECT_EQ(test->GetPartialResults().end(),
- test->GetPartialResults().find(0));
- EXPECT_NE(test->GetPartialResults().end(),
- test->GetPartialResults().find(1));
+ EXPECT_EQ(test->GetPendingResults().end(),
+ test->GetPendingResults().find(0));
+ EXPECT_NE(test->GetPendingResults().end(),
+ test->GetPendingResults().find(1));
test->QuitCaptureLoop();
},
base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
- DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
- _, _, _, _, _))
+ DoRegisterBuffer(
+ StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
+ cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(2))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
@@ -400,10 +451,11 @@ TEST_F(StreamBufferManagerTest, RequestErrorTest) {
}))
.WillOnce(Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
- stream_buffer_manager_->SetUpStreamAndBuffers(
- kDefaultCaptureFormat, /* partial_result_count */ 1,
+ stream_buffer_manager_->SetUpStreamsAndBuffers(
+ kDefaultCaptureFormat,
+ GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
- stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
+ stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until the MockVideoCaptureClient is deleted.
DoLoop();
@@ -415,19 +467,20 @@ TEST_F(StreamBufferManagerTest, ResultErrorTest) {
GetMockVideoCaptureClient()->SetFrameCb(base::BindOnce(
[](StreamBufferManagerTest* test) {
// Frame 0 should be submitted.
- EXPECT_NE(test->GetPartialResults().end(),
- test->GetPartialResults().find(0));
+ EXPECT_NE(test->GetPendingResults().end(),
+ test->GetPendingResults().find(0));
test->QuitCaptureLoop();
},
base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
- DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
- _, _, _, _, _))
+ DoRegisterBuffer(
+ StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
+ cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(1))
- .WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
+ .WillRepeatedly(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
- .Times(1)
+ .Times(AtLeast(1))
.WillOnce(Invoke([this](cros::mojom::Camera3CaptureRequestPtr& request,
base::OnceCallback<void(int32_t)>& callback) {
std::move(callback).Run(0);
@@ -445,10 +498,11 @@ TEST_F(StreamBufferManagerTest, ResultErrorTest) {
}))
.WillOnce(Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
- stream_buffer_manager_->SetUpStreamAndBuffers(
- kDefaultCaptureFormat, /* partial_result_count */ 2,
+ stream_buffer_manager_->SetUpStreamsAndBuffers(
+ kDefaultCaptureFormat,
+ GetFakeStaticMetadata(/* partial_result_count */ 2),
PrepareCaptureStream(/* max_buffers */ 1));
- stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
+ stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until the MockVideoCaptureClient is deleted.
DoLoop();
@@ -461,17 +515,18 @@ TEST_F(StreamBufferManagerTest, BufferErrorTest) {
[](StreamBufferManagerTest* test) {
// Frame 0 should be dropped, and the frame callback should be called
// with frame 1.
- EXPECT_EQ(test->GetPartialResults().end(),
- test->GetPartialResults().find(0));
- EXPECT_NE(test->GetPartialResults().end(),
- test->GetPartialResults().find(1));
+ EXPECT_EQ(test->GetPendingResults().end(),
+ test->GetPendingResults().find(0));
+ EXPECT_NE(test->GetPendingResults().end(),
+ test->GetPendingResults().find(1));
test->QuitCaptureLoop();
},
base::Unretained(this)));
EXPECT_CALL(
*GetMockCaptureInterface(),
- DoRegisterBuffer(0, cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _,
- _, _, _, _, _))
+ DoRegisterBuffer(
+ StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
+ cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
.Times(AtLeast(2))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer))
.WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
@@ -493,13 +548,50 @@ TEST_F(StreamBufferManagerTest, BufferErrorTest) {
}))
.WillOnce(Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
- stream_buffer_manager_->SetUpStreamAndBuffers(
- kDefaultCaptureFormat, /* partial_result_count */ 1,
+ stream_buffer_manager_->SetUpStreamsAndBuffers(
+ kDefaultCaptureFormat,
+ GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
- stream_buffer_manager_->StartCapture(cros::mojom::CameraMetadata::New());
+ stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
// Wait until the MockVideoCaptureClient is deleted.
DoLoop();
}
+// Test that preview and still capture buffers can be correctly submitted.
+TEST_F(StreamBufferManagerTest, TakePhotoTest) {
+ EXPECT_CALL(
+ *GetMockCaptureInterface(),
+ DoRegisterBuffer(
+ StreamBufferManager::GetBufferIpcId(StreamType::kPreview, 0),
+ cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
+ EXPECT_CALL(
+ *GetMockCaptureInterface(),
+ DoRegisterBuffer(
+ StreamBufferManager::GetBufferIpcId(StreamType::kStillCapture, 0),
+ cros::mojom::Camera3DeviceOps::BufferType::GRALLOC, _, _, _, _, _, _))
+ .Times(1)
+ .WillOnce(Invoke(this, &StreamBufferManagerTest::RegisterBuffer));
+ EXPECT_CALL(*GetMockCaptureInterface(), DoProcessCaptureRequest(_, _))
+ .Times(AtLeast(1))
+ .WillRepeatedly(
+ Invoke(this, &StreamBufferManagerTest::ProcessCaptureRequest));
+
+ stream_buffer_manager_->SetUpStreamsAndBuffers(
+ kDefaultCaptureFormat,
+ GetFakeStaticMetadata(/* partial_result_count */ 1),
+ PrepareCaptureStream(/* max_buffers */ 1));
+ stream_buffer_manager_->StartPreview(cros::mojom::CameraMetadata::New());
+ stream_buffer_manager_->TakePhoto(
+ GetFakeStaticMetadata(/* partial_result_count */ 1),
+ base::BindOnce([](StreamBufferManagerTest* test,
+ mojom::BlobPtr blob) { test->QuitCaptureLoop(); },
+ base::Unretained(this)));
+
+ // Wait until a captured frame is received by MockVideoCaptureClient.
+ DoLoop();
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
index b3e410a6d74..79e07f55201 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
@@ -82,7 +82,7 @@ void VideoCaptureDeviceChromeOSHalv3::StopAndDeAllocate() {
if (!camera_device_delegate_) {
return;
}
- CloseDevice(base::Closure());
+ CloseDevice(base::OnceClosure());
camera_device_ipc_thread_.Stop();
camera_device_delegate_.reset();
device_context_.reset();
@@ -154,7 +154,7 @@ void VideoCaptureDeviceChromeOSHalv3::OpenDevice() {
camera_device_delegate_->GetWeakPtr(), rotation_));
}
-void VideoCaptureDeviceChromeOSHalv3::CloseDevice(base::Closure callback) {
+void VideoCaptureDeviceChromeOSHalv3::CloseDevice(base::OnceClosure callback) {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
if (!camera_device_delegate_) {
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
index f122e3846d3..b08e0930775 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
@@ -9,6 +9,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
#include "base/threading/thread.h"
#include "chromeos/dbus/power_manager_client.h"
#include "media/capture/video/chromeos/display_rotation_observer.h"
@@ -58,7 +59,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
private:
void OpenDevice();
- void CloseDevice(base::Closure callback);
+ void CloseDevice(base::OnceClosure callback);
// DisplayRotationDelegate implementation.
void SetDisplayRotation(const display::Display& display) final;
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
index e1cdbbde372..4507469bb03 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
@@ -8,6 +8,7 @@
#include <memory>
#include "base/macros.h"
+#include "base/single_thread_task_runner.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
#include "media/capture/video/video_capture_device_factory.h"
diff --git a/chromium/media/capture/video/fake_video_capture_device.cc b/chromium/media/capture/video/fake_video_capture_device.cc
index ff7f453a3e6..2356c8983a0 100644
--- a/chromium/media/capture/video/fake_video_capture_device.cc
+++ b/chromium/media/capture/video/fake_video_capture_device.cc
@@ -568,15 +568,13 @@ void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame(
const int arbitrary_frame_feedback_id = 0;
auto capture_buffer = client()->ReserveOutputBuffer(
device_state()->format.frame_size, device_state()->format.pixel_format,
- device_state()->format.pixel_storage, arbitrary_frame_feedback_id);
+ arbitrary_frame_feedback_id);
DLOG_IF(ERROR, !capture_buffer.is_valid())
<< "Couldn't allocate Capture Buffer";
auto buffer_access =
capture_buffer.handle_provider->GetHandleForInProcessAccess();
DCHECK(buffer_access->data()) << "Buffer has NO backing memory";
- DCHECK_EQ(VideoPixelStorage::CPU, device_state()->format.pixel_storage);
-
uint8_t* data_ptr = buffer_access->data();
memset(data_ptr, 0, buffer_access->mapped_size());
frame_painter()->PaintFrame(timestamp_to_paint, data_ptr);
diff --git a/chromium/media/capture/video/fake_video_capture_device.h b/chromium/media/capture/video/fake_video_capture_device.h
index 8db55ca4dad..96857c33328 100644
--- a/chromium/media/capture/video/fake_video_capture_device.h
+++ b/chromium/media/capture/video/fake_video_capture_device.h
@@ -100,8 +100,7 @@ class FakeVideoCaptureDevice : public VideoCaptureDevice {
// collaborating classes.
struct FakeDeviceState {
FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format)
- : zoom(zoom),
- format(gfx::Size(), frame_rate, pixel_format, VideoPixelStorage::CPU) {}
+ : zoom(zoom), format(gfx::Size(), frame_rate, pixel_format) {}
uint32_t zoom;
VideoCaptureFormat format;
diff --git a/chromium/media/capture/video/fake_video_capture_device_unittest.cc b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
index b606ca0019b..df3d60628ad 100644
--- a/chromium/media/capture/video/fake_video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
@@ -118,12 +118,18 @@ class MockClient : public VideoCaptureDevice::Client {
int frame_feedback_id) override {
frame_cb_.Run(format);
}
+ void OnIncomingCapturedGfxBuffer(gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) override {
+ frame_cb_.Run(frame_format);
+ }
// Virtual methods for capturing using Client's Buffers.
Buffer ReserveOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) override {
- EXPECT_EQ(VideoPixelStorage::CPU, storage);
EXPECT_GT(dimensions.GetArea(), 0);
const VideoCaptureFormat frame_format(dimensions, 0.0, format);
return CreateStubBuffer(0, frame_format.ImageAllocationSize());
@@ -145,7 +151,6 @@ class MockClient : public VideoCaptureDevice::Client {
}
Buffer ResurrectLastOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) override {
return Buffer();
}
diff --git a/chromium/media/capture/video/file_video_capture_device_unittest.cc b/chromium/media/capture/video/file_video_capture_device_unittest.cc
index 17df56f94ff..61a32d5efa3 100644
--- a/chromium/media/capture/video/file_video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/file_video_capture_device_unittest.cc
@@ -30,11 +30,17 @@ class MockClient : public VideoCaptureDevice::Client {
int clockwise_rotation,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
- int frame_feedback_id = 0) {}
+ int frame_feedback_id = 0) override {}
- MOCK_METHOD4(
- ReserveOutputBuffer,
- Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+ void OnIncomingCapturedGfxBuffer(gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) override {}
+
+ MOCK_METHOD3(ReserveOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, int));
void OnIncomingCapturedBuffer(Buffer buffer,
const VideoCaptureFormat& format,
@@ -49,9 +55,8 @@ class MockClient : public VideoCaptureDevice::Client {
gfx::Rect visible_rect,
const VideoFrameMetadata& additional_metadata) override {}
- MOCK_METHOD4(
- ResurrectLastOutputBuffer,
- Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+ MOCK_METHOD3(ResurrectLastOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, int));
MOCK_METHOD2(OnError, void(const base::Location&, const std::string&));
@@ -140,4 +145,4 @@ TEST_F(FileVideoCaptureDeviceTest, TakePhoto) {
run_loop.Run();
}
-} // namespace media \ No newline at end of file
+} // namespace media
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc b/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
index 8ba6719c2b0..bbbd9b72b03 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
@@ -182,9 +182,15 @@ class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
base::TimeTicks,
base::TimeDelta,
int));
- MOCK_METHOD4(
- ReserveOutputBuffer,
- Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+ MOCK_METHOD6(OnIncomingCapturedGfxBuffer,
+ void(gfx::GpuMemoryBuffer* buffer,
+ const media::VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id));
+ MOCK_METHOD3(ReserveOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, int));
void OnIncomingCapturedBuffer(Buffer buffer,
const VideoCaptureFormat& frame_format,
base::TimeTicks reference_time,
@@ -202,9 +208,8 @@ class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
DoOnIncomingCapturedVideoFrame();
}
MOCK_METHOD0(DoOnIncomingCapturedVideoFrame, void(void));
- MOCK_METHOD4(
- ResurrectLastOutputBuffer,
- Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+ MOCK_METHOD3(ResurrectLastOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, int));
MOCK_METHOD2(OnError,
void(const base::Location& from_here,
const std::string& reason));
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
new file mode 100644
index 00000000000..69629786d89
--- /dev/null
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
@@ -0,0 +1,128 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
+
+#include <memory>
+
+#if defined(OS_CHROMEOS)
+#include "media/capture/video/chromeos/stream_buffer_manager.h"
+#endif
+
+using ::testing::Return;
+
+namespace media {
+namespace unittest_internal {
+
+namespace {
+
+class FakeGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
+ public:
+ FakeGpuMemoryBuffer(const gfx::Size& size, gfx::BufferFormat format)
+ : size_(size), format_(format) {
+ // We use only NV12 or R8 in unit tests.
+ EXPECT_TRUE(format == gfx::BufferFormat::YUV_420_BIPLANAR ||
+ format == gfx::BufferFormat::R_8);
+
+ size_t y_plane_size = size_.width() * size_.height();
+ size_t uv_plane_size = size_.width() * size_.height() / 2;
+ data_ = std::vector<uint8_t>(y_plane_size + uv_plane_size);
+
+ handle_.type = gfx::NATIVE_PIXMAP;
+ // Set a dummy id since this is for testing only.
+ handle_.id = gfx::GpuMemoryBufferId(0);
+
+#if defined(OS_CHROMEOS)
+ // Set a dummy fd since this is for testing only.
+ handle_.native_pixmap_handle.fds.push_back(base::FileDescriptor(0, false));
+ handle_.native_pixmap_handle.planes.push_back(
+ gfx::NativePixmapPlane(size_.width(), 0, y_plane_size));
+ handle_.native_pixmap_handle.planes.push_back(gfx::NativePixmapPlane(
+ size_.width(), handle_.native_pixmap_handle.planes[0].size,
+ uv_plane_size));
+
+ // For faking a valid JPEG blob buffer.
+ if (base::checked_cast<size_t>(size_.width()) >= sizeof(Camera3JpegBlob)) {
+ Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
+ reinterpret_cast<uintptr_t>(data_.data()) + size_.width() -
+ sizeof(Camera3JpegBlob));
+ header->jpeg_blob_id = kCamera3JpegBlobId;
+ header->jpeg_size = size_.width();
+ }
+#endif
+ }
+
+ ~FakeGpuMemoryBuffer() override = default;
+
+ bool Map() override { return true; }
+
+ void* memory(size_t plane) override {
+ auto* data_ptr = data_.data();
+ size_t y_plane_size = size_.width() * size_.height();
+ switch (plane) {
+ case 0:
+ return reinterpret_cast<void*>(data_ptr);
+ case 1:
+ return reinterpret_cast<void*>(data_ptr + y_plane_size);
+ default:
+ NOTREACHED() << "Unsupported plane: " << plane;
+ return nullptr;
+ }
+ }
+
+ void Unmap() override {}
+
+ gfx::Size GetSize() const override { return size_; }
+
+ gfx::BufferFormat GetFormat() const override { return format_; }
+
+ int stride(size_t plane) const override {
+ switch (plane) {
+ case 0:
+ return size_.width();
+ case 1:
+ return size_.width();
+ default:
+ NOTREACHED() << "Unsupported plane: " << plane;
+ return 0;
+ }
+ }
+
+ void SetColorSpace(const gfx::ColorSpace& color_space) override {}
+
+ gfx::GpuMemoryBufferId GetId() const override { return handle_.id; }
+
+ gfx::GpuMemoryBufferHandle GetHandle() const override { return handle_; }
+
+ ClientBuffer AsClientBuffer() override {
+ NOTREACHED();
+ return ClientBuffer();
+ }
+
+ private:
+ gfx::Size size_;
+ gfx::BufferFormat format_;
+ std::vector<uint8_t> data_;
+ gfx::GpuMemoryBufferHandle handle_;
+ DISALLOW_IMPLICIT_CONSTRUCTORS(FakeGpuMemoryBuffer);
+};
+
+} // namespace
+
+MockGpuMemoryBufferManager::MockGpuMemoryBufferManager() = default;
+
+MockGpuMemoryBufferManager::~MockGpuMemoryBufferManager() = default;
+
+// static
+std::unique_ptr<gfx::GpuMemoryBuffer>
+MockGpuMemoryBufferManager::CreateFakeGpuMemoryBuffer(
+ const gfx::Size& size,
+ gfx::BufferFormat format,
+ gfx::BufferUsage usage,
+ gpu::SurfaceHandle surface_handle) {
+ return std::make_unique<FakeGpuMemoryBuffer>(size, format);
+}
+
+} // namespace unittest_internal
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/mock_gpu_memory_buffer_manager.h b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
index c03671fe777..20bbf07e4f3 100644
--- a/chromium/media/capture/video/chromeos/mock_gpu_memory_buffer_manager.h
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_MOCK_GPU_MEMORY_BUFFER_MANAGER_H_
-#define MEDIA_CAPTURE_VIDEO_CHROMEOS_MOCK_GPU_MEMORY_BUFFER_MANAGER_H_
+#ifndef MEDIA_CAPTURE_VIDEO_MOCK_GPU_MEMORY_BUFFER_MANAGER_H_
+#define MEDIA_CAPTURE_VIDEO_MOCK_GPU_MEMORY_BUFFER_MANAGER_H_
#include "gpu/command_buffer/client/gpu_memory_buffer_manager.h"
#include "gpu/command_buffer/common/sync_token.h"
@@ -13,36 +13,6 @@
namespace media {
namespace unittest_internal {
-class MockGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
- public:
- MockGpuMemoryBuffer();
-
- ~MockGpuMemoryBuffer() override;
-
- MOCK_METHOD0(Map, bool());
-
- MOCK_METHOD1(memory, void*(size_t plane));
-
- MOCK_METHOD0(Unmap, void());
-
- MOCK_CONST_METHOD0(GetSize, gfx::Size());
-
- MOCK_CONST_METHOD0(GetFormat, gfx::BufferFormat());
-
- MOCK_CONST_METHOD1(stride, int(size_t plane));
-
- MOCK_METHOD1(SetColorSpace, void(const gfx::ColorSpace& color_space));
-
- MOCK_CONST_METHOD0(GetId, gfx::GpuMemoryBufferId());
-
- MOCK_CONST_METHOD0(GetHandle, gfx::GpuMemoryBufferHandle());
-
- MOCK_METHOD0(AsClientBuffer, ClientBuffer());
-
- private:
- DISALLOW_COPY_AND_ASSIGN(MockGpuMemoryBuffer);
-};
-
class MockGpuMemoryBufferManager : public gpu::GpuMemoryBufferManager {
public:
MockGpuMemoryBufferManager();
@@ -60,7 +30,7 @@ class MockGpuMemoryBufferManager : public gpu::GpuMemoryBufferManager {
void(gfx::GpuMemoryBuffer* buffer,
const gpu::SyncToken& sync_token));
- std::unique_ptr<gfx::GpuMemoryBuffer> ReturnValidBuffer(
+ static std::unique_ptr<gfx::GpuMemoryBuffer> CreateFakeGpuMemoryBuffer(
const gfx::Size& size,
gfx::BufferFormat format,
gfx::BufferUsage usage,
@@ -73,4 +43,4 @@ class MockGpuMemoryBufferManager : public gpu::GpuMemoryBufferManager {
} // namespace unittest_internal
} // namespace media
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_MOCK_GPU_MEMORY_BUFFER_MANAGER_H_
+#endif // MEDIA_CAPTURE_VIDEO_MOCK_GPU_MEMORY_BUFFER_MANAGER_H_
diff --git a/chromium/media/capture/video/mock_video_frame_receiver.h b/chromium/media/capture/video/mock_video_frame_receiver.h
index 241566cf0e3..71abcf7680d 100644
--- a/chromium/media/capture/video/mock_video_frame_receiver.h
+++ b/chromium/media/capture/video/mock_video_frame_receiver.h
@@ -29,10 +29,8 @@ class MockVideoFrameReceiver : public VideoFrameReceiver {
MOCK_METHOD0(OnStarted, void());
MOCK_METHOD0(OnStartedUsingGpuDecode, void());
- void OnNewBufferHandle(
- int buffer_id,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::HandleProvider>
- handle_provider) override {
+ void OnNewBuffer(int buffer_id,
+ media::mojom::VideoBufferHandlePtr buffer_handle) override {
MockOnNewBufferHandle(buffer_id);
}
diff --git a/chromium/media/capture/video/shared_memory_buffer_tracker.cc b/chromium/media/capture/video/shared_memory_buffer_tracker.cc
index 6f8f3b62a51..7cd4a05e580 100644
--- a/chromium/media/capture/video/shared_memory_buffer_tracker.cc
+++ b/chromium/media/capture/video/shared_memory_buffer_tracker.cc
@@ -14,17 +14,14 @@ SharedMemoryBufferTracker::SharedMemoryBufferTracker() = default;
SharedMemoryBufferTracker::~SharedMemoryBufferTracker() = default;
bool SharedMemoryBufferTracker::Init(const gfx::Size& dimensions,
- VideoPixelFormat format,
- VideoPixelStorage storage_type) {
+ VideoPixelFormat format) {
DVLOG(2) << __func__ << "allocating ShMem of " << dimensions.ToString();
set_dimensions(dimensions);
// |dimensions| can be 0x0 for trackers that do not require memory backing.
set_max_pixel_count(dimensions.GetArea());
set_pixel_format(format);
- set_storage_type(storage_type);
return provider_.InitForSize(
- VideoCaptureFormat(dimensions, 0.0f, format, storage_type)
- .ImageAllocationSize());
+ VideoCaptureFormat(dimensions, 0.0f, format).ImageAllocationSize());
}
std::unique_ptr<VideoCaptureBufferHandle>
diff --git a/chromium/media/capture/video/shared_memory_buffer_tracker.h b/chromium/media/capture/video/shared_memory_buffer_tracker.h
index de3cdcc83d1..2c92ca017e2 100644
--- a/chromium/media/capture/video/shared_memory_buffer_tracker.h
+++ b/chromium/media/capture/video/shared_memory_buffer_tracker.h
@@ -21,9 +21,7 @@ class SharedMemoryBufferTracker final : public VideoCaptureBufferTracker {
SharedMemoryBufferTracker();
~SharedMemoryBufferTracker() override;
- bool Init(const gfx::Size& dimensions,
- VideoPixelFormat format,
- VideoPixelStorage storage_type) override;
+ bool Init(const gfx::Size& dimensions, VideoPixelFormat format) override;
// Implementation of VideoCaptureBufferTracker:
std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() override;
diff --git a/chromium/media/capture/video/video_capture_buffer_pool.h b/chromium/media/capture/video/video_capture_buffer_pool.h
index 5fb47c52bb6..1760aea78a4 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool.h
+++ b/chromium/media/capture/video/video_capture_buffer_pool.h
@@ -69,7 +69,6 @@ class CAPTURE_EXPORT VideoCaptureBufferPool
// returned via |buffer_id_to_drop|.
virtual int ReserveForProducer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id,
int* buffer_id_to_drop) = 0;
@@ -87,8 +86,7 @@ class CAPTURE_EXPORT VideoCaptureBufferPool
// A producer may assume the content of the buffer has been preserved and may
// also make modifications.
virtual int ResurrectLastForProducer(const gfx::Size& dimensions,
- VideoPixelFormat format,
- VideoPixelStorage storage) = 0;
+ VideoPixelFormat format) = 0;
// Returns a snapshot of the current number of buffers in-use divided by the
// maximum |count_|.
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
index 4c6d696187e..477f5697029 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
@@ -68,12 +68,11 @@ VideoCaptureBufferPoolImpl::GetHandleForInProcessAccess(int buffer_id) {
int VideoCaptureBufferPoolImpl::ReserveForProducer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id,
int* buffer_id_to_drop) {
base::AutoLock lock(lock_);
- return ReserveForProducerInternal(dimensions, format, storage,
- frame_feedback_id, buffer_id_to_drop);
+ return ReserveForProducerInternal(dimensions, format, frame_feedback_id,
+ buffer_id_to_drop);
}
void VideoCaptureBufferPoolImpl::RelinquishProducerReservation(int buffer_id) {
@@ -121,8 +120,7 @@ void VideoCaptureBufferPoolImpl::RelinquishConsumerHold(int buffer_id,
int VideoCaptureBufferPoolImpl::ResurrectLastForProducer(
const gfx::Size& dimensions,
- VideoPixelFormat format,
- VideoPixelStorage storage) {
+ VideoPixelFormat format) {
base::AutoLock lock(lock_);
// Return early if the last relinquished buffer has been re-used already.
@@ -132,14 +130,13 @@ int VideoCaptureBufferPoolImpl::ResurrectLastForProducer(
// If there are no consumers reading from this buffer, then it's safe to
// provide this buffer back to the producer (because the producer may
// potentially modify the content). Check that the expected dimensions,
- // format, and storage match.
+ // and format match.
auto it = trackers_.find(last_relinquished_buffer_id_);
DCHECK(it != trackers_.end());
DCHECK(!it->second->held_by_producer());
if (it->second->consumer_hold_count() == 0 &&
it->second->dimensions() == dimensions &&
- it->second->pixel_format() == format &&
- it->second->storage_type() == storage) {
+ it->second->pixel_format() == format) {
it->second->set_held_by_producer(true);
const int resurrected_buffer_id = last_relinquished_buffer_id_;
last_relinquished_buffer_id_ = kInvalidId;
@@ -163,7 +160,6 @@ double VideoCaptureBufferPoolImpl::GetBufferPoolUtilization() const {
int VideoCaptureBufferPoolImpl::ReserveForProducerInternal(
const gfx::Size& dimensions,
VideoPixelFormat pixel_format,
- VideoPixelStorage storage_type,
int frame_feedback_id,
int* buffer_id_to_drop) {
lock_.AssertAcquired();
@@ -179,8 +175,7 @@ int VideoCaptureBufferPoolImpl::ReserveForProducerInternal(
VideoCaptureBufferTracker* const tracker = it->second.get();
if (!tracker->consumer_hold_count() && !tracker->held_by_producer()) {
if (tracker->max_pixel_count() >= size_in_pixels &&
- (tracker->pixel_format() == pixel_format) &&
- (tracker->storage_type() == storage_type)) {
+ (tracker->pixel_format() == pixel_format)) {
if (it->first == last_relinquished_buffer_id_) {
// This buffer would do just fine, but avoid returning it because the
// client may want to resurrect it. It will be returned perforce if
@@ -226,8 +221,8 @@ int VideoCaptureBufferPoolImpl::ReserveForProducerInternal(
const int buffer_id = next_buffer_id_++;
std::unique_ptr<VideoCaptureBufferTracker> tracker =
- buffer_tracker_factory_->CreateTracker(storage_type);
- if (!tracker->Init(dimensions, pixel_format, storage_type)) {
+ buffer_tracker_factory_->CreateTracker();
+ if (!tracker->Init(dimensions, pixel_format)) {
DLOG(ERROR) << "Error initializing VideoCaptureBufferTracker";
return kInvalidId;
}
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.h b/chromium/media/capture/video/video_capture_buffer_pool_impl.h
index 5a0fafddb47..34b585f0885 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.h
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.h
@@ -44,13 +44,11 @@ class CAPTURE_EXPORT VideoCaptureBufferPoolImpl
int buffer_id) override;
int ReserveForProducer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id,
int* buffer_id_to_drop) override;
void RelinquishProducerReservation(int buffer_id) override;
int ResurrectLastForProducer(const gfx::Size& dimensions,
- VideoPixelFormat format,
- VideoPixelStorage storage) override;
+ VideoPixelFormat format) override;
double GetBufferPoolUtilization() const override;
void HoldForConsumers(int buffer_id, int num_clients) override;
void RelinquishConsumerHold(int buffer_id, int num_clients) override;
@@ -61,7 +59,6 @@ class CAPTURE_EXPORT VideoCaptureBufferPoolImpl
int ReserveForProducerInternal(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id,
int* tracker_id_to_drop);
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker.h b/chromium/media/capture/video/video_capture_buffer_tracker.h
index e7f03da122a..040ff5368bb 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker.h
+++ b/chromium/media/capture/video/video_capture_buffer_tracker.h
@@ -14,10 +14,8 @@
namespace media {
-// Keeps track of the state of a given mappable resource. Each
-// VideoCaptureBufferTracker carries indication of pixel format and storage
-// type. This is a base class for implementations using different kinds of
-// storage.
+// Keeps track of the state of a given mappable resource. This is a base class
+// for implementations using different kinds of storage.
class CAPTURE_EXPORT VideoCaptureBufferTracker {
public:
VideoCaptureBufferTracker()
@@ -25,9 +23,7 @@ class CAPTURE_EXPORT VideoCaptureBufferTracker {
held_by_producer_(false),
consumer_hold_count_(0),
frame_feedback_id_(0) {}
- virtual bool Init(const gfx::Size& dimensions,
- VideoPixelFormat format,
- VideoPixelStorage storage_type) = 0;
+ virtual bool Init(const gfx::Size& dimensions, VideoPixelFormat format) = 0;
virtual ~VideoCaptureBufferTracker(){};
const gfx::Size& dimensions() const { return dimensions_; }
@@ -36,10 +32,6 @@ class CAPTURE_EXPORT VideoCaptureBufferTracker {
void set_max_pixel_count(size_t count) { max_pixel_count_ = count; }
VideoPixelFormat pixel_format() const { return pixel_format_; }
void set_pixel_format(VideoPixelFormat format) { pixel_format_ = format; }
- VideoPixelStorage storage_type() const { return storage_type_; }
- void set_storage_type(VideoPixelStorage storage_type) {
- storage_type_ = storage_type;
- }
bool held_by_producer() const { return held_by_producer_; }
void set_held_by_producer(bool value) { held_by_producer_ = value; }
int consumer_hold_count() const { return consumer_hold_count_; }
@@ -60,7 +52,6 @@ class CAPTURE_EXPORT VideoCaptureBufferTracker {
gfx::Size dimensions_;
size_t max_pixel_count_;
VideoPixelFormat pixel_format_;
- VideoPixelStorage storage_type_;
// Indicates whether this VideoCaptureBufferTracker is currently referenced by
// the producer.
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker_factory.h b/chromium/media/capture/video/video_capture_buffer_tracker_factory.h
index 84196e9a131..542252d981b 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker_factory.h
+++ b/chromium/media/capture/video/video_capture_buffer_tracker_factory.h
@@ -17,8 +17,7 @@ class VideoCaptureBufferTracker;
class CAPTURE_EXPORT VideoCaptureBufferTrackerFactory {
public:
virtual ~VideoCaptureBufferTrackerFactory() {}
- virtual std::unique_ptr<VideoCaptureBufferTracker> CreateTracker(
- VideoPixelStorage storage_type) = 0;
+ virtual std::unique_ptr<VideoCaptureBufferTracker> CreateTracker() = 0;
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
index fe1df4fb9d8..4da6e67567a 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
@@ -11,8 +11,7 @@
namespace media {
std::unique_ptr<VideoCaptureBufferTracker>
-VideoCaptureBufferTrackerFactoryImpl::CreateTracker(VideoPixelStorage storage) {
- DCHECK_EQ(VideoPixelStorage::CPU, storage);
+VideoCaptureBufferTrackerFactoryImpl::CreateTracker() {
return std::make_unique<SharedMemoryBufferTracker>();
}
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h
index db6060abc3d..941cbba83b4 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h
+++ b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h
@@ -15,8 +15,7 @@ namespace media {
class CAPTURE_EXPORT VideoCaptureBufferTrackerFactoryImpl
: public VideoCaptureBufferTrackerFactory {
public:
- std::unique_ptr<VideoCaptureBufferTracker> CreateTracker(
- VideoPixelStorage storage) override;
+ std::unique_ptr<VideoCaptureBufferTracker> CreateTracker() override;
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device.h b/chromium/media/capture/video/video_capture_device.h
index fda97683df8..a2642bae0c0 100644
--- a/chromium/media/capture/video/video_capture_device.h
+++ b/chromium/media/capture/video/video_capture_device.h
@@ -147,6 +147,22 @@ class CAPTURE_EXPORT VideoCaptureDevice
base::TimeDelta timestamp,
int frame_feedback_id = 0) = 0;
+ // Captured a new video frame, data for which is stored in the
+ // GpuMemoryBuffer pointed to by |buffer|. The format of the frame is
+ // described by |frame_format|. Since the memory buffer pointed to by
+ // |buffer| may be allocated with some size/address alignment requirement,
+ // this method takes into consideration the size and offset of each plane in
+ // |buffer| when creating the content of the output buffer.
+ // |clockwise_rotation|, |reference_time|, |timestamp|, and
+ // |frame_feedback_id| serve the same purposes as in OnIncomingCapturedData.
+ virtual void OnIncomingCapturedGfxBuffer(
+ gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) = 0;
+
// Reserve an output buffer into which contents can be captured directly.
// The returned Buffer will always be allocated with a memory size suitable
// for holding a packed video frame with pixels of |format| format, of
@@ -159,7 +175,6 @@ class CAPTURE_EXPORT VideoCaptureDevice
// holds on to the contained |buffer_read_write_permission|.
virtual Buffer ReserveOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) = 0;
// Provides VCD::Client with a populated Buffer containing the content of
@@ -189,7 +204,6 @@ class CAPTURE_EXPORT VideoCaptureDevice
// When this operation fails, nullptr will be returned.
virtual Buffer ResurrectLastOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int new_frame_feedback_id) = 0;
// An error has occurred that cannot be handled and VideoCaptureDevice must
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index d023d853042..1a1bc937b31 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -30,6 +30,40 @@ bool IsFormatSupported(media::VideoPixelFormat pixel_format) {
return (pixel_format == media::PIXEL_FORMAT_I420 ||
pixel_format == media::PIXEL_FORMAT_Y16);
}
+
+libyuv::RotationMode TranslateRotation(int rotation_degrees) {
+ DCHECK_EQ(0, rotation_degrees % 90)
+ << " Rotation must be a multiple of 90, now: " << rotation_degrees;
+ libyuv::RotationMode rotation_mode = libyuv::kRotate0;
+ if (rotation_degrees == 90)
+ rotation_mode = libyuv::kRotate90;
+ else if (rotation_degrees == 180)
+ rotation_mode = libyuv::kRotate180;
+ else if (rotation_degrees == 270)
+ rotation_mode = libyuv::kRotate270;
+ return rotation_mode;
+}
+
+void GetI420BufferAccess(
+ const media::VideoCaptureDevice::Client::Buffer& buffer,
+ const gfx::Size& dimensions,
+ uint8_t** y_plane_data,
+ uint8_t** u_plane_data,
+ uint8_t** v_plane_data,
+ int* y_plane_stride,
+ int* uv_plane_stride) {
+ *y_plane_data = buffer.handle_provider->GetHandleForInProcessAccess()->data();
+ *u_plane_data = *y_plane_data + media::VideoFrame::PlaneSize(
+ media::PIXEL_FORMAT_I420,
+ media::VideoFrame::kYPlane, dimensions)
+ .GetArea();
+ *v_plane_data = *u_plane_data + media::VideoFrame::PlaneSize(
+ media::PIXEL_FORMAT_I420,
+ media::VideoFrame::kUPlane, dimensions)
+ .GetArea();
+ *y_plane_stride = dimensions.width();
+ *uv_plane_stride = *y_plane_stride / 2;
+}
}
namespace media {
@@ -105,7 +139,6 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
base::TimeDelta timestamp,
int frame_feedback_id) {
TRACE_EVENT0("media", "VideoCaptureDeviceClient::OnIncomingCapturedData");
- DCHECK_EQ(VideoPixelStorage::CPU, format.pixel_storage);
if (last_captured_pixel_format_ != format.pixel_format) {
OnLog("Pixel format: " + VideoPixelFormatToString(format.pixel_format));
@@ -140,19 +173,11 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
if (rotation == 90 || rotation == 270)
std::swap(destination_width, destination_height);
- DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: "
- << rotation;
- libyuv::RotationMode rotation_mode = libyuv::kRotate0;
- if (rotation == 90)
- rotation_mode = libyuv::kRotate90;
- else if (rotation == 180)
- rotation_mode = libyuv::kRotate180;
- else if (rotation == 270)
- rotation_mode = libyuv::kRotate270;
+ libyuv::RotationMode rotation_mode = TranslateRotation(rotation);
const gfx::Size dimensions(destination_width, destination_height);
- Buffer buffer = ReserveOutputBuffer(
- dimensions, PIXEL_FORMAT_I420, VideoPixelStorage::CPU, frame_feedback_id);
+ Buffer buffer =
+ ReserveOutputBuffer(dimensions, PIXEL_FORMAT_I420, frame_feedback_id);
#if DCHECK_IS_ON()
dropped_frame_counter_ = buffer.is_valid() ? 0 : dropped_frame_counter_ + 1;
if (dropped_frame_counter_ >= kMaxDroppedFrames)
@@ -165,19 +190,13 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
DCHECK(dimensions.height());
DCHECK(dimensions.width());
- auto buffer_access = buffer.handle_provider->GetHandleForInProcessAccess();
- uint8_t* y_plane_data = buffer_access->data();
- uint8_t* u_plane_data =
- y_plane_data +
- VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane, dimensions)
- .GetArea();
- uint8_t* v_plane_data =
- u_plane_data +
- VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane, dimensions)
- .GetArea();
-
- const int yplane_stride = dimensions.width();
- const int uv_plane_stride = yplane_stride / 2;
+ uint8_t* y_plane_data;
+ uint8_t* u_plane_data;
+ uint8_t* v_plane_data;
+ int yplane_stride, uv_plane_stride;
+ GetI420BufferAccess(buffer, dimensions, &y_plane_data, &u_plane_data,
+ &v_plane_data, &yplane_stride, &uv_plane_stride);
+
int crop_x = 0;
int crop_y = 0;
libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY;
@@ -278,16 +297,83 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
return;
}
- const VideoCaptureFormat output_format = VideoCaptureFormat(
- dimensions, format.frame_rate, PIXEL_FORMAT_I420, VideoPixelStorage::CPU);
+ const VideoCaptureFormat output_format =
+ VideoCaptureFormat(dimensions, format.frame_rate, PIXEL_FORMAT_I420);
OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
timestamp);
}
+void VideoCaptureDeviceClient::OnIncomingCapturedGfxBuffer(
+ gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id) {
+ TRACE_EVENT0("media",
+ "VideoCaptureDeviceClient::OnIncomingCapturedGfxBuffer");
+
+ if (last_captured_pixel_format_ != frame_format.pixel_format) {
+ OnLog("Pixel format: " +
+ VideoPixelFormatToString(frame_format.pixel_format));
+ last_captured_pixel_format_ = frame_format.pixel_format;
+ }
+
+ if (!frame_format.IsValid())
+ return;
+
+ int destination_width = buffer->GetSize().width();
+ int destination_height = buffer->GetSize().height();
+ if (clockwise_rotation == 90 || clockwise_rotation == 270)
+ std::swap(destination_width, destination_height);
+
+ libyuv::RotationMode rotation_mode = TranslateRotation(clockwise_rotation);
+
+ const gfx::Size dimensions(destination_width, destination_height);
+ auto output_buffer =
+ ReserveOutputBuffer(dimensions, PIXEL_FORMAT_I420, frame_feedback_id);
+
+ // Failed to reserve I420 output buffer, so drop the frame.
+ if (!output_buffer.is_valid())
+ return;
+
+ uint8_t* y_plane_data;
+ uint8_t* u_plane_data;
+ uint8_t* v_plane_data;
+ int y_plane_stride, uv_plane_stride;
+ GetI420BufferAccess(output_buffer, dimensions, &y_plane_data, &u_plane_data,
+ &v_plane_data, &y_plane_stride, &uv_plane_stride);
+
+ int ret = -EINVAL;
+ switch (frame_format.pixel_format) {
+ case PIXEL_FORMAT_NV12:
+ ret = libyuv::NV12ToI420Rotate(
+ reinterpret_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0),
+ reinterpret_cast<uint8_t*>(buffer->memory(1)), buffer->stride(1),
+ y_plane_data, y_plane_stride, u_plane_data, uv_plane_stride,
+ v_plane_data, uv_plane_stride, buffer->GetSize().width(),
+ buffer->GetSize().height(), rotation_mode);
+ break;
+
+ default:
+ LOG(ERROR) << "Unsupported format: "
+ << VideoPixelFormatToString(frame_format.pixel_format);
+ }
+ if (ret) {
+ DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from "
+ << VideoPixelFormatToString(frame_format.pixel_format);
+ return;
+ }
+
+ const VideoCaptureFormat output_format = VideoCaptureFormat(
+ dimensions, frame_format.frame_rate, PIXEL_FORMAT_I420);
+ OnIncomingCapturedBuffer(std::move(output_buffer), output_format,
+ reference_time, timestamp);
+}
+
VideoCaptureDevice::Client::Buffer
VideoCaptureDeviceClient::ReserveOutputBuffer(const gfx::Size& frame_size,
VideoPixelFormat pixel_format,
- VideoPixelStorage pixel_storage,
int frame_feedback_id) {
DFAKE_SCOPED_RECURSIVE_LOCK(call_from_producer_);
DCHECK_GT(frame_size.width(), 0);
@@ -295,9 +381,8 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(const gfx::Size& frame_size,
DCHECK(IsFormatSupported(pixel_format));
int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
- const int buffer_id =
- buffer_pool_->ReserveForProducer(frame_size, pixel_format, pixel_storage,
- frame_feedback_id, &buffer_id_to_drop);
+ const int buffer_id = buffer_pool_->ReserveForProducer(
+ frame_size, pixel_format, frame_feedback_id, &buffer_id_to_drop);
if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
// |buffer_pool_| has decided to release a buffer. Notify receiver in case
// the buffer has already been shared with it.
@@ -313,9 +398,12 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(const gfx::Size& frame_size,
return Buffer();
if (!base::ContainsValue(buffer_ids_known_by_receiver_, buffer_id)) {
- receiver_->OnNewBufferHandle(
- buffer_id, std::make_unique<BufferPoolBufferHandleProvider>(
- buffer_pool_, buffer_id));
+ media::mojom::VideoBufferHandlePtr buffer_handle =
+ media::mojom::VideoBufferHandle::New();
+ buffer_handle->set_shared_buffer_handle(
+ buffer_pool_->GetHandleForInterProcessTransit(buffer_id,
+ true /*read_only*/));
+ receiver_->OnNewBuffer(buffer_id, std::move(buffer_handle));
buffer_ids_known_by_receiver_.push_back(buffer_id);
}
@@ -350,7 +438,6 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
info->timestamp = timestamp;
info->pixel_format = format.pixel_format;
- info->storage_type = format.pixel_storage;
info->coded_size = format.frame_size;
info->visible_rect = visible_rect;
info->metadata = metadata.GetInternalValues().Clone();
@@ -366,11 +453,10 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
VideoCaptureDevice::Client::Buffer
VideoCaptureDeviceClient::ResurrectLastOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int new_frame_feedback_id) {
DFAKE_SCOPED_RECURSIVE_LOCK(call_from_producer_);
const int buffer_id =
- buffer_pool_->ResurrectLastForProducer(dimensions, format, storage);
+ buffer_pool_->ResurrectLastForProducer(dimensions, format);
if (buffer_id == VideoCaptureBufferPool::kInvalidId)
return Buffer();
return MakeBufferStruct(buffer_pool_, buffer_id, new_frame_feedback_id);
@@ -407,9 +493,8 @@ void VideoCaptureDeviceClient::OnIncomingCapturedY16Data(
base::TimeTicks reference_time,
base::TimeDelta timestamp,
int frame_feedback_id) {
- Buffer buffer =
- ReserveOutputBuffer(format.frame_size, PIXEL_FORMAT_Y16,
- VideoPixelStorage::CPU, frame_feedback_id);
+ Buffer buffer = ReserveOutputBuffer(format.frame_size, PIXEL_FORMAT_Y16,
+ frame_feedback_id);
// The input |length| can be greater than the required buffer size because of
// paddings and/or alignments, but it cannot be smaller.
DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize());
@@ -423,9 +508,8 @@ void VideoCaptureDeviceClient::OnIncomingCapturedY16Data(
return;
auto buffer_access = buffer.handle_provider->GetHandleForInProcessAccess();
memcpy(buffer_access->data(), data, length);
- const VideoCaptureFormat output_format =
- VideoCaptureFormat(format.frame_size, format.frame_rate, PIXEL_FORMAT_Y16,
- VideoPixelStorage::CPU);
+ const VideoCaptureFormat output_format = VideoCaptureFormat(
+ format.frame_size, format.frame_rate, PIXEL_FORMAT_Y16);
OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
timestamp);
}
diff --git a/chromium/media/capture/video/video_capture_device_client.h b/chromium/media/capture/video/video_capture_device_client.h
index 77bc796a1c6..7708de91fc7 100644
--- a/chromium/media/capture/video/video_capture_device_client.h
+++ b/chromium/media/capture/video/video_capture_device_client.h
@@ -53,13 +53,18 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
void OnIncomingCapturedData(const uint8_t* data,
int length,
const VideoCaptureFormat& frame_format,
- int rotation,
+ int clockwise_rotation,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
int frame_feedback_id = 0) override;
+ void OnIncomingCapturedGfxBuffer(gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) override;
Buffer ReserveOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) override;
void OnIncomingCapturedBuffer(Buffer buffer,
const VideoCaptureFormat& format,
@@ -74,7 +79,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
const VideoFrameMetadata& additional_metadata) override;
Buffer ResurrectLastOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int new_frame_feedback_id) override;
void OnError(const base::Location& from_here,
const std::string& reason) override;
diff --git a/chromium/media/capture/video/video_capture_device_client_unittest.cc b/chromium/media/capture/video/video_capture_device_client_unittest.cc
index 1fcaa7241ac..a3467e7e17a 100644
--- a/chromium/media/capture/video/video_capture_device_client_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_client_unittest.cc
@@ -13,6 +13,7 @@
#include "base/macros.h"
#include "build/build_config.h"
#include "media/base/limits.h"
+#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
#include "media/capture/video/mock_video_frame_receiver.h"
#include "media/capture/video/video_capture_buffer_pool_impl.h"
#include "media/capture/video/video_capture_buffer_tracker_factory_impl.h"
@@ -47,9 +48,11 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
VideoCaptureDeviceClientTest() {
scoped_refptr<VideoCaptureBufferPoolImpl> buffer_pool(
new VideoCaptureBufferPoolImpl(
- std::make_unique<VideoCaptureBufferTrackerFactoryImpl>(), 1));
+ std::make_unique<VideoCaptureBufferTrackerFactoryImpl>(), 2));
auto controller = std::make_unique<MockVideoFrameReceiver>();
receiver_ = controller.get();
+ gpu_memory_buffer_manager_ =
+ std::make_unique<unittest_internal::MockGpuMemoryBufferManager>();
device_client_ = std::make_unique<VideoCaptureDeviceClient>(
std::move(controller), buffer_pool,
base::Bind(&ReturnNullPtrAsJpecDecoder));
@@ -58,6 +61,8 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
protected:
MockVideoFrameReceiver* receiver_;
+ std::unique_ptr<unittest_internal::MockGpuMemoryBufferManager>
+ gpu_memory_buffer_manager_;
std::unique_ptr<VideoCaptureDeviceClient> device_client_;
private:
@@ -70,8 +75,7 @@ TEST_F(VideoCaptureDeviceClientTest, Minimal) {
const size_t kScratchpadSizeInBytes = 400;
unsigned char data[kScratchpadSizeInBytes] = {};
const VideoCaptureFormat kFrameFormat(gfx::Size(10, 10), 30.0f /*frame_rate*/,
- PIXEL_FORMAT_I420,
- VideoPixelStorage::CPU);
+ PIXEL_FORMAT_I420);
DCHECK(device_client_.get());
{
InSequence s;
@@ -84,6 +88,26 @@ TEST_F(VideoCaptureDeviceClientTest, Minimal) {
device_client_->OnIncomingCapturedData(data, kScratchpadSizeInBytes,
kFrameFormat, 0 /*clockwise rotation*/,
base::TimeTicks(), base::TimeDelta());
+
+ const gfx::Size kBufferDimensions(10, 10);
+ const VideoCaptureFormat kFrameFormatNV12(
+ kBufferDimensions, 30.0f /*frame_rate*/, PIXEL_FORMAT_NV12);
+ std::unique_ptr<gfx::GpuMemoryBuffer> buffer =
+ gpu_memory_buffer_manager_->CreateFakeGpuMemoryBuffer(
+ kBufferDimensions, gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE, gpu::kNullSurfaceHandle);
+ {
+ InSequence s;
+ const int expected_buffer_id = 1;
+ EXPECT_CALL(*receiver_, OnLog(_));
+ EXPECT_CALL(*receiver_, MockOnNewBufferHandle(expected_buffer_id));
+ EXPECT_CALL(*receiver_, MockOnFrameReadyInBuffer(expected_buffer_id, _, _));
+ EXPECT_CALL(*receiver_, OnBufferRetired(expected_buffer_id));
+ }
+ device_client_->OnIncomingCapturedGfxBuffer(
+ buffer.get(), kFrameFormatNV12, 0 /*clockwise rotation*/,
+ base::TimeTicks(), base::TimeDelta());
+
// Releasing |device_client_| will also release |receiver_|.
device_client_.reset();
}
@@ -95,8 +119,7 @@ TEST_F(VideoCaptureDeviceClientTest, FailsSilentlyGivenInvalidFrameFormat) {
// kFrameFormat is invalid in a number of ways.
const VideoCaptureFormat kFrameFormat(
gfx::Size(limits::kMaxDimension + 1, limits::kMaxDimension),
- limits::kMaxFramesPerSecond + 1, VideoPixelFormat::PIXEL_FORMAT_I420,
- VideoPixelStorage::CPU);
+ limits::kMaxFramesPerSecond + 1, VideoPixelFormat::PIXEL_FORMAT_I420);
DCHECK(device_client_.get());
// Expect the the call to fail silently inside the VideoCaptureDeviceClient.
EXPECT_CALL(*receiver_, OnLog(_)).Times(1);
@@ -104,6 +127,19 @@ TEST_F(VideoCaptureDeviceClientTest, FailsSilentlyGivenInvalidFrameFormat) {
device_client_->OnIncomingCapturedData(data, kScratchpadSizeInBytes,
kFrameFormat, 0 /*clockwise rotation*/,
base::TimeTicks(), base::TimeDelta());
+
+ const gfx::Size kBufferDimensions(10, 10);
+ const VideoCaptureFormat kFrameFormatNV12(
+ kBufferDimensions, 30.0f /*frame_rate*/, PIXEL_FORMAT_NV12);
+ std::unique_ptr<gfx::GpuMemoryBuffer> buffer =
+ gpu_memory_buffer_manager_->CreateFakeGpuMemoryBuffer(
+ kBufferDimensions, gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE, gpu::kNullSurfaceHandle);
+ EXPECT_CALL(*receiver_, MockOnFrameReadyInBuffer(_, _, _)).Times(0);
+ device_client_->OnIncomingCapturedGfxBuffer(
+ buffer.get(), kFrameFormat, 0 /*clockwise rotation*/, base::TimeTicks(),
+ base::TimeDelta());
+
Mock::VerifyAndClearExpectations(receiver_);
}
@@ -112,26 +148,30 @@ TEST_F(VideoCaptureDeviceClientTest, DropsFrameIfNoBuffer) {
const size_t kScratchpadSizeInBytes = 400;
unsigned char data[kScratchpadSizeInBytes] = {};
const VideoCaptureFormat kFrameFormat(gfx::Size(10, 10), 30.0f /*frame_rate*/,
- PIXEL_FORMAT_I420,
- VideoPixelStorage::CPU);
+ PIXEL_FORMAT_I420);
EXPECT_CALL(*receiver_, OnLog(_)).Times(1);
// Simulate that receiver still holds |buffer_read_permission| for the first
- // buffer when the second call to OnIncomingCapturedData comes in.
- // Since we set up the buffer pool to max out at 1 buffer, this should cause
+ // two buffers when the third call to OnIncomingCapturedData comes in.
+  // Since we set up the buffer pool to max out at 2 buffers, this should cause
// |device_client_| to drop the frame.
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
+ std::vector<std::unique_ptr<
+ VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>>
read_permission;
EXPECT_CALL(*receiver_, MockOnFrameReadyInBuffer(_, _, _))
- .WillOnce(Invoke(
+ .Times(2)
+ .WillRepeatedly(Invoke(
[&read_permission](
int buffer_id,
std::unique_ptr<
VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>*
buffer_read_permission,
const gfx::Size&) {
- read_permission = std::move(*buffer_read_permission);
+ read_permission.push_back(std::move(*buffer_read_permission));
}));
- // Pass two frames. The second will be dropped.
+ // Pass three frames. The third will be dropped.
+ device_client_->OnIncomingCapturedData(data, kScratchpadSizeInBytes,
+ kFrameFormat, 0 /*clockwise rotation*/,
+ base::TimeTicks(), base::TimeDelta());
device_client_->OnIncomingCapturedData(data, kScratchpadSizeInBytes,
kFrameFormat, 0 /*clockwise rotation*/,
base::TimeTicks(), base::TimeDelta());
@@ -230,6 +270,37 @@ TEST_F(VideoCaptureDeviceClientTest, CheckRotationsAndCrops) {
Mock::VerifyAndClearExpectations(receiver_);
}
+
+ SizeAndRotation kSizeAndRotationsNV12[] = {{{6, 4}, 0, {6, 4}},
+ {{6, 4}, 90, {4, 6}},
+ {{6, 4}, 180, {6, 4}},
+ {{6, 4}, 270, {4, 6}}};
+ EXPECT_CALL(*receiver_, OnLog(_)).Times(1);
+
+ for (const auto& size_and_rotation : kSizeAndRotationsNV12) {
+ params.requested_format = VideoCaptureFormat(
+ size_and_rotation.input_resolution, 30.0f, PIXEL_FORMAT_NV12);
+ std::unique_ptr<gfx::GpuMemoryBuffer> buffer =
+ gpu_memory_buffer_manager_->CreateFakeGpuMemoryBuffer(
+ size_and_rotation.input_resolution,
+ gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
+ gpu::kNullSurfaceHandle);
+
+ gfx::Size coded_size;
+ EXPECT_CALL(*receiver_, MockOnFrameReadyInBuffer(_, _, _))
+ .Times(1)
+ .WillOnce(SaveArg<2>(&coded_size));
+ device_client_->OnIncomingCapturedGfxBuffer(
+ buffer.get(), params.requested_format, size_and_rotation.rotation,
+ base::TimeTicks(), base::TimeDelta());
+
+ EXPECT_EQ(coded_size.width(), size_and_rotation.output_resolution.width());
+ EXPECT_EQ(coded_size.height(),
+ size_and_rotation.output_resolution.height());
+
+ Mock::VerifyAndClearExpectations(receiver_);
+ }
}
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_descriptor.cc b/chromium/media/capture/video/video_capture_device_descriptor.cc
index 89dd1b7a6ef..3a718ed8c6c 100644
--- a/chromium/media/capture/video/video_capture_device_descriptor.cc
+++ b/chromium/media/capture/video/video_capture_device_descriptor.cc
@@ -90,10 +90,8 @@ const char* VideoCaptureDeviceDescriptor::GetCaptureApiTypeString() const {
return "Camera API2 Full";
case VideoCaptureApi::ANDROID_API2_LIMITED:
return "Camera API2 Limited";
- default:
- NOTREACHED() << "Unknown Video Capture API type: "
- << static_cast<int>(capture_api);
- return "Unknown API";
+ case VideoCaptureApi::UNKNOWN:
+ return "Unknown";
}
}
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index 0fef3eb2d74..21143688320 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -13,7 +13,6 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/memory/ref_counted.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/test/scoped_task_environment.h"
@@ -72,6 +71,13 @@
#define MAYBE_CaptureMjpeg CaptureMjpeg
#define MAYBE_TakePhoto TakePhoto
#define MAYBE_GetPhotoState GetPhotoState
+#define MAYBE_CaptureWithSize CaptureWithSize
+#elif defined(OS_CHROMEOS)
+#define MAYBE_AllocateBadSize DISABLED_AllocateBadSize
+#define MAYBE_CaptureMjpeg CaptureMjpeg
+#define MAYBE_TakePhoto TakePhoto
+#define MAYBE_GetPhotoState GetPhotoState
+#define MAYBE_CaptureWithSize CaptureWithSize
#elif defined(OS_LINUX)
// AllocateBadSize will hang when a real camera is attached and if more than one
// test is trying to use the camera (even across processes). Do NOT renable
@@ -169,10 +175,20 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
main_thread_->PostTask(FROM_HERE, base::BindOnce(frame_cb_, format));
}
+ void OnIncomingCapturedGfxBuffer(gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) override {
+ ASSERT_TRUE(buffer);
+ ASSERT_GT(buffer->GetSize().width() * buffer->GetSize().height(), 0);
+ main_thread_->PostTask(FROM_HERE, base::BindOnce(frame_cb_, frame_format));
+ }
+
// Trampoline methods to workaround GMOCK problems with std::unique_ptr<>.
Buffer ReserveOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
int frame_feedback_id) override {
DoReserveOutputBuffer();
NOTREACHED() << "This should never be called";
@@ -195,8 +211,7 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
}
Buffer ResurrectLastOutputBuffer(const gfx::Size& dimensions,
VideoPixelFormat format,
- VideoPixelStorage storage,
- int frame_feedback_id) {
+ int frame_feedback_id) override {
DoResurrectLastOutputBuffer();
NOTREACHED() << "This should never be called";
return Buffer();
@@ -649,13 +664,6 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_TakePhoto) {
if (!descriptor)
return;
-#if defined(OS_CHROMEOS)
- // TODO(jcliang): Remove this after we implement TakePhoto.
- if (VideoCaptureDeviceFactoryChromeOS::ShouldEnable()) {
- return;
- }
-#endif
-
#if defined(OS_ANDROID)
// TODO(mcasas): fails on Lollipop devices, reconnect https://crbug.com/646840
if (base::android::BuildInfo::GetInstance()->sdk_int() <
@@ -698,13 +706,6 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
if (!descriptor)
return;
-#if defined(OS_CHROMEOS)
- // TODO(jcliang): Remove this after we implement GetPhotoCapabilities.
- if (VideoCaptureDeviceFactoryChromeOS::ShouldEnable()) {
- return;
- }
-#endif
-
#if defined(OS_ANDROID)
// TODO(mcasas): fails on Lollipop devices, reconnect https://crbug.com/646840
if (base::android::BuildInfo::GetInstance()->sdk_int() <
@@ -730,6 +731,9 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_GetPhotoState) {
base::BindOnce(&MockImageCaptureClient::DoOnGetPhotoState,
image_capture_client_);
+ // On Chrome OS AllocateAndStart() is asynchronous, so wait until we get the
+ // first frame.
+ WaitForCapturedFrame();
base::RunLoop run_loop;
base::Closure quit_closure = BindToCurrentLoop(run_loop.QuitClosure());
EXPECT_CALL(*image_capture_client_.get(), OnCorrectGetPhotoState())
diff --git a/chromium/media/capture/video/video_capture_system_impl.cc b/chromium/media/capture/video/video_capture_system_impl.cc
index 98cbe01d955..2c41b18c970 100644
--- a/chromium/media/capture/video/video_capture_system_impl.cc
+++ b/chromium/media/capture/video/video_capture_system_impl.cc
@@ -143,10 +143,17 @@ void VideoCaptureSystemImpl::DeviceInfosReady(
}
devices_info_cache_.swap(new_devices_info_cache);
- base::ResetAndReturn(&device_enum_request_queue_.front())
- .Run(devices_info_cache_);
-
+ auto request_cb = std::move(device_enum_request_queue_.front());
device_enum_request_queue_.pop_front();
+ // If |request_cb| was the last callback in |device_enum_request_queue_|,
+ // |this| may be out of scope after running it. We need to be careful to
+ // not touch the state of |this| after running the callback in this case.
+ if (device_enum_request_queue_.empty()) {
+ std::move(request_cb).Run(devices_info_cache_);
+ return;
+ }
+ std::move(request_cb).Run(devices_info_cache_);
ProcessDeviceInfoRequest();
}
+
} // namespace media
diff --git a/chromium/media/capture/video/video_frame_receiver.h b/chromium/media/capture/video/video_frame_receiver.h
index 3d140cabca6..226f5379d9b 100644
--- a/chromium/media/capture/video/video_frame_receiver.h
+++ b/chromium/media/capture/video/video_frame_receiver.h
@@ -21,11 +21,12 @@ class CAPTURE_EXPORT VideoFrameReceiver {
// Tells the VideoFrameReceiver that the producer is going to subsequently use
// the provided buffer as one of possibly many for frame delivery via
// OnFrameReadyInBuffer(). Note, that a call to this method does not mean that
- // the caller allows the receiver to read from or write to the buffer.
- virtual void OnNewBufferHandle(
- int buffer_id,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::HandleProvider>
- handle_provider) = 0;
+ // the caller allows the receiver to read from or write to the buffer just
+ // yet. Temporary permission to read will be given with subsequent calls to
+ // OnFrameReadyInBuffer().
+ virtual void OnNewBuffer(
+ int32_t buffer_id,
+ media::mojom::VideoBufferHandlePtr buffer_handle) = 0;
// Tells the VideoFrameReceiver that a new frame is ready for consumption
// in the buffer with id |buffer_id| and allows it to read the data from
diff --git a/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc b/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
index da8cf8f8df7..a49f93d380f 100644
--- a/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
+++ b/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
@@ -15,14 +15,13 @@ VideoFrameReceiverOnTaskRunner::VideoFrameReceiverOnTaskRunner(
VideoFrameReceiverOnTaskRunner::~VideoFrameReceiverOnTaskRunner() = default;
-void VideoFrameReceiverOnTaskRunner::OnNewBufferHandle(
+void VideoFrameReceiverOnTaskRunner::OnNewBuffer(
int buffer_id,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::HandleProvider>
- handle_provider) {
+ media::mojom::VideoBufferHandlePtr buffer_handle) {
task_runner_->PostTask(
FROM_HERE,
- base::Bind(&VideoFrameReceiver::OnNewBufferHandle, receiver_, buffer_id,
- base::Passed(std::move(handle_provider))));
+ base::BindOnce(&VideoFrameReceiver::OnNewBuffer, receiver_, buffer_id,
+ base::Passed(std::move(buffer_handle))));
}
void VideoFrameReceiverOnTaskRunner::OnFrameReadyInBuffer(
diff --git a/chromium/media/capture/video/video_frame_receiver_on_task_runner.h b/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
index 3ee757d52e3..7e7c70d551f 100644
--- a/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
+++ b/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
@@ -23,10 +23,8 @@ class CAPTURE_EXPORT VideoFrameReceiverOnTaskRunner
scoped_refptr<base::SingleThreadTaskRunner> task_runner);
~VideoFrameReceiverOnTaskRunner() override;
- void OnNewBufferHandle(
- int buffer_id,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::HandleProvider>
- handle_provider) override;
+ void OnNewBuffer(int32_t buffer_id,
+ media::mojom::VideoBufferHandlePtr buffer_handle) override;
void OnFrameReadyInBuffer(
int buffer_id,
int frame_feedback_id,
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index 9b3a88c71da..ef04526c06f 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -161,21 +161,6 @@ bool CreateVideoCaptureDeviceMediaFoundation(const Descriptor& descriptor,
return SUCCEEDED(MFCreateDeviceSource(attributes.Get(), source));
}
-bool EnumerateVideoDevicesMediaFoundation(
- const std::vector<std::pair<GUID, GUID>>& attributes_data,
- VideoCaptureDeviceFactoryWin::MFEnumDeviceSourcesFunc
- mf_enum_device_sources_func,
- IMFActivate*** devices,
- UINT32* count) {
- ComPtr<IMFAttributes> attributes;
- if (!PrepareVideoCaptureAttributesMediaFoundation(
- attributes_data, attributes_data.size(), attributes.GetAddressOf())) {
- return false;
- }
- return SUCCEEDED(
- mf_enum_device_sources_func(attributes.Get(), devices, count));
-}
-
bool IsDeviceBlackListed(const std::string& name) {
DCHECK_EQ(BLACKLISTED_CAMERA_MAX + 1,
static_cast<int>(arraysize(kBlacklistedCameraNames)));
@@ -223,59 +208,6 @@ HRESULT EnumerateDirectShowDevices(IEnumMoniker** enum_moniker) {
return hr;
}
-void GetDeviceDescriptorsDirectShow(
- VideoCaptureDeviceFactoryWin::DirectShowEnumDevicesFunc
- direct_show_enum_devices_func,
- Descriptors* device_descriptors) {
- DCHECK(device_descriptors);
- DVLOG(1) << __func__;
-
- ComPtr<IEnumMoniker> enum_moniker;
- HRESULT hr = direct_show_enum_devices_func.Run(enum_moniker.GetAddressOf());
- // CreateClassEnumerator returns S_FALSE on some Windows OS
- // when no camera exist. Therefore the FAILED macro can't be used.
- if (hr != S_OK)
- return;
-
- // Enumerate all video capture devices.
- for (ComPtr<IMoniker> moniker;
- enum_moniker->Next(1, moniker.GetAddressOf(), NULL) == S_OK;
- moniker.Reset()) {
- ComPtr<IPropertyBag> prop_bag;
- hr = moniker->BindToStorage(0, 0, IID_PPV_ARGS(&prop_bag));
- if (FAILED(hr))
- continue;
-
- // Find the description or friendly name.
- ScopedVariant name;
- hr = prop_bag->Read(L"Description", name.Receive(), 0);
- if (FAILED(hr))
- hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0);
-
- if (FAILED(hr) || name.type() != VT_BSTR)
- continue;
-
- const std::string device_name(base::SysWideToUTF8(V_BSTR(name.ptr())));
- if (IsDeviceBlackListed(device_name))
- continue;
-
- name.Reset();
- hr = prop_bag->Read(L"DevicePath", name.Receive(), 0);
- std::string id;
- if (FAILED(hr) || name.type() != VT_BSTR) {
- id = device_name;
- } else {
- DCHECK_EQ(name.type(), VT_BSTR);
- id = base::SysWideToUTF8(V_BSTR(name.ptr()));
- }
-
- const std::string model_id = GetDeviceModelId(id);
-
- device_descriptors->emplace_back(device_name, id, model_id,
- VideoCaptureApi::WIN_DIRECT_SHOW);
- }
-}
-
bool DescriptorsContainDeviceId(const Descriptors& descriptors,
const std::string& device_id) {
return std::find_if(
@@ -285,88 +217,16 @@ bool DescriptorsContainDeviceId(const Descriptors& descriptors,
}) != descriptors.end();
}
-// Returns true if the provided descriptors contains a non DirectShow descriptor
-// using the provided name and model
-bool DescriptorsContainNonDirectShowNameAndModel(
+// Returns a non DirectShow descriptor using the provided name and model
+Descriptors::const_iterator FindNonDirectShowDescriptorByNameAndModel(
const Descriptors& descriptors,
const std::string& name_and_model) {
return std::find_if(
- descriptors.begin(), descriptors.end(),
- [name_and_model](const VideoCaptureDeviceDescriptor& descriptor) {
- return descriptor.capture_api !=
- VideoCaptureApi::WIN_DIRECT_SHOW &&
- name_and_model == descriptor.GetNameAndModel();
- }) != descriptors.end();
-}
-
-void GetDeviceDescriptorsMediaFoundation(
- VideoCaptureDeviceFactoryWin::MFEnumDeviceSourcesFunc
- mf_enum_device_sources_func,
- VideoCaptureDeviceFactoryWin::DirectShowEnumDevicesFunc
- direct_show_enum_devices_func,
- Descriptors* device_descriptors) {
- DVLOG(1) << " GetDeviceDescriptorsMediaFoundation";
- // Recent non-RGB (depth, IR) cameras could be marked as sensor cameras in
- // driver inf file and MFEnumDeviceSources enumerates them only if attribute
- // KSCATEGORY_SENSOR_CAMERA is supplied. We enumerate twice. As it is possible
- // that SENSOR_CAMERA is also in VIDEO_CAMERA category, we prevent duplicate
- // entries. https://crbug.com/807293
- for (const auto& api_attributes : kMfAttributes) {
- ScopedCoMem<IMFActivate*> devices;
- UINT32 count;
- if (!EnumerateVideoDevicesMediaFoundation(api_attributes.second,
- mf_enum_device_sources_func,
- &devices, &count)) {
- return;
- }
- const bool list_was_empty = !device_descriptors->size();
- for (UINT32 i = 0; i < count; ++i) {
- ScopedCoMem<wchar_t> name;
- UINT32 name_size;
- HRESULT hr = devices[i]->GetAllocatedString(
- MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &name_size);
- if (SUCCEEDED(hr)) {
- ScopedCoMem<wchar_t> id;
- UINT32 id_size;
- hr = devices[i]->GetAllocatedString(
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &id,
- &id_size);
- if (SUCCEEDED(hr)) {
- const std::string device_id =
- base::SysWideToUTF8(std::wstring(id, id_size));
- const std::string model_id = GetDeviceModelId(device_id);
- if (list_was_empty ||
- !DescriptorsContainDeviceId(*device_descriptors, device_id)) {
- device_descriptors->emplace_back(
- base::SysWideToUTF8(std::wstring(name, name_size)), device_id,
- model_id, api_attributes.first);
- }
- }
- }
- DLOG_IF(ERROR, FAILED(hr)) << "GetAllocatedString failed: "
- << logging::SystemErrorCodeToString(hr);
- devices[i]->Release();
- }
- }
-
- // DirectShow virtual cameras are not supported by MediaFoundation.
- // To overcome this, based on device name and model, we append
- // missing DirectShow device descriptor to full descriptors list.
- Descriptors direct_show_descriptors;
- GetDeviceDescriptorsDirectShow(direct_show_enum_devices_func,
- &direct_show_descriptors);
- for (const auto& direct_show_descriptor : direct_show_descriptors) {
- // DirectShow can produce two descriptors with same name and model.
- // If those descriptors are missing from MediaFoundation, we want them both
- // appended to the full descriptors list.
- // Therefore, we prevent duplication by always comparing a DirectShow
- // descriptor with a MediaFoundation one.
- if (DescriptorsContainNonDirectShowNameAndModel(
- *device_descriptors, direct_show_descriptor.GetNameAndModel())) {
- continue;
- }
- device_descriptors->emplace_back(direct_show_descriptor);
- }
+ descriptors.begin(), descriptors.end(),
+ [name_and_model](const VideoCaptureDeviceDescriptor& descriptor) {
+ return descriptor.capture_api != VideoCaptureApi::WIN_DIRECT_SHOW &&
+ name_and_model == descriptor.GetNameAndModel();
+ });
}
void GetDeviceSupportedFormatsDirectShow(const Descriptor& descriptor,
@@ -488,6 +348,12 @@ VideoCaptureDeviceFactoryWin::VideoCaptureDeviceFactoryWin()
PlatformSupportsMediaFoundation() ? MFEnumDeviceSources : nullptr;
direct_show_enum_devices_func_ =
base::BindRepeating(&EnumerateDirectShowDevices);
+
+ mf_get_supported_formats_func_ =
+ base::BindRepeating(&GetDeviceSupportedFormatsMediaFoundation);
+ direct_show_get_supported_formats_func_ =
+ base::BindRepeating(&GetDeviceSupportedFormatsDirectShow);
+
if (!PlatformSupportsMediaFoundation()) {
use_media_foundation_ = false;
LogVideoCaptureWinBackendUsed(
@@ -544,12 +410,10 @@ void VideoCaptureDeviceFactoryWin::GetDeviceDescriptors(
DCHECK(thread_checker_.CalledOnValidThread());
if (use_media_foundation_) {
- GetDeviceDescriptorsMediaFoundation(mf_enum_device_sources_func_,
- direct_show_enum_devices_func_,
- device_descriptors);
+ GetDeviceDescriptorsMediaFoundation(device_descriptors);
+ AugmentDescriptorListWithDirectShowOnlyDevices(device_descriptors);
} else {
- GetDeviceDescriptorsDirectShow(direct_show_enum_devices_func_,
- device_descriptors);
+ GetDeviceDescriptorsDirectShow(device_descriptors);
}
}
@@ -717,14 +581,176 @@ void VideoCaptureDeviceFactoryWin::DeviceInfoReady(
base::ResetAndReturn(&result_callback).Run(std::move(device_descriptors));
}
+void VideoCaptureDeviceFactoryWin::GetDeviceDescriptorsMediaFoundation(
+ Descriptors* device_descriptors) {
+ DVLOG(1) << " GetDeviceDescriptorsMediaFoundation";
+ // Recent non-RGB (depth, IR) cameras could be marked as sensor cameras in
+ // driver inf file and MFEnumDeviceSources enumerates them only if attribute
+ // KSCATEGORY_SENSOR_CAMERA is supplied. We enumerate twice. As it is possible
+ // that SENSOR_CAMERA is also in VIDEO_CAMERA category, we prevent duplicate
+ // entries. https://crbug.com/807293
+ for (const auto& api_attributes : kMfAttributes) {
+ ScopedCoMem<IMFActivate*> devices;
+ UINT32 count;
+ if (!EnumerateVideoDevicesMediaFoundation(api_attributes.second, &devices,
+ &count)) {
+ return;
+ }
+ const bool list_was_empty = !device_descriptors->size();
+ for (UINT32 i = 0; i < count; ++i) {
+ ScopedCoMem<wchar_t> name;
+ UINT32 name_size;
+ HRESULT hr = devices[i]->GetAllocatedString(
+ MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &name_size);
+ if (SUCCEEDED(hr)) {
+ ScopedCoMem<wchar_t> id;
+ UINT32 id_size;
+ hr = devices[i]->GetAllocatedString(
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &id,
+ &id_size);
+ if (SUCCEEDED(hr)) {
+ const std::string device_id =
+ base::SysWideToUTF8(std::wstring(id, id_size));
+ const std::string model_id = GetDeviceModelId(device_id);
+ if (list_was_empty ||
+ !DescriptorsContainDeviceId(*device_descriptors, device_id)) {
+ device_descriptors->emplace_back(
+ base::SysWideToUTF8(std::wstring(name, name_size)), device_id,
+ model_id, api_attributes.first);
+ }
+ }
+ }
+ DLOG_IF(ERROR, FAILED(hr)) << "GetAllocatedString failed: "
+ << logging::SystemErrorCodeToString(hr);
+ devices[i]->Release();
+ }
+ }
+}
+
+// Adds descriptors that are only reported by the DirectShow API.
+// Replaces MediaFoundation descriptors with corresponding DirectShow
+// ones if the MediaFoundation one has no supported formats,
+// but the DirectShow one does.
+void VideoCaptureDeviceFactoryWin::
+ AugmentDescriptorListWithDirectShowOnlyDevices(
+ VideoCaptureDeviceDescriptors* device_descriptors) {
+ // DirectShow virtual cameras are not supported by MediaFoundation.
+ // To overcome this, based on device name and model, we append
+ // missing DirectShow device descriptor to full descriptors list.
+ Descriptors direct_show_descriptors;
+ GetDeviceDescriptorsDirectShow(&direct_show_descriptors);
+ for (const auto& direct_show_descriptor : direct_show_descriptors) {
+ // DirectShow can produce two descriptors with same name and model.
+ // If those descriptors are missing from MediaFoundation, we want them both
+ // appended to the full descriptors list.
+ // Therefore, we prevent duplication by always comparing a DirectShow
+ // descriptor with a MediaFoundation one.
+
+ Descriptors::const_iterator matching_non_direct_show_descriptor =
+ FindNonDirectShowDescriptorByNameAndModel(
+ *device_descriptors, direct_show_descriptor.GetNameAndModel());
+
+ // Devices like the Pinnacle Dazzle, appear both in DirectShow and
+ // MediaFoundation. In MediaFoundation, they will have no supported video
+ // format while in DirectShow they will have at least one video format.
+ // Therefore, we must prioritize the MediaFoundation descriptor if it has at
+ // least one supported format
+ if (matching_non_direct_show_descriptor != device_descriptors->end()) {
+ if (GetNumberOfSupportedFormats(*matching_non_direct_show_descriptor) > 0)
+ continue;
+ if (GetNumberOfSupportedFormats(direct_show_descriptor) == 0)
+ continue;
+ device_descriptors->erase(matching_non_direct_show_descriptor);
+ }
+ device_descriptors->emplace_back(direct_show_descriptor);
+ }
+}
+
+bool VideoCaptureDeviceFactoryWin::EnumerateVideoDevicesMediaFoundation(
+ const std::vector<std::pair<GUID, GUID>>& attributes_data,
+ IMFActivate*** devices,
+ UINT32* count) {
+ ComPtr<IMFAttributes> attributes;
+ if (!PrepareVideoCaptureAttributesMediaFoundation(
+ attributes_data, attributes_data.size(), attributes.GetAddressOf())) {
+ return false;
+ }
+ return SUCCEEDED(
+ mf_enum_device_sources_func_(attributes.Get(), devices, count));
+}
+
+void VideoCaptureDeviceFactoryWin::GetDeviceDescriptorsDirectShow(
+ Descriptors* device_descriptors) {
+ DCHECK(device_descriptors);
+ DVLOG(1) << __func__;
+
+ ComPtr<IEnumMoniker> enum_moniker;
+ HRESULT hr = direct_show_enum_devices_func_.Run(enum_moniker.GetAddressOf());
+ // CreateClassEnumerator returns S_FALSE on some Windows OS
+ // when no camera exist. Therefore the FAILED macro can't be used.
+ if (hr != S_OK)
+ return;
+
+ // Enumerate all video capture devices.
+ for (ComPtr<IMoniker> moniker;
+ enum_moniker->Next(1, moniker.GetAddressOf(), NULL) == S_OK;
+ moniker.Reset()) {
+ ComPtr<IPropertyBag> prop_bag;
+ hr = moniker->BindToStorage(0, 0, IID_PPV_ARGS(&prop_bag));
+ if (FAILED(hr))
+ continue;
+
+ // Find the description or friendly name.
+ ScopedVariant name;
+ hr = prop_bag->Read(L"Description", name.Receive(), 0);
+ if (FAILED(hr))
+ hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0);
+
+ if (FAILED(hr) || name.type() != VT_BSTR)
+ continue;
+
+ const std::string device_name(base::SysWideToUTF8(V_BSTR(name.ptr())));
+ if (IsDeviceBlackListed(device_name))
+ continue;
+
+ name.Reset();
+ hr = prop_bag->Read(L"DevicePath", name.Receive(), 0);
+ std::string id;
+ if (FAILED(hr) || name.type() != VT_BSTR) {
+ id = device_name;
+ } else {
+ DCHECK_EQ(name.type(), VT_BSTR);
+ id = base::SysWideToUTF8(V_BSTR(name.ptr()));
+ }
+
+ const std::string model_id = GetDeviceModelId(id);
+
+ device_descriptors->emplace_back(device_name, id, model_id,
+ VideoCaptureApi::WIN_DIRECT_SHOW);
+ }
+}
+
+int VideoCaptureDeviceFactoryWin::GetNumberOfSupportedFormats(
+ const Descriptor& device) {
+ VideoCaptureFormats formats;
+ GetApiSpecificSupportedFormats(device, &formats);
+ return formats.size();
+}
+
+void VideoCaptureDeviceFactoryWin::GetApiSpecificSupportedFormats(
+ const Descriptor& device,
+ VideoCaptureFormats* formats) {
+ if (device.capture_api != VideoCaptureApi::WIN_DIRECT_SHOW)
+ mf_get_supported_formats_func_.Run(device, formats);
+ else
+ direct_show_get_supported_formats_func_.Run(device, formats);
+}
+
void VideoCaptureDeviceFactoryWin::GetSupportedFormats(
const Descriptor& device,
VideoCaptureFormats* formats) {
DCHECK(thread_checker_.CalledOnValidThread());
- if (use_media_foundation_)
- GetDeviceSupportedFormatsMediaFoundation(device, formats);
- else
- GetDeviceSupportedFormatsDirectShow(device, formats);
+ GetApiSpecificSupportedFormats(device, formats);
}
// static
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.h b/chromium/media/capture/video/win/video_capture_device_factory_win.h
index f825be083a5..3248a0cf37d 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.h
@@ -32,6 +32,9 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
using MFEnumDeviceSourcesFunc = decltype(&MFEnumDeviceSources);
using DirectShowEnumDevicesFunc =
base::RepeatingCallback<HRESULT(IEnumMoniker**)>;
+ using GetSupportedFormatsFunc =
+ base::RepeatingCallback<void(const VideoCaptureDeviceDescriptor&,
+ VideoCaptureFormats*)>;
std::unique_ptr<VideoCaptureDevice> CreateDevice(
const VideoCaptureDeviceDescriptor& device_descriptor) override;
@@ -55,6 +58,14 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
DirectShowEnumDevicesFunc func) {
direct_show_enum_devices_func_ = func;
}
+ void set_mf_get_supported_formats_func_for_testing(
+ GetSupportedFormatsFunc func) {
+ mf_get_supported_formats_func_ = func;
+ }
+ void set_direct_show_get_supported_formats_func_for_testing(
+ GetSupportedFormatsFunc func) {
+ direct_show_get_supported_formats_func_ = func;
+ }
private:
void EnumerateDevicesUWP(
@@ -67,6 +78,20 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
void DeviceInfoReady(
std::unique_ptr<VideoCaptureDeviceDescriptors> device_descriptors,
DeviceDescriptorsCallback result_callback);
+ void GetDeviceDescriptorsMediaFoundation(
+ VideoCaptureDeviceDescriptors* device_descriptors);
+ void AugmentDescriptorListWithDirectShowOnlyDevices(
+ VideoCaptureDeviceDescriptors* device_descriptors);
+ bool EnumerateVideoDevicesMediaFoundation(
+ const std::vector<std::pair<GUID, GUID>>& attributes_data,
+ IMFActivate*** devices,
+ UINT32* count);
+ void GetDeviceDescriptorsDirectShow(
+ VideoCaptureDeviceDescriptors* device_descriptors);
+ int GetNumberOfSupportedFormats(const VideoCaptureDeviceDescriptor& device);
+ void GetApiSpecificSupportedFormats(
+ const VideoCaptureDeviceDescriptor& device,
+ VideoCaptureFormats* formats);
bool use_media_foundation_;
// In production code, when Media Foundation libraries are available,
@@ -75,6 +100,9 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
MFEnumDeviceSourcesFunc mf_enum_device_sources_func_ = nullptr;
DirectShowEnumDevicesFunc direct_show_enum_devices_func_;
+ GetSupportedFormatsFunc mf_get_supported_formats_func_;
+ GetSupportedFormatsFunc direct_show_get_supported_formats_func_;
+
// For calling WinRT methods on a COM initiated thread.
base::Thread com_thread_;
scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
index 3e4ea914dad..80b5532a6a0 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
@@ -30,6 +30,12 @@ const wchar_t* kMFDeviceName1 = L"Device 1";
const wchar_t* kMFDeviceId2 = L"\\\\?\\usb#vid_0002&pid_0002&mi_00";
const wchar_t* kMFDeviceName2 = L"Device 2";
+const wchar_t* kMFDeviceId5 = L"\\\\?\\usb#vid_0005&pid_0005&mi_00";
+const wchar_t* kMFDeviceName5 = L"Dazzle";
+
+void GetMFSupportedFormats(const VideoCaptureDeviceDescriptor& device,
+ VideoCaptureFormats* formats) {}
+
// DirectShow devices
const wchar_t* kDirectShowDeviceId0 = L"\\\\?\\usb#vid_0000&pid_0000&mi_00";
const wchar_t* kDirectShowDeviceName0 = L"Device 0";
@@ -43,6 +49,17 @@ const wchar_t* kDirectShowDeviceName3 = L"Virtual Camera";
const wchar_t* kDirectShowDeviceId4 = L"Virtual Camera 4";
const wchar_t* kDirectShowDeviceName4 = L"Virtual Camera";
+const wchar_t* kDirectShowDeviceId5 = L"\\\\?\\usb#vid_0005&pid_0005&mi_00#5";
+const wchar_t* kDirectShowDeviceName5 = L"Dazzle";
+
+void GetDirectShowSupportedFormats(const VideoCaptureDeviceDescriptor& device,
+ VideoCaptureFormats* formats) {
+ if (device.device_id == base::SysWideToUTF8(kDirectShowDeviceId5)) {
+ VideoCaptureFormat arbitrary_format;
+ formats->emplace_back(arbitrary_format);
+ }
+}
+
using iterator = VideoCaptureDeviceDescriptors::const_iterator;
iterator FindDescriptorInRange(iterator begin,
iterator end,
@@ -408,7 +425,8 @@ HRESULT __stdcall MockMFEnumDeviceSources(IMFAttributes* attributes,
MockMFActivate* mock_devices[] = {
new MockMFActivate(kMFDeviceId0, kMFDeviceName0, true, false),
new MockMFActivate(kMFDeviceId1, kMFDeviceName1, true, true),
- new MockMFActivate(kMFDeviceId2, kMFDeviceName2, false, true)};
+ new MockMFActivate(kMFDeviceId2, kMFDeviceName2, false, true),
+ new MockMFActivate(kMFDeviceId5, kMFDeviceName5, true, false)};
// Iterate once to get the match count and check for errors.
*count = 0U;
HRESULT hr;
@@ -437,7 +455,8 @@ HRESULT EnumerateStubDirectShowDevices(IEnumMoniker** enum_moniker) {
new StubMoniker(kDirectShowDeviceId0, kDirectShowDeviceName0),
new StubMoniker(kDirectShowDeviceId1, kDirectShowDeviceName1),
new StubMoniker(kDirectShowDeviceId3, kDirectShowDeviceName3),
- new StubMoniker(kDirectShowDeviceId4, kDirectShowDeviceName4)};
+ new StubMoniker(kDirectShowDeviceId4, kDirectShowDeviceName4),
+ new StubMoniker(kDirectShowDeviceId5, kDirectShowDeviceName5)};
StubEnumMoniker* stub_enum_moniker = new StubEnumMoniker();
for (StubMoniker* moniker : monikers)
@@ -456,6 +475,13 @@ class VideoCaptureDeviceFactoryWinTest : public ::testing::Test {
: media_foundation_supported_(
VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation()) {}
+ void SetUp() override {
+ factory_.set_mf_get_supported_formats_func_for_testing(
+ base::BindRepeating(&GetMFSupportedFormats));
+ factory_.set_direct_show_get_supported_formats_func_for_testing(
+ base::BindRepeating(&GetDirectShowSupportedFormats));
+ }
+
bool ShouldSkipMFTest() {
if (media_foundation_supported_)
return false;
@@ -470,7 +496,10 @@ class VideoCaptureDeviceFactoryWinTest : public ::testing::Test {
class VideoCaptureDeviceFactoryMFWinTest
: public VideoCaptureDeviceFactoryWinTest {
- void SetUp() override { factory_.set_use_media_foundation_for_testing(true); }
+ void SetUp() override {
+ VideoCaptureDeviceFactoryWinTest::SetUp();
+ factory_.set_use_media_foundation_for_testing(true);
+ }
};
TEST_F(VideoCaptureDeviceFactoryMFWinTest, GetDeviceDescriptors) {
@@ -482,7 +511,7 @@ TEST_F(VideoCaptureDeviceFactoryMFWinTest, GetDeviceDescriptors) {
base::BindRepeating(&EnumerateStubDirectShowDevices));
VideoCaptureDeviceDescriptors descriptors;
factory_.GetDeviceDescriptors(&descriptors);
- EXPECT_EQ(descriptors.size(), 5U);
+ EXPECT_EQ(descriptors.size(), 6U);
for (auto it = descriptors.begin(); it != descriptors.end(); it++) {
// Verify that there are no duplicates.
EXPECT_EQ(FindDescriptorInRange(descriptors.begin(), it, it->device_id),
@@ -517,6 +546,15 @@ TEST_F(VideoCaptureDeviceFactoryMFWinTest, GetDeviceDescriptors) {
EXPECT_NE(it, descriptors.end());
EXPECT_EQ(it->capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
EXPECT_EQ(it->display_name(), base::SysWideToUTF8(kDirectShowDeviceName4));
+
+ // Devices that are listed in MediaFoundation but only report supported
+ // formats in DirectShow are expected to get enumerated with
+ // VideoCaptureApi::WIN_DIRECT_SHOW
+ it = FindDescriptorInRange(descriptors.begin(), descriptors.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId5));
+ EXPECT_NE(it, descriptors.end());
+ EXPECT_EQ(it->capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->display_name(), base::SysWideToUTF8(kDirectShowDeviceName5));
}
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
index cc7c0deecaa..bcccee0b0aa 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
@@ -34,9 +34,15 @@ class MockClient : public VideoCaptureDevice::Client {
base::TimeDelta timestamp,
int frame_feedback_id = 0) override {}
- MOCK_METHOD4(
- ReserveOutputBuffer,
- Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+ void OnIncomingCapturedGfxBuffer(gfx::GpuMemoryBuffer* buffer,
+ const VideoCaptureFormat& frame_format,
+ int clockwise_rotation,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ int frame_feedback_id = 0) override {}
+
+ MOCK_METHOD3(ReserveOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, int));
void OnIncomingCapturedBuffer(Buffer buffer,
const VideoCaptureFormat& format,
@@ -51,9 +57,8 @@ class MockClient : public VideoCaptureDevice::Client {
gfx::Rect visible_rect,
const VideoFrameMetadata& additional_metadata) override {}
- MOCK_METHOD4(
- ResurrectLastOutputBuffer,
- Buffer(const gfx::Size&, VideoPixelFormat, VideoPixelStorage, int));
+ MOCK_METHOD3(ResurrectLastOutputBuffer,
+ Buffer(const gfx::Size&, VideoPixelFormat, int));
MOCK_METHOD2(OnError, void(const base::Location&, const std::string&));
@@ -1271,4 +1276,4 @@ TEST_P(DepthCameraDeviceMFWinTest, AllocateAndStartDepthCamera) {
device_->AllocateAndStart(video_capture_params, std::move(client_));
}
-} // namespace media \ No newline at end of file
+} // namespace media
diff --git a/chromium/media/capture/video_capture_types.cc b/chromium/media/capture/video_capture_types.cc
index 1db3a957c1c..b6fc66e20c3 100644
--- a/chromium/media/capture/video_capture_types.cc
+++ b/chromium/media/capture/video_capture_types.cc
@@ -21,26 +21,14 @@ static VideoPixelFormat const kSupportedCapturePixelFormats[] = {
};
VideoCaptureFormat::VideoCaptureFormat()
- : frame_rate(0.0f),
- pixel_format(PIXEL_FORMAT_UNKNOWN),
- pixel_storage(VideoPixelStorage::CPU) {}
+ : frame_rate(0.0f), pixel_format(PIXEL_FORMAT_UNKNOWN) {}
VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size,
float frame_rate,
VideoPixelFormat pixel_format)
: frame_size(frame_size),
frame_rate(frame_rate),
- pixel_format(pixel_format),
- pixel_storage(VideoPixelStorage::CPU) {}
-
-VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size,
- float frame_rate,
- VideoPixelFormat pixel_format,
- VideoPixelStorage pixel_storage)
- : frame_size(frame_size),
- frame_rate(frame_rate),
- pixel_format(pixel_format),
- pixel_storage(pixel_storage) {}
+ pixel_format(pixel_format) {}
bool VideoCaptureFormat::IsValid() const {
return (frame_size.width() < media::limits::kMaxDimension) &&
@@ -62,22 +50,8 @@ std::string VideoCaptureFormat::ToString(const VideoCaptureFormat& format) {
// Beware: This string is parsed by manager.js:parseVideoCaptureFormat_,
// take care when changing the formatting.
return base::StringPrintf(
- "(%s)@%.3ffps, pixel format: %s, storage: %s",
- format.frame_size.ToString().c_str(), format.frame_rate,
- VideoPixelFormatToString(format.pixel_format).c_str(),
- PixelStorageToString(format.pixel_storage).c_str());
-}
-
-// static
-std::string VideoCaptureFormat::PixelStorageToString(
- VideoPixelStorage storage) {
- switch (storage) {
- case VideoPixelStorage::CPU:
- return "CPU";
- }
- NOTREACHED() << "Invalid VideoPixelStorage provided: "
- << static_cast<int>(storage);
- return std::string();
+ "(%s)@%.3ffps, pixel format: %s", format.frame_size.ToString().c_str(),
+ format.frame_rate, VideoPixelFormatToString(format.pixel_format).c_str());
}
// static
diff --git a/chromium/media/capture/video_capture_types.h b/chromium/media/capture/video_capture_types.h
index 7ddad1a40b2..bb43a319454 100644
--- a/chromium/media/capture/video_capture_types.h
+++ b/chromium/media/capture/video_capture_types.h
@@ -20,13 +20,6 @@ namespace media {
// shared with device manager.
typedef int VideoCaptureSessionId;
-// Storage type for the pixels.
-// TODO(chfremer): https://crbug.com/788798 Extend or remove this enum.
-enum class VideoPixelStorage {
- CPU,
- MAX = CPU,
-};
-
// Policies for capture devices that have source content that varies in size.
// It is up to the implementation how the captured content will be transformed
// (e.g., scaling and/or letterboxing) in order to produce video frames that
@@ -82,13 +75,8 @@ struct CAPTURE_EXPORT VideoCaptureFormat {
VideoCaptureFormat(const gfx::Size& frame_size,
float frame_rate,
VideoPixelFormat pixel_format);
- VideoCaptureFormat(const gfx::Size& frame_size,
- float frame_rate,
- VideoPixelFormat pixel_format,
- VideoPixelStorage pixel_storage);
static std::string ToString(const VideoCaptureFormat& format);
- static std::string PixelStorageToString(VideoPixelStorage storage);
// Compares the priority of the pixel formats. Returns true if |lhs| is the
// preferred pixel format in comparison with |rhs|. Returns false otherwise.
@@ -111,7 +99,6 @@ struct CAPTURE_EXPORT VideoCaptureFormat {
gfx::Size frame_size;
float frame_rate;
VideoPixelFormat pixel_format;
- VideoPixelStorage pixel_storage;
};
typedef std::vector<VideoCaptureFormat> VideoCaptureFormats;
diff --git a/chromium/media/cast/OWNERS b/chromium/media/cast/OWNERS
index 41c70ee8aee..6234a6a3dc5 100644
--- a/chromium/media/cast/OWNERS
+++ b/chromium/media/cast/OWNERS
@@ -1,4 +1,4 @@
-hubbe@chromium.org
miu@chromium.org
+xjz@chromium.org
# COMPONENT: Internals>Cast>Streaming
diff --git a/chromium/media/cast/cast_sender_impl.cc b/chromium/media/cast/cast_sender_impl.cc
index 5f7d5475310..20411431b3d 100644
--- a/chromium/media/cast/cast_sender_impl.cc
+++ b/chromium/media/cast/cast_sender_impl.cc
@@ -8,7 +8,6 @@
#include "base/callback.h"
#include "base/logging.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "media/base/video_frame.h"
#include "media/cast/sender/video_frame_factory.h"
diff --git a/chromium/media/cast/net/cast_transport_config.h b/chromium/media/cast/net/cast_transport_config.h
index 81b7c655546..e4cc3135e40 100644
--- a/chromium/media/cast/net/cast_transport_config.h
+++ b/chromium/media/cast/net/cast_transport_config.h
@@ -69,11 +69,10 @@ struct EncodedFrame {
// Convenience accessors to data as an array of uint8_t elements.
const uint8_t* bytes() const {
- return reinterpret_cast<uint8_t*>(
- base::string_as_array(const_cast<std::string*>(&data)));
+ return reinterpret_cast<const uint8_t*>(base::data(data));
}
uint8_t* mutable_bytes() {
- return reinterpret_cast<uint8_t*>(base::string_as_array(&data));
+ return reinterpret_cast<uint8_t*>(base::data(data));
}
// Copies all data members except |data| to |dest|.
diff --git a/chromium/media/cast/net/pacing/mock_paced_packet_sender.h b/chromium/media/cast/net/pacing/mock_paced_packet_sender.h
index 1920ef23aea..0193ce89e16 100644
--- a/chromium/media/cast/net/pacing/mock_paced_packet_sender.h
+++ b/chromium/media/cast/net/pacing/mock_paced_packet_sender.h
@@ -14,7 +14,7 @@ namespace cast {
class MockPacedPacketSender : public PacedPacketSender {
public:
MockPacedPacketSender();
- virtual ~MockPacedPacketSender();
+ ~MockPacedPacketSender() override;
MOCK_METHOD1(SendPackets, bool(const SendPacketVector& packets));
MOCK_METHOD2(ResendPackets, bool(const SendPacketVector& packets,
diff --git a/chromium/media/cast/net/pacing/paced_sender.cc b/chromium/media/cast/net/pacing/paced_sender.cc
index 4129778a097..43bbbb6dc1e 100644
--- a/chromium/media/cast/net/pacing/paced_sender.cc
+++ b/chromium/media/cast/net/pacing/paced_sender.cc
@@ -6,7 +6,6 @@
#include "base/big_endian.h"
#include "base/bind.h"
-#include "base/message_loop/message_loop.h"
#include "base/numerics/safe_conversions.h"
namespace media {
diff --git a/chromium/media/cast/net/rtp/mock_rtp_payload_feedback.h b/chromium/media/cast/net/rtp/mock_rtp_payload_feedback.h
index ab329d19388..90c09437ecc 100644
--- a/chromium/media/cast/net/rtp/mock_rtp_payload_feedback.h
+++ b/chromium/media/cast/net/rtp/mock_rtp_payload_feedback.h
@@ -14,7 +14,7 @@ namespace cast {
class MockRtpPayloadFeedback : public RtpPayloadFeedback {
public:
MockRtpPayloadFeedback();
- virtual ~MockRtpPayloadFeedback();
+ ~MockRtpPayloadFeedback() override;
MOCK_METHOD1(CastFeedback, void(const RtcpCastMessage& cast_feedback));
};
diff --git a/chromium/media/cast/net/udp_transport_impl.cc b/chromium/media/cast/net/udp_transport_impl.cc
index 22cae9143e4..52273116da0 100644
--- a/chromium/media/cast/net/udp_transport_impl.cc
+++ b/chromium/media/cast/net/udp_transport_impl.cc
@@ -10,7 +10,6 @@
#include "base/bind.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "build/build_config.h"
#include "media/cast/net/udp_packet_pipe.h"
#include "net/base/io_buffer.h"
diff --git a/chromium/media/cast/receiver/audio_decoder_unittest.cc b/chromium/media/cast/receiver/audio_decoder_unittest.cc
index 4e92199778a..ced088fd41d 100644
--- a/chromium/media/cast/receiver/audio_decoder_unittest.cc
+++ b/chromium/media/cast/receiver/audio_decoder_unittest.cc
@@ -7,12 +7,12 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
-#include "base/cfi_buildflags.h"
#include "base/macros.h"
#include "base/synchronization/condition_variable.h"
#include "base/synchronization/lock.h"
#include "base/sys_byteorder.h"
#include "base/time/time.h"
+#include "build/build_config.h"
#include "media/cast/cast_config.h"
#include "media/cast/receiver/audio_decoder.h"
#include "media/cast/test/utility/audio_utility.h"
@@ -238,7 +238,7 @@ TEST_P(AudioDecoderTest, RecoversFromDroppedFrames) {
WaitForAllAudioToBeDecoded();
}
-#if !BUILDFLAG(CFI_CAST_CHECK) // https://crbug.com/831999
+#if !defined(OS_ANDROID) // https://crbug.com/831999
INSTANTIATE_TEST_CASE_P(
AudioDecoderTestScenarios,
AudioDecoderTest,
diff --git a/chromium/media/cast/receiver/cast_receiver_impl.cc b/chromium/media/cast/receiver/cast_receiver_impl.cc
index ac2d3f95df0..01e6cb4bc92 100644
--- a/chromium/media/cast/receiver/cast_receiver_impl.cc
+++ b/chromium/media/cast/receiver/cast_receiver_impl.cc
@@ -13,7 +13,6 @@
#include "base/callback.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
-#include "base/message_loop/message_loop.h"
#include "base/trace_event/trace_event.h"
#include "media/cast/net/rtcp/rtcp_utility.h"
#include "media/cast/receiver/audio_decoder.h"
diff --git a/chromium/media/cast/receiver/frame_receiver.cc b/chromium/media/cast/receiver/frame_receiver.cc
index d700538a673..aa6c5832073 100644
--- a/chromium/media/cast/receiver/frame_receiver.cc
+++ b/chromium/media/cast/receiver/frame_receiver.cc
@@ -10,7 +10,6 @@
#include "base/big_endian.h"
#include "base/bind.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/numerics/safe_conversions.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
diff --git a/chromium/media/cast/sender/audio_encoder.cc b/chromium/media/cast/sender/audio_encoder.cc
index 05e368a0c81..30319124815 100644
--- a/chromium/media/cast/sender/audio_encoder.cc
+++ b/chromium/media/cast/sender/audio_encoder.cc
@@ -292,8 +292,7 @@ class AudioEncoder::OpusImpl : public AudioEncoder::ImplBase {
out->resize(kOpusMaxPayloadSize);
const opus_int32 result = opus_encode_float(
opus_encoder_, buffer_.get(), samples_per_frame_,
- reinterpret_cast<uint8_t*>(base::string_as_array(out)),
- kOpusMaxPayloadSize);
+ reinterpret_cast<uint8_t*>(base::data(*out)), kOpusMaxPayloadSize);
if (result > 1) {
out->resize(result);
return true;
diff --git a/chromium/media/cast/sender/audio_sender.cc b/chromium/media/cast/sender/audio_sender.cc
index bc0a577d759..9dbc1c6a08b 100644
--- a/chromium/media/cast/sender/audio_sender.cc
+++ b/chromium/media/cast/sender/audio_sender.cc
@@ -8,7 +8,6 @@
#include "base/bind.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "media/cast/common/rtp_time.h"
#include "media/cast/net/cast_transport_config.h"
#include "media/cast/sender/audio_encoder.h"
diff --git a/chromium/media/cast/sender/external_video_encoder.cc b/chromium/media/cast/sender/external_video_encoder.cc
index 6f839e7a834..305af37053b 100644
--- a/chromium/media/cast/sender/external_video_encoder.cc
+++ b/chromium/media/cast/sender/external_video_encoder.cc
@@ -12,7 +12,6 @@
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/shared_memory.h"
-#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
diff --git a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
index cd4bbca5532..41ae937af49 100644
--- a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
+++ b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
@@ -5,6 +5,7 @@
#include <stdint.h>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/command_line.h"
#include "base/containers/queue.h"
#include "base/macros.h"
@@ -136,7 +137,7 @@ class EndToEndFrameChecker
base::Bind(&SaveDecoderInitResult, &decoder_init_result),
base::Bind(&EndToEndFrameChecker::CompareFrameWithExpected,
base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(decoder_init_result);
}
diff --git a/chromium/media/cast/sender/video_encoder_impl.cc b/chromium/media/cast/sender/video_encoder_impl.cc
index 41651729690..7993895f93a 100644
--- a/chromium/media/cast/sender/video_encoder_impl.cc
+++ b/chromium/media/cast/sender/video_encoder_impl.cc
@@ -8,7 +8,6 @@
#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "media/base/video_frame.h"
#include "media/cast/sender/fake_software_video_encoder.h"
#include "media/cast/sender/vp8_encoder.h"
diff --git a/chromium/media/cdm/BUILD.gn b/chromium/media/cdm/BUILD.gn
index eb542b24082..f3dbef75fb6 100644
--- a/chromium/media/cdm/BUILD.gn
+++ b/chromium/media/cdm/BUILD.gn
@@ -26,10 +26,16 @@ source_set("cdm") {
]
sources = [
+ "aes_cbc_crypto.cc",
+ "aes_cbc_crypto.h",
"aes_decryptor.cc",
"aes_decryptor.h",
+ "cbcs_decryptor.cc",
+ "cbcs_decryptor.h",
"cdm_context_ref_impl.cc",
"cdm_context_ref_impl.h",
+ "cenc_decryptor.cc",
+ "cenc_decryptor.h",
# TODO(crbug.com/676224): Move this to |enable_library_cdms| block below
# when EnabledIf attribute is supported in mojom.
@@ -52,6 +58,7 @@ source_set("cdm") {
"//crypto",
"//media/base",
"//media/formats",
+ "//third_party/boringssl",
"//ui/gfx/geometry",
"//url",
]
@@ -139,15 +146,20 @@ static_library("cdm_paths") {
source_set("unit_tests") {
testonly = true
sources = [
+ "aes_cbc_crypto_unittest.cc",
"aes_decryptor_unittest.cc",
+ "cbcs_decryptor_unittest.cc",
+ "cenc_decryptor_unittest.cc",
"json_web_key_unittest.cc",
]
deps = [
"//base/test:test_support",
+ "//crypto",
"//media:test_support",
"//testing/gmock",
"//testing/gtest",
+ "//third_party/boringssl",
"//ui/gfx:test_support",
"//url",
]
@@ -160,7 +172,6 @@ source_set("unit_tests") {
data_deps = []
- # If ExternalClearKey is built, we can test CdmAdapter.
if (enable_library_cdms) {
sources += [
"cdm_adapter_unittest.cc",
@@ -179,6 +190,8 @@ source_set("unit_tests") {
deps += [
":cdm_api",
":cdm_paths",
+ "//media/cdm/library_cdm:cdm_host_proxy",
+ "//media/cdm/library_cdm:test_support",
]
}
diff --git a/chromium/media/cdm/DEPS b/chromium/media/cdm/DEPS
index b4addf5d8a8..c8585a64633 100644
--- a/chromium/media/cdm/DEPS
+++ b/chromium/media/cdm/DEPS
@@ -1,4 +1,5 @@
include_rules = [
"+components/crash/core/common/crash_key.h",
"+crypto",
+ "+third_party/boringssl/src/include",
]
diff --git a/chromium/media/cdm/aes_cbc_crypto.cc b/chromium/media/cdm/aes_cbc_crypto.cc
new file mode 100644
index 00000000000..b16f2c17cbe
--- /dev/null
+++ b/chromium/media/cdm/aes_cbc_crypto.cc
@@ -0,0 +1,99 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/aes_cbc_crypto.h"
+
+#include "base/logging.h"
+#include "base/numerics/safe_conversions.h"
+#include "crypto/openssl_util.h"
+#include "crypto/symmetric_key.h"
+#include "third_party/boringssl/src/include/openssl/aes.h"
+#include "third_party/boringssl/src/include/openssl/crypto.h"
+#include "third_party/boringssl/src/include/openssl/err.h"
+#include "third_party/boringssl/src/include/openssl/evp.h"
+
+// Notes on using OpenSSL:
+// https://www.openssl.org/docs/man1.1.0/crypto/EVP_DecryptUpdate.html
+// The documentation for EVP_DecryptUpdate() only states
+// "EVP_DecryptInit_ex(), EVP_DecryptUpdate() and EVP_DecryptFinal_ex()
+// are the corresponding decryption operations. EVP_DecryptFinal() will
+// return an error code if padding is enabled and the final block is not
+// correctly formatted. The parameters and restrictions are identical
+// to the encryption operations except that if padding is enabled ..."
+// As this implementation does not use padding, the last part should not be
+// an issue. However, there is no mention whether data can be decrypted
+// block-by-block or if all the data must be decrypted at once.
+//
+// The documentation for EVP_EncryptUpdate() (same page as above) states
+// "EVP_EncryptUpdate() encrypts inl bytes from the buffer in and writes
+// the encrypted version to out. This function can be called multiple times
+// to encrypt successive blocks of data."
+// Given that the EVP_Decrypt* methods have the same restrictions, the code
+// below assumes that EVP_DecryptUpdate() can be called on a block-by-block
+// basis. A test in aes_cbc_crypto_unittest.cc verifies this.
+
+namespace media {
+
+AesCbcCrypto::AesCbcCrypto() {
+ // Ensure the crypto library is initialized. CRYPTO_library_init may be
+ // safely called concurrently.
+ CRYPTO_library_init();
+ EVP_CIPHER_CTX_init(&ctx_);
+}
+
+AesCbcCrypto::~AesCbcCrypto() {
+ EVP_CIPHER_CTX_cleanup(&ctx_);
+}
+
+bool AesCbcCrypto::Initialize(const crypto::SymmetricKey& key,
+ base::span<const uint8_t> iv) {
+ crypto::OpenSSLErrStackTracer err_tracer(FROM_HERE);
+
+ // This uses AES-CBC-128, so the key must be 128 bits.
+ const EVP_CIPHER* cipher = EVP_aes_128_cbc();
+ const uint8_t* key_data = reinterpret_cast<const uint8_t*>(key.key().data());
+ if (key.key().length() != EVP_CIPHER_key_length(cipher)) {
+ DVLOG(1) << "Key length is incorrect.";
+ return false;
+ }
+
+ // |iv| must also be 128 bits.
+ if (iv.size_bytes() != EVP_CIPHER_iv_length(cipher)) {
+ DVLOG(1) << "IV length is incorrect.";
+ return false;
+ }
+
+ if (!EVP_DecryptInit_ex(&ctx_, cipher, nullptr, key_data, iv.data())) {
+ DVLOG(1) << "EVP_DecryptInit_ex() failed.";
+ return false;
+ }
+
+ if (!EVP_CIPHER_CTX_set_padding(&ctx_, 0)) {
+ DVLOG(1) << "EVP_CIPHER_CTX_set_padding() failed.";
+ return false;
+ }
+
+ return true;
+}
+
+bool AesCbcCrypto::Decrypt(base::span<const uint8_t> encrypted_data,
+ uint8_t* decrypted_data) {
+ crypto::OpenSSLErrStackTracer err_tracer(FROM_HERE);
+
+ if (encrypted_data.size_bytes() % EVP_CIPHER_CTX_block_size(&ctx_) != 0) {
+ DVLOG(1) << "Encrypted bytes not a multiple of block size.";
+ return false;
+ }
+
+ int out_length;
+ if (!EVP_DecryptUpdate(&ctx_, decrypted_data, &out_length,
+ encrypted_data.data(), encrypted_data.size_bytes())) {
+ DVLOG(1) << "EVP_DecryptUpdate() failed.";
+ return false;
+ }
+
+ return encrypted_data.size_bytes() == base::checked_cast<size_t>(out_length);
+}
+
+} // namespace media
diff --git a/chromium/media/cdm/aes_cbc_crypto.h b/chromium/media/cdm/aes_cbc_crypto.h
new file mode 100644
index 00000000000..b6e0ee5965f
--- /dev/null
+++ b/chromium/media/cdm/aes_cbc_crypto.h
@@ -0,0 +1,53 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CDM_AES_CBC_CRYPTO_H_
+#define MEDIA_CDM_AES_CBC_CRYPTO_H_
+
+#include <stdint.h>
+
+#include <string>
+
+#include "base/containers/span.h"
+#include "base/macros.h"
+#include "media/base/media_export.h"
+#include "third_party/boringssl/src/include/openssl/evp.h"
+
+namespace crypto {
+class SymmetricKey;
+}
+
+namespace media {
+
+// This class implements AES-CBC-128 decryption as described in the Advanced
+// Encryption Standard specified by AES [FIPS-197, https://www.nist.gov]
+// using 128-bit keys in Cipher Block Chaining mode, as specified in Block
+// Cipher Modes [NIST 800-38A, https://www.nist.gov].
+
+class MEDIA_EXPORT AesCbcCrypto {
+ public:
+ AesCbcCrypto();
+ ~AesCbcCrypto();
+
+ // Initializes the encryptor using |key| and |iv|. Returns false if either
+ // the key or the initialization vector cannot be used.
+ bool Initialize(const crypto::SymmetricKey& key,
+ base::span<const uint8_t> iv);
+
+ // Decrypts |encrypted_data| into |decrypted_data|. |encrypted_data| must be
+ // a multiple of the blocksize (128 bits), and |decrypted_data| must have
+ // enough space for |encrypted_data|.size(). Returns false if the decryption
+ // fails.
+ bool Decrypt(base::span<const uint8_t> encrypted_data,
+ uint8_t* decrypted_data);
+
+ private:
+ EVP_CIPHER_CTX ctx_;
+
+ DISALLOW_COPY_AND_ASSIGN(AesCbcCrypto);
+};
+
+} // namespace media
+
+#endif // MEDIA_CDM_AES_CBC_CRYPTO_H_
diff --git a/chromium/media/cdm/aes_cbc_crypto_unittest.cc b/chromium/media/cdm/aes_cbc_crypto_unittest.cc
new file mode 100644
index 00000000000..4b996017215
--- /dev/null
+++ b/chromium/media/cdm/aes_cbc_crypto_unittest.cc
@@ -0,0 +1,208 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/aes_cbc_crypto.h"
+
+#include <memory>
+
+#include "base/containers/span.h"
+#include "base/optional.h"
+#include "base/stl_util.h"
+#include "crypto/encryptor.h"
+#include "crypto/symmetric_key.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/decrypt_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/boringssl/src/include/openssl/aes.h"
+#include "third_party/boringssl/src/include/openssl/crypto.h"
+#include "third_party/boringssl/src/include/openssl/err.h"
+#include "third_party/boringssl/src/include/openssl/evp.h"
+
+namespace media {
+
+namespace {
+
+// Pattern decryption uses 16-byte blocks.
+constexpr size_t kBlockSize = 16;
+
+// Keys and IV have to be 128 bits.
+const uint8_t kKey1[] = {0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+ 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13};
+static_assert(base::size(kKey1) == 128 / 8, "kKey1 must be 128 bits");
+
+const uint8_t kKey2[] = {0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
+ 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b};
+static_assert(base::size(kKey2) == 128 / 8, "kKey2 must be 128 bits");
+
+const uint8_t kIv[] = {0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
+static_assert(base::size(kIv) == 128 / 8, "kIv must be 128 bits");
+
+const uint8_t kOneBlock[] = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
+ 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'};
+static_assert(base::size(kOneBlock) == kBlockSize, "kOneBlock not block sized");
+
+std::string MakeString(const std::vector<uint8_t>& chars) {
+ return std::string(chars.begin(), chars.end());
+}
+
+// Returns a std::vector<uint8_t> containing |count| copies of |input|.
+std::vector<uint8_t> Repeat(const std::vector<uint8_t>& input, size_t count) {
+ std::vector<uint8_t> result;
+ for (size_t i = 0; i < count; ++i)
+ result.insert(result.end(), input.begin(), input.end());
+ return result;
+}
+
+} // namespace
+
+class AesCbcCryptoTest : public testing::Test {
+ public:
+ AesCbcCryptoTest()
+ : key1_(crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES,
+ std::string(std::begin(kKey1), std::end(kKey1)))),
+ key2_(crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES,
+ std::string(std::begin(kKey2), std::end(kKey2)))),
+ iv_(std::begin(kIv), std::end(kIv)),
+ one_block_(std::begin(kOneBlock), std::end(kOneBlock)) {}
+
+ // Encrypt |original| using AES-CBC encryption with |key| and |iv|.
+ std::vector<uint8_t> Encrypt(const std::vector<uint8_t>& original,
+ const crypto::SymmetricKey& key,
+ base::span<const uint8_t> iv) {
+ // This code uses crypto::Encryptor to encrypt |original| rather than
+ // calling EVP_EncryptInit_ex() / EVP_EncryptUpdate() / etc. This is done
+ // for simplicity, as the crypto:: code wraps all the calls up nicely.
+ // However, for AES-CBC encryption, the crypto:: code does add padding to
+ // the output, which is simply stripped off.
+ crypto::Encryptor encryptor;
+ std::string iv_as_string(std::begin(iv), std::end(iv));
+ EXPECT_TRUE(encryptor.Init(&key, crypto::Encryptor::CBC, iv_as_string));
+
+ std::string ciphertext;
+ EXPECT_TRUE(encryptor.Encrypt(MakeString(original), &ciphertext));
+
+    // CBC encryption adds a block of padding at the end, so discard it.
+ EXPECT_GT(ciphertext.size(), original.size());
+ ciphertext.resize(original.size());
+
+ return std::vector<uint8_t>(ciphertext.begin(), ciphertext.end());
+ }
+
+ // Constants for testing.
+ std::unique_ptr<crypto::SymmetricKey> key1_;
+ std::unique_ptr<crypto::SymmetricKey> key2_;
+ base::span<const uint8_t> iv_;
+ const std::vector<uint8_t> one_block_;
+};
+
+TEST_F(AesCbcCryptoTest, OneBlock) {
+ auto encrypted_block = Encrypt(one_block_, *key1_, iv_);
+ EXPECT_EQ(kBlockSize, encrypted_block.size());
+
+ AesCbcCrypto crypto;
+ EXPECT_TRUE(crypto.Initialize(*key1_, iv_));
+
+ std::vector<uint8_t> output(encrypted_block.size());
+ EXPECT_TRUE(crypto.Decrypt(encrypted_block, output.data()));
+ EXPECT_EQ(output, one_block_);
+}
+
+TEST_F(AesCbcCryptoTest, WrongKey) {
+ auto encrypted_block = Encrypt(one_block_, *key1_, iv_);
+ EXPECT_EQ(kBlockSize, encrypted_block.size());
+
+ // Use |key2_| when trying to decrypt.
+ AesCbcCrypto crypto;
+ EXPECT_TRUE(crypto.Initialize(*key2_, iv_));
+
+ std::vector<uint8_t> output(encrypted_block.size());
+ EXPECT_TRUE(crypto.Decrypt(encrypted_block, output.data()));
+ EXPECT_NE(output, one_block_);
+}
+
+TEST_F(AesCbcCryptoTest, WrongIV) {
+ auto encrypted_block = Encrypt(one_block_, *key1_, iv_);
+ EXPECT_EQ(kBlockSize, encrypted_block.size());
+
+ // Use a different IV when trying to decrypt.
+ AesCbcCrypto crypto;
+ std::vector<uint8_t> alternate_iv(iv_.size(), 'a');
+ EXPECT_TRUE(crypto.Initialize(*key1_, alternate_iv));
+
+ std::vector<uint8_t> output(encrypted_block.size());
+ EXPECT_TRUE(crypto.Decrypt(encrypted_block, output.data()));
+ EXPECT_NE(output, one_block_);
+}
+
+TEST_F(AesCbcCryptoTest, PartialBlock) {
+ auto encrypted_block = Encrypt(one_block_, *key1_, iv_);
+ EXPECT_EQ(kBlockSize, encrypted_block.size());
+
+ AesCbcCrypto crypto;
+ EXPECT_TRUE(crypto.Initialize(*key2_, iv_));
+
+ // Try to decrypt less than a full block.
+ std::vector<uint8_t> output(encrypted_block.size());
+ EXPECT_FALSE(crypto.Decrypt(
+ base::make_span(encrypted_block).first(encrypted_block.size() - 5),
+ output.data()));
+}
+
+TEST_F(AesCbcCryptoTest, MultipleBlocks) {
+ // Encrypt 10 copies of |one_block_| together.
+ constexpr size_t kNumBlocksInData = 10;
+ auto encrypted_block =
+ Encrypt(Repeat(one_block_, kNumBlocksInData), *key2_, iv_);
+ ASSERT_EQ(kNumBlocksInData * kBlockSize, encrypted_block.size());
+
+ AesCbcCrypto crypto;
+ EXPECT_TRUE(crypto.Initialize(*key2_, iv_));
+
+ std::vector<uint8_t> output(encrypted_block.size());
+ EXPECT_TRUE(crypto.Decrypt(encrypted_block, output.data()));
+ EXPECT_EQ(output, Repeat(one_block_, kNumBlocksInData));
+}
+
+// As the code in aes_cbc_crypto.cc relies on decrypting the data block by
+// block, ensure that the crypto routines work the same way whether it
+// decrypts one block at a time or all the blocks in one call.
+TEST_F(AesCbcCryptoTest, BlockDecryptionWorks) {
+ constexpr size_t kNumBlocksInData = 5;
+ std::vector<uint8_t> data = {1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2,
+ 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4,
+ 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6,
+ 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8,
+ 9, 9, 9, 9, 9, 9, 9, 9, 0, 0, 0, 0, 0, 0, 0, 0};
+ ASSERT_EQ(data.size(), kNumBlocksInData * kBlockSize);
+ auto encrypted_data = Encrypt(data, *key1_, iv_);
+
+ // Decrypt |encrypted_data| in one pass.
+ {
+ AesCbcCrypto crypto;
+ EXPECT_TRUE(crypto.Initialize(*key1_, iv_));
+
+ std::vector<uint8_t> output(kNumBlocksInData * kBlockSize);
+ EXPECT_TRUE(crypto.Decrypt(encrypted_data, output.data()));
+ EXPECT_EQ(output, data);
+ }
+
+ // Repeat but call Decrypt() once for each block.
+ {
+ AesCbcCrypto crypto;
+ EXPECT_TRUE(crypto.Initialize(*key1_, iv_));
+
+ std::vector<uint8_t> output(kNumBlocksInData * kBlockSize);
+ auto input = base::make_span(encrypted_data);
+ for (size_t offset = 0; offset < output.size(); offset += kBlockSize) {
+ EXPECT_TRUE(
+ crypto.Decrypt(input.subspan(offset, kBlockSize), &output[offset]));
+ }
+ EXPECT_EQ(output, data);
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/cdm/aes_decryptor.cc b/chromium/media/cdm/aes_decryptor.cc
index 9b250f7ae50..b1a95211ef6 100644
--- a/chromium/media/cdm/aes_decryptor.cc
+++ b/chromium/media/cdm/aes_decryptor.cc
@@ -10,23 +10,23 @@
#include <utility>
#include <vector>
-#include "base/bind.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/strings/string_number_conversions.h"
#include "base/time/time.h"
-#include "crypto/encryptor.h"
#include "crypto/symmetric_key.h"
#include "media/base/audio_decoder_config.h"
+#include "media/base/callback_registry.h"
#include "media/base/cdm_promise.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
#include "media/base/limits.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
+#include "media/cdm/cbcs_decryptor.h"
+#include "media/cdm/cenc_decryptor.h"
#include "media/cdm/cenc_utils.h"
#include "media/cdm/json_web_key.h"
-#include "media/media_buildflags.h"
namespace media {
@@ -147,123 +147,22 @@ void AesDecryptor::SessionIdDecryptionKeyMap::Erase(
key_list_.erase(position);
}
-enum ClearBytesBufferSel {
- kSrcContainsClearBytes,
- kDstContainsClearBytes
-};
-
-static void CopySubsamples(const std::vector<SubsampleEntry>& subsamples,
- const ClearBytesBufferSel sel,
- const uint8_t* src,
- uint8_t* dst) {
- for (size_t i = 0; i < subsamples.size(); i++) {
- const SubsampleEntry& subsample = subsamples[i];
- if (sel == kSrcContainsClearBytes) {
- src += subsample.clear_bytes;
- } else {
- dst += subsample.clear_bytes;
- }
- memcpy(dst, src, subsample.cypher_bytes);
- src += subsample.cypher_bytes;
- dst += subsample.cypher_bytes;
- }
-}
-
// Decrypts |input| using |key|. Returns a DecoderBuffer with the decrypted
// data if decryption succeeded or NULL if decryption failed.
static scoped_refptr<DecoderBuffer> DecryptData(
const DecoderBuffer& input,
- const crypto::SymmetricKey* key) {
+ const crypto::SymmetricKey& key) {
CHECK(input.data_size());
CHECK(input.decrypt_config());
- CHECK(key);
-
- crypto::Encryptor encryptor;
- if (!encryptor.Init(key, crypto::Encryptor::CTR, "")) {
- DVLOG(1) << "Could not initialize decryptor.";
- return NULL;
- }
-
- DCHECK_EQ(input.decrypt_config()->iv().size(),
- static_cast<size_t>(DecryptConfig::kDecryptionKeySize));
- if (!encryptor.SetCounter(input.decrypt_config()->iv())) {
- DVLOG(1) << "Could not set counter block.";
- return NULL;
- }
-
- const char* sample = reinterpret_cast<const char*>(input.data());
- size_t sample_size = static_cast<size_t>(input.data_size());
-
- DCHECK_GT(sample_size, 0U) << "No sample data to be decrypted.";
- if (sample_size == 0)
- return NULL;
-
- if (input.decrypt_config()->subsamples().empty()) {
- std::string decrypted_text;
- base::StringPiece encrypted_text(sample, sample_size);
- if (!encryptor.Decrypt(encrypted_text, &decrypted_text)) {
- DVLOG(1) << "Could not decrypt data.";
- return NULL;
- }
-
- // TODO(xhwang): Find a way to avoid this data copy.
- return DecoderBuffer::CopyFrom(
- reinterpret_cast<const uint8_t*>(decrypted_text.data()),
- decrypted_text.size());
- }
-
- const std::vector<SubsampleEntry>& subsamples =
- input.decrypt_config()->subsamples();
-
- size_t total_clear_size = 0;
- size_t total_encrypted_size = 0;
- for (size_t i = 0; i < subsamples.size(); i++) {
- total_clear_size += subsamples[i].clear_bytes;
- total_encrypted_size += subsamples[i].cypher_bytes;
- // Check for overflow. This check is valid because *_size is unsigned.
- DCHECK(total_clear_size >= subsamples[i].clear_bytes);
- if (total_encrypted_size < subsamples[i].cypher_bytes)
- return NULL;
- }
- size_t total_size = total_clear_size + total_encrypted_size;
- if (total_size < total_clear_size || total_size != sample_size) {
- DVLOG(1) << "Subsample sizes do not equal input size";
- return NULL;
- }
- // No need to decrypt if there is no encrypted data.
- if (total_encrypted_size <= 0) {
- return DecoderBuffer::CopyFrom(reinterpret_cast<const uint8_t*>(sample),
- sample_size);
- }
+ if (input.decrypt_config()->encryption_mode() == EncryptionMode::kCenc)
+ return DecryptCencBuffer(input, key);
- // The encrypted portions of all subsamples must form a contiguous block,
- // such that an encrypted subsample that ends away from a block boundary is
- // immediately followed by the start of the next encrypted subsample. We
- // copy all encrypted subsamples to a contiguous buffer, decrypt them, then
- // copy the decrypted bytes over the encrypted bytes in the output.
- // TODO(strobe): attempt to reduce number of memory copies
- std::unique_ptr<uint8_t[]> encrypted_bytes(new uint8_t[total_encrypted_size]);
- CopySubsamples(subsamples, kSrcContainsClearBytes,
- reinterpret_cast<const uint8_t*>(sample),
- encrypted_bytes.get());
-
- base::StringPiece encrypted_text(
- reinterpret_cast<const char*>(encrypted_bytes.get()),
- total_encrypted_size);
- std::string decrypted_text;
- if (!encryptor.Decrypt(encrypted_text, &decrypted_text)) {
- DVLOG(1) << "Could not decrypt data.";
- return NULL;
- }
- DCHECK_EQ(decrypted_text.size(), encrypted_text.size());
+ if (input.decrypt_config()->encryption_mode() == EncryptionMode::kCbcs)
+ return DecryptCbcsBuffer(input, key);
- scoped_refptr<DecoderBuffer> output = DecoderBuffer::CopyFrom(
- reinterpret_cast<const uint8_t*>(sample), sample_size);
- CopySubsamples(subsamples, kDstContainsClearBytes,
- reinterpret_cast<const uint8_t*>(decrypted_text.data()),
- output->writable_data());
- return output;
+ DVLOG(1) << "Only 'cenc' and 'cbcs' modes supported.";
+ return nullptr;
}
AesDecryptor::AesDecryptor(
@@ -554,6 +453,12 @@ CdmContext* AesDecryptor::GetCdmContext() {
return this;
}
+std::unique_ptr<CallbackRegistration> AesDecryptor::RegisterNewKeyCB(
+ base::RepeatingClosure new_key_cb) {
+ NOTIMPLEMENTED();
+ return nullptr;
+}
+
Decryptor* AesDecryptor::GetDecryptor() {
return this;
}
@@ -581,32 +486,32 @@ void AesDecryptor::RegisterNewKeyCB(StreamType stream_type,
void AesDecryptor::Decrypt(StreamType stream_type,
scoped_refptr<DecoderBuffer> encrypted,
const DecryptCB& decrypt_cb) {
- CHECK(encrypted->decrypt_config());
-
- scoped_refptr<DecoderBuffer> decrypted;
- if (!encrypted->decrypt_config()->is_encrypted()) {
- decrypted = DecoderBuffer::CopyFrom(encrypted->data(),
- encrypted->data_size());
- } else {
- const std::string& key_id = encrypted->decrypt_config()->key_id();
- base::AutoLock auto_lock(key_map_lock_);
- DecryptionKey* key = GetKey_Locked(key_id);
- if (!key) {
- DVLOG(1) << "Could not find a matching key for the given key ID.";
- decrypt_cb.Run(kNoKey, NULL);
- return;
- }
+ if (!encrypted->decrypt_config()) {
+ // If there is no DecryptConfig, then the data is unencrypted so return it
+ // immediately.
+ decrypt_cb.Run(kSuccess, encrypted);
+ return;
+ }
- decrypted = DecryptData(*encrypted.get(), key->decryption_key());
- if (!decrypted) {
- DVLOG(1) << "Decryption failed.";
- decrypt_cb.Run(kError, NULL);
- return;
- }
+ const std::string& key_id = encrypted->decrypt_config()->key_id();
+ base::AutoLock auto_lock(key_map_lock_);
+ DecryptionKey* key = GetKey_Locked(key_id);
+ if (!key) {
+ DVLOG(1) << "Could not find a matching key for the given key ID.";
+ decrypt_cb.Run(kNoKey, nullptr);
+ return;
}
- decrypted->set_timestamp(encrypted->timestamp());
- decrypted->set_duration(encrypted->duration());
+ scoped_refptr<DecoderBuffer> decrypted =
+ DecryptData(*encrypted.get(), *key->decryption_key());
+ if (!decrypted) {
+ DVLOG(1) << "Decryption failed.";
+ decrypt_cb.Run(kError, nullptr);
+ return;
+ }
+
+ DCHECK_EQ(decrypted->timestamp(), encrypted->timestamp());
+ DCHECK_EQ(decrypted->duration(), encrypted->duration());
decrypt_cb.Run(kSuccess, std::move(decrypted));
}
@@ -757,8 +662,7 @@ CdmKeysInfo AesDecryptor::GenerateKeysInfoList(
}
AesDecryptor::DecryptionKey::DecryptionKey(const std::string& secret)
- : secret_(secret) {
-}
+ : secret_(secret) {}
AesDecryptor::DecryptionKey::~DecryptionKey() = default;
diff --git a/chromium/media/cdm/aes_decryptor.h b/chromium/media/cdm/aes_decryptor.h
index f9d984955cc..943e76d87cc 100644
--- a/chromium/media/cdm/aes_decryptor.h
+++ b/chromium/media/cdm/aes_decryptor.h
@@ -61,6 +61,8 @@ class MEDIA_EXPORT AesDecryptor : public ContentDecryptionModule,
CdmContext* GetCdmContext() override;
// CdmContext implementation.
+ std::unique_ptr<CallbackRegistration> RegisterNewKeyCB(
+ base::RepeatingClosure new_key_cb) override;
Decryptor* GetDecryptor() override;
int GetCdmId() const override;
@@ -180,7 +182,7 @@ class MEDIA_EXPORT AesDecryptor : public ContentDecryptionModule,
// Since only Decrypt() is called off the renderer thread, we only need to
// protect |key_map_|, the only member variable that is shared between
// Decrypt() and other methods.
- KeyIdToSessionKeysMap key_map_; // Protected by |key_map_lock_|.
+ KeyIdToSessionKeysMap key_map_; // Protected by |key_map_lock_|.
mutable base::Lock key_map_lock_; // Protects the |key_map_|.
// Keeps track of current open sessions and their type. Although publicly
diff --git a/chromium/media/cdm/aes_decryptor_unittest.cc b/chromium/media/cdm/aes_decryptor_unittest.cc
index 802190b47a2..cb06e066713 100644
--- a/chromium/media/cdm/aes_decryptor_unittest.cc
+++ b/chromium/media/cdm/aes_decryptor_unittest.cc
@@ -51,7 +51,9 @@ using ::testing::StrictMock;
using ::testing::StrNe;
using ::testing::Unused;
-MATCHER(IsEmpty, "") { return arg.empty(); }
+MATCHER(IsEmpty, "") {
+ return arg.empty();
+}
MATCHER(NotEmpty, "") {
return !arg.empty();
}
@@ -178,35 +180,21 @@ const uint8_t kEncryptedData2[] = {
// all entries must be equal to kOriginalDataSize to make the subsample entries
// valid.
-const SubsampleEntry kSubsampleEntriesNormal[] = {
- { 2, 7 },
- { 3, 11 },
- { 1, 0 }
-};
+const SubsampleEntry kSubsampleEntriesNormal[] = {{2, 7}, {3, 11}, {1, 0}};
const SubsampleEntry kSubsampleEntriesWrongSize[] = {
- { 3, 6 }, // This entry doesn't match the correct entry.
- { 3, 11 },
- { 1, 0 }
-};
+ {3, 6}, // This entry doesn't match the correct entry.
+ {3, 11},
+ {1, 0}};
const SubsampleEntry kSubsampleEntriesInvalidTotalSize[] = {
- { 1, 1000 }, // This entry is too large.
- { 3, 11 },
- { 1, 0 }
-};
+ {1, 1000}, // This entry is too large.
+ {3, 11},
+ {1, 0}};
-const SubsampleEntry kSubsampleEntriesClearOnly[] = {
- { 7, 0 },
- { 8, 0 },
- { 9, 0 }
-};
+const SubsampleEntry kSubsampleEntriesClearOnly[] = {{7, 0}, {8, 0}, {9, 0}};
-const SubsampleEntry kSubsampleEntriesCypherOnly[] = {
- { 0, 6 },
- { 0, 8 },
- { 0, 10 }
-};
+const SubsampleEntry kSubsampleEntriesCypherOnly[] = {{0, 6}, {0, 8}, {0, 10}};
scoped_refptr<DecoderBuffer> CreateEncryptedBuffer(
const std::vector<uint8_t>& data,
@@ -214,16 +202,21 @@ scoped_refptr<DecoderBuffer> CreateEncryptedBuffer(
const std::vector<uint8_t>& iv,
const std::vector<SubsampleEntry>& subsample_entries) {
DCHECK(!data.empty());
+ DCHECK(!iv.empty());
+ scoped_refptr<DecoderBuffer> encrypted_buffer(new DecoderBuffer(data.size()));
+ memcpy(encrypted_buffer->writable_data(), data.data(), data.size());
+ std::string key_id_string(key_id.begin(), key_id.end());
+ std::string iv_string(iv.begin(), iv.end());
+ encrypted_buffer->set_decrypt_config(DecryptConfig::CreateCencConfig(
+ key_id_string, iv_string, subsample_entries));
+ return encrypted_buffer;
+}
+
+scoped_refptr<DecoderBuffer> CreateClearBuffer(
+ const std::vector<uint8_t>& data) {
+ DCHECK(!data.empty());
scoped_refptr<DecoderBuffer> encrypted_buffer(new DecoderBuffer(data.size()));
- memcpy(encrypted_buffer->writable_data(), &data[0], data.size());
- CHECK(encrypted_buffer.get());
- std::string key_id_string(
- reinterpret_cast<const char*>(key_id.empty() ? NULL : &key_id[0]),
- key_id.size());
- std::string iv_string(
- reinterpret_cast<const char*>(iv.empty() ? NULL : &iv[0]), iv.size());
- encrypted_buffer->set_decrypt_config(std::unique_ptr<DecryptConfig>(
- new DecryptConfig(key_id_string, iv_string, subsample_entries)));
+ memcpy(encrypted_buffer->writable_data(), data.data(), data.size());
return encrypted_buffer;
}
@@ -283,12 +276,15 @@ class AesDecryptorTest : public testing::TestWithParam<TestType> {
CdmModule::GetInstance()->Initialize(helper_->LibraryPath());
#endif // BUILDFLAG(ENABLE_CDM_HOST_VERIFICATION)
+ CdmAdapter::CreateCdmFunc create_cdm_func =
+ CdmModule::GetInstance()->GetCreateCdmFunc();
+
std::unique_ptr<CdmAllocator> allocator(new SimpleCdmAllocator());
std::unique_ptr<CdmAuxiliaryHelper> cdm_helper(
new MockCdmAuxiliaryHelper(std::move(allocator)));
CdmAdapter::Create(
helper_->KeySystemName(), url::Origin::Create(GURL("http://foo.com")),
- cdm_config, std::move(cdm_helper),
+ cdm_config, create_cdm_func, std::move(cdm_helper),
base::Bind(&MockCdmClient::OnSessionMessage,
base::Unretained(&cdm_client_)),
base::Bind(&MockCdmClient::OnSessionClosed,
@@ -473,8 +469,8 @@ class AesDecryptorTest : public testing::TestWithParam<TestType> {
std::vector<uint8_t> decrypted_text;
if (decrypted.get() && decrypted->data_size()) {
- decrypted_text.assign(
- decrypted->data(), decrypted->data() + decrypted->data_size());
+ decrypted_text.assign(decrypted->data(),
+ decrypted->data() + decrypted->data_size());
}
switch (result) {
@@ -620,9 +616,9 @@ TEST_P(AesDecryptorTest, NormalDecryption) {
TEST_P(AesDecryptorTest, UnencryptedFrame) {
// An empty iv string signals that the frame is unencrypted.
- scoped_refptr<DecoderBuffer> encrypted_buffer = CreateEncryptedBuffer(
- original_data_, key_id_, std::vector<uint8_t>(), no_subsample_entries_);
- DecryptAndExpect(encrypted_buffer, original_data_, SUCCESS);
+ scoped_refptr<DecoderBuffer> unencrypted_buffer =
+ CreateClearBuffer(original_data_);
+ DecryptAndExpect(unencrypted_buffer, original_data_, SUCCESS);
}
TEST_P(AesDecryptorTest, WrongKey) {
@@ -646,8 +642,8 @@ TEST_P(AesDecryptorTest, KeyReplacement) {
encrypted_data_, key_id_, iv_, no_subsample_entries_);
UpdateSessionAndExpect(session_id, kWrongKeyAsJWK, RESOLVED, true);
- ASSERT_NO_FATAL_FAILURE(DecryptAndExpect(
- encrypted_buffer, original_data_, DATA_MISMATCH));
+ ASSERT_NO_FATAL_FAILURE(
+ DecryptAndExpect(encrypted_buffer, original_data_, DATA_MISMATCH));
UpdateSessionAndExpect(session_id, kKeyAsJWK, RESOLVED, false);
ASSERT_NO_FATAL_FAILURE(
@@ -707,17 +703,17 @@ TEST_P(AesDecryptorTest, CorruptedData) {
std::vector<uint8_t> bad_data = encrypted_data_;
bad_data[1]++;
- scoped_refptr<DecoderBuffer> encrypted_buffer = CreateEncryptedBuffer(
- bad_data, key_id_, iv_, no_subsample_entries_);
+ scoped_refptr<DecoderBuffer> encrypted_buffer =
+ CreateEncryptedBuffer(bad_data, key_id_, iv_, no_subsample_entries_);
DecryptAndExpect(encrypted_buffer, original_data_, DATA_MISMATCH);
}
TEST_P(AesDecryptorTest, EncryptedAsUnencryptedFailure) {
std::string session_id = CreateSession(key_id_);
UpdateSessionAndExpect(session_id, kKeyAsJWK, RESOLVED, true);
- scoped_refptr<DecoderBuffer> encrypted_buffer = CreateEncryptedBuffer(
- encrypted_data_, key_id_, std::vector<uint8_t>(), no_subsample_entries_);
- DecryptAndExpect(encrypted_buffer, original_data_, DATA_MISMATCH);
+ scoped_refptr<DecoderBuffer> unencrypted_buffer =
+ CreateClearBuffer(encrypted_data_);
+ DecryptAndExpect(unencrypted_buffer, original_data_, DATA_MISMATCH);
}
TEST_P(AesDecryptorTest, SubsampleDecryption) {
@@ -761,9 +757,9 @@ TEST_P(AesDecryptorTest, SubsampleInvalidTotalSize) {
kSubsampleEntriesInvalidTotalSize +
arraysize(kSubsampleEntriesInvalidTotalSize));
- scoped_refptr<DecoderBuffer> encrypted_buffer = CreateEncryptedBuffer(
- subsample_encrypted_data_, key_id_, iv_,
- subsample_entries_invalid_total_size);
+ scoped_refptr<DecoderBuffer> encrypted_buffer =
+ CreateEncryptedBuffer(subsample_encrypted_data_, key_id_, iv_,
+ subsample_entries_invalid_total_size);
DecryptAndExpect(encrypted_buffer, original_data_, DECRYPT_ERROR);
}
diff --git a/chromium/media/cdm/api/README b/chromium/media/cdm/api/README
deleted file mode 100644
index 3e7dcf2ffcc..00000000000
--- a/chromium/media/cdm/api/README
+++ /dev/null
@@ -1,3 +0,0 @@
-This directory contains files that define the interface between an
-Encrypted Media Extensions (EME) Content Decryption Module (CDM) and Chromium
-or other user agents. It is used to build both user agents and CDMs.
diff --git a/chromium/media/cdm/api/README.md b/chromium/media/cdm/api/README.md
new file mode 100644
index 00000000000..226e7cf226a
--- /dev/null
+++ b/chromium/media/cdm/api/README.md
@@ -0,0 +1,24 @@
+# Library CDM Interface
+
+This directory contains files that define the shared library interface between
+an Encrypted Media Extensions (EME) Content Decryption Module (CDM) and Chromium
+or other user agents. It is used to build both user agents and CDMs.
+
+This is also referred to as the "CDM interface" in the context of library CDM
+and in this doc.
+
+TODO(xhwang): Add more sections describing the CDM interface.
+
+## Experimental and Stable CDM interface
+
+A new CDM interface that's still under development is subject to change. This
+is called an "experimental CDM interface". To avoid compatibility issues, a user
+agent should not support an experimental CDM interface by default (it's okay to
+support it behind a flag). Similarly, a CDM vendor should not ship a CDM using
+an experimental CDM interface to end users.
+
+The experimental status of a CDM interface ends when the development is complete
+and the CDM interface is marked as stable.
+
+On newer CDM interfaces, a static boolean member kIsStable is present to
+indicate whether the CDM interface is stable or experimental.
diff --git a/chromium/media/cdm/api/content_decryption_module.h b/chromium/media/cdm/api/content_decryption_module.h
index 9d4073ee016..0dde3ff4c91 100644
--- a/chromium/media/cdm/api/content_decryption_module.h
+++ b/chromium/media/cdm/api/content_decryption_module.h
@@ -46,6 +46,7 @@ typedef __int64 int64_t;
#type " size mismatch")
extern "C" {
+
CDM_API void INITIALIZE_CDM_MODULE();
CDM_API void DeinitializeCdmModule();
@@ -63,23 +64,17 @@ typedef void* (*GetCdmHostFunc)(int host_interface_version, void* user_data);
// |cdm_interface_version|.
// Caller retains ownership of arguments and must call Destroy() on the returned
// object.
-CDM_API void* CreateCdmInstance(
- int cdm_interface_version,
- const char* key_system, uint32_t key_system_size,
- GetCdmHostFunc get_cdm_host_func, void* user_data);
+CDM_API void* CreateCdmInstance(int cdm_interface_version,
+ const char* key_system,
+ uint32_t key_system_size,
+ GetCdmHostFunc get_cdm_host_func,
+ void* user_data);
CDM_API const char* GetCdmVersion();
-}
-
-namespace cdm {
-class CDM_CLASS_API AudioFrames;
-class CDM_CLASS_API DecryptedBlock;
-class CDM_CLASS_API VideoFrame;
+} // extern "C"
-class CDM_CLASS_API Host_8;
-class CDM_CLASS_API Host_9;
-class CDM_CLASS_API Host_10;
+namespace cdm {
enum Status : uint32_t {
kSuccess = 0,
@@ -92,29 +87,6 @@ enum Status : uint32_t {
};
CHECK_TYPE(Status, 4, 4);
-// This must at least contain the exceptions defined in the spec:
-// https://w3c.github.io/encrypted-media/#exceptions
-// The following starts with the list of DOM4 exceptions from:
-// http://www.w3.org/TR/dom/#domexception
-// Some DOM4 exceptions are not included as they are not expected to be used.
-// Should only be used on Host_8 and before.
-enum Error : uint32_t {
- kNotSupportedError = 9,
- kInvalidStateError = 11,
- kInvalidAccessError = 15,
- kQuotaExceededError = 22,
-
- // Additional exceptions that do not have assigned codes.
- // There are other non-EME-specific values, not included in this list.
- kUnknownError = 30,
-
- // Additional values from previous EME versions. They currently have no
- // matching DOMException.
- kClientError = 100,
- kOutputError = 101
-};
-CHECK_TYPE(Error, 4, 4);
-
// Exceptions used by the CDM to reject promises.
// https://w3c.github.io/encrypted-media/#exceptions
enum Exception : uint32_t {
@@ -439,7 +411,7 @@ enum QueryResult : uint32_t { kQuerySucceeded = 0, kQueryFailed };
CHECK_TYPE(QueryResult, 4, 4);
// The Initialization Data Type. The valid types are defined in the spec:
-// http://w3c.github.io/encrypted-media/initdata-format-registry.html#registry
+// https://w3c.github.io/encrypted-media/format-registry/initdata/index.html#registry
enum InitDataType : uint32_t { kCenc = 0, kKeyIds = 1, kWebM = 2 };
CHECK_TYPE(InitDataType, 4, 4);
@@ -483,6 +455,101 @@ struct Policy {
};
CHECK_TYPE(Policy, 4, 4);
+// Represents a buffer created by Allocator implementations.
+class CDM_CLASS_API Buffer {
+ public:
+ // Destroys the buffer in the same context as it was created.
+ virtual void Destroy() = 0;
+
+ virtual uint32_t Capacity() const = 0;
+ virtual uint8_t* Data() = 0;
+ virtual void SetSize(uint32_t size) = 0;
+ virtual uint32_t Size() const = 0;
+
+ protected:
+ Buffer() {}
+ virtual ~Buffer() {}
+
+ private:
+ Buffer(const Buffer&);
+ void operator=(const Buffer&);
+};
+
+// Represents a decrypted block that has not been decoded.
+class CDM_CLASS_API DecryptedBlock {
+ public:
+ virtual void SetDecryptedBuffer(Buffer* buffer) = 0;
+ virtual Buffer* DecryptedBuffer() = 0;
+
+ // TODO(tomfinegan): Figure out if timestamp is really needed. If it is not,
+ // we can just pass Buffer pointers around.
+ virtual void SetTimestamp(int64_t timestamp) = 0;
+ virtual int64_t Timestamp() const = 0;
+
+ protected:
+ DecryptedBlock() {}
+ virtual ~DecryptedBlock() {}
+};
+
+class CDM_CLASS_API VideoFrame {
+ public:
+ enum VideoPlane : uint32_t {
+ kYPlane = 0,
+ kUPlane = 1,
+ kVPlane = 2,
+ kMaxPlanes = 3,
+ };
+
+ virtual void SetFormat(VideoFormat format) = 0;
+ virtual VideoFormat Format() const = 0;
+
+ virtual void SetSize(cdm::Size size) = 0;
+ virtual cdm::Size Size() const = 0;
+
+ virtual void SetFrameBuffer(Buffer* frame_buffer) = 0;
+ virtual Buffer* FrameBuffer() = 0;
+
+ virtual void SetPlaneOffset(VideoPlane plane, uint32_t offset) = 0;
+ virtual uint32_t PlaneOffset(VideoPlane plane) = 0;
+
+ virtual void SetStride(VideoPlane plane, uint32_t stride) = 0;
+ virtual uint32_t Stride(VideoPlane plane) = 0;
+
+ virtual void SetTimestamp(int64_t timestamp) = 0;
+ virtual int64_t Timestamp() const = 0;
+
+ protected:
+ VideoFrame() {}
+ virtual ~VideoFrame() {}
+};
+
+// Represents decrypted and decoded audio frames. AudioFrames can contain
+// multiple audio output buffers, which are serialized into this format:
+//
+// |<------------------- serialized audio buffer ------------------->|
+// | int64_t timestamp | int64_t length | length bytes of audio data |
+//
+// For example, with three audio output buffers, the AudioFrames will look
+// like this:
+//
+// |<----------------- AudioFrames ------------------>|
+// | audio buffer 0 | audio buffer 1 | audio buffer 2 |
+class CDM_CLASS_API AudioFrames {
+ public:
+ virtual void SetFrameBuffer(Buffer* buffer) = 0;
+ virtual Buffer* FrameBuffer() = 0;
+
+ // The CDM must call this method, providing a valid format, when providing
+ // frame buffers. Planar data should be stored end to end; e.g.,
+ // |ch1 sample1||ch1 sample2|....|ch1 sample_last||ch2 sample1|...
+ virtual void SetFormat(AudioFormat format) = 0;
+ virtual AudioFormat Format() const = 0;
+
+ protected:
+ AudioFrames() {}
+ virtual ~AudioFrames() {}
+};
+
// FileIO interface provides a way for the CDM to store data in a file in
// persistent storage. This interface aims only at providing basic read/write
// capabilities and should not be used as a full fledged file IO API.
@@ -548,7 +615,8 @@ class CDM_CLASS_API FileIOClient {
// - kError indicates read failure, e.g. the storage is not open or cannot be
// fully read.
virtual void OnReadComplete(Status status,
- const uint8_t* data, uint32_t data_size) = 0;
+ const uint8_t* data,
+ uint32_t data_size) = 0;
// Response to a FileIO::Write() call.
// - kSuccess indicates that all the data has been written into the file
@@ -564,16 +632,20 @@ class CDM_CLASS_API FileIOClient {
virtual ~FileIOClient() {}
};
+class CDM_CLASS_API Host_9;
+class CDM_CLASS_API Host_10;
+class CDM_CLASS_API Host_11;
+
// ContentDecryptionModule interface that all CDMs need to implement.
// The interface is versioned for backward compatibility.
// Note: ContentDecryptionModule implementations must use the allocator
// provided in CreateCdmInstance() to allocate any Buffer that needs to
// be passed back to the caller. Implementations must call Buffer::Destroy()
// when a Buffer is created that will never be returned to the caller.
-class CDM_CLASS_API ContentDecryptionModule_8 {
+class CDM_CLASS_API ContentDecryptionModule_9 {
public:
- static const int kVersion = 8;
- typedef Host_8 Host;
+ static const int kVersion = 9;
+ typedef Host_9 Host;
// Initializes the CDM instance, providing information about permitted
// functionalities.
@@ -585,6 +657,13 @@ class CDM_CLASS_API ContentDecryptionModule_8 {
virtual void Initialize(bool allow_distinctive_identifier,
bool allow_persistent_state) = 0;
+ // Gets the key status if the CDM has a hypothetical key with the |policy|.
+ // The CDM must respond by calling either Host::OnResolveKeyStatusPromise()
+ // with the result key status or Host::OnRejectPromise() if an unexpected
+ // error happened or this method is not supported.
+ virtual void GetStatusForPolicy(uint32_t promise_id,
+ const Policy& policy) = 0;
+
// SetServerCertificate(), CreateSessionAndGenerateRequest(), LoadSession(),
// UpdateSession(), CloseSession(), and RemoveSession() all accept a
// |promise_id|, which must be passed to the completion Host method
@@ -660,8 +739,8 @@ class CDM_CLASS_API ContentDecryptionModule_8 {
//
// Returns kSuccess if the |audio_decoder_config| is supported and the CDM
// audio decoder is successfully initialized.
- // Returns kSessionError if |audio_decoder_config| is not supported. The CDM
- // may still be able to do Decrypt().
+ // Returns kInitializationError if |audio_decoder_config| is not supported.
+ // The CDM may still be able to do Decrypt().
// Returns kDeferredInitialization if the CDM is not ready to initialize the
// decoder at this time. Must call Host::OnDeferredInitializationDone() once
// initialization is complete.
@@ -673,8 +752,8 @@ class CDM_CLASS_API ContentDecryptionModule_8 {
//
// Returns kSuccess if the |video_decoder_config| is supported and the CDM
// video decoder is successfully initialized.
- // Returns kSessionError if |video_decoder_config| is not supported. The CDM
- // may still be able to do Decrypt().
+ // Returns kInitializationError if |video_decoder_config| is not supported.
+ // The CDM may still be able to do Decrypt().
// Returns kDeferredInitialization if the CDM is not ready to initialize the
// decoder at this time. Must call Host::OnDeferredInitializationDone() once
// initialization is complete.
@@ -744,12 +823,24 @@ class CDM_CLASS_API ContentDecryptionModule_8 {
uint32_t link_mask,
uint32_t output_protection_mask) = 0;
+ // Called by the host after a call to Host::RequestStorageId(). If the
+ // version of the storage ID requested is available, |storage_id| and
+ // |storage_id_size| are set appropriately. |version| will be the same as
+ // what was requested, unless 0 (latest) was requested, in which case
+ // |version| will be the actual version number for the |storage_id| returned.
+ // If the requested version is not available, null/zero will be provided as
+ // |storage_id| and |storage_id_size|, respectively, and |version| should be
+ // ignored.
+ virtual void OnStorageId(uint32_t version,
+ const uint8_t* storage_id,
+ uint32_t storage_id_size) = 0;
+
// Destroys the object in the same context as it was created.
virtual void Destroy() = 0;
protected:
- ContentDecryptionModule_8() {}
- virtual ~ContentDecryptionModule_8() {}
+ ContentDecryptionModule_9() {}
+ virtual ~ContentDecryptionModule_9() {}
};
// ContentDecryptionModule interface that all CDMs need to implement.
@@ -758,20 +849,27 @@ class CDM_CLASS_API ContentDecryptionModule_8 {
// provided in CreateCdmInstance() to allocate any Buffer that needs to
// be passed back to the caller. Implementations must call Buffer::Destroy()
// when a Buffer is created that will never be returned to the caller.
-class CDM_CLASS_API ContentDecryptionModule_9 {
+class CDM_CLASS_API ContentDecryptionModule_10 {
public:
- static const int kVersion = 9;
- typedef Host_9 Host;
+ static const int kVersion = 10;
+ static const bool kIsStable = true;
+ typedef Host_10 Host;
// Initializes the CDM instance, providing information about permitted
- // functionalities.
+ // functionalities. The CDM must respond by calling Host::OnInitialized()
+ // with whether the initialization succeeded. No other calls will be made by
+ // the host before Host::OnInitialized() returns.
// If |allow_distinctive_identifier| is false, messages from the CDM,
// such as message events, must not contain a Distinctive Identifier,
// even in an encrypted form.
// If |allow_persistent_state| is false, the CDM must not attempt to
// persist state. Calls to CreateFileIO() will fail.
+ // If |use_hw_secure_codecs| is true, the CDM must ensure the decryption key
+ // and video buffers (compressed and uncompressed) are securely protected by
+ // hardware.
virtual void Initialize(bool allow_distinctive_identifier,
- bool allow_persistent_state) = 0;
+ bool allow_persistent_state,
+ bool use_hw_secure_codecs) = 0;
// Gets the key status if the CDM has a hypothetical key with the |policy|.
// The CDM must respond by calling either Host::OnResolveKeyStatusPromise()
@@ -788,6 +886,12 @@ class CDM_CLASS_API ContentDecryptionModule_9 {
// Provides a server certificate to be used to encrypt messages to the
// license server. The CDM must respond by calling either
// Host::OnResolvePromise() or Host::OnRejectPromise().
+ // If the CDM does not support server certificates, the promise should be
+ // rejected with kExceptionNotSupportedError. If |server_certificate_data|
+ // is empty, reject with kExceptionTypeError. Any other error should be
+ // rejected with kExceptionInvalidStateError or kExceptionQuotaExceededError.
+ // TODO(crbug.com/796417): Add support for the promise to return true or
+ // false, rather than using kExceptionNotSupportedError to mean false.
virtual void SetServerCertificate(uint32_t promise_id,
const uint8_t* server_certificate_data,
uint32_t server_certificate_data_size) = 0;
@@ -847,7 +951,7 @@ class CDM_CLASS_API ContentDecryptionModule_9 {
// Returns kDecryptError if any other error happened.
// If the return value is not kSuccess, |decrypted_buffer| should be ignored
// by the caller.
- virtual Status Decrypt(const InputBuffer_1& encrypted_buffer,
+ virtual Status Decrypt(const InputBuffer_2& encrypted_buffer,
DecryptedBlock* decrypted_buffer) = 0;
// Initializes the CDM audio decoder with |audio_decoder_config|. This
@@ -861,7 +965,7 @@ class CDM_CLASS_API ContentDecryptionModule_9 {
// decoder at this time. Must call Host::OnDeferredInitializationDone() once
// initialization is complete.
virtual Status InitializeAudioDecoder(
- const AudioDecoderConfig_1& audio_decoder_config) = 0;
+ const AudioDecoderConfig_2& audio_decoder_config) = 0;
// Initializes the CDM video decoder with |video_decoder_config|. This
// function must be called before DecryptAndDecodeFrame() is called.
@@ -874,7 +978,7 @@ class CDM_CLASS_API ContentDecryptionModule_9 {
// decoder at this time. Must call Host::OnDeferredInitializationDone() once
// initialization is complete.
virtual Status InitializeVideoDecoder(
- const VideoDecoderConfig_1& video_decoder_config) = 0;
+ const VideoDecoderConfig_2& video_decoder_config) = 0;
// De-initializes the CDM decoder and sets it to an uninitialized state. The
// caller can initialize the decoder again after this call to re-initialize
@@ -902,7 +1006,7 @@ class CDM_CLASS_API ContentDecryptionModule_9 {
// Returns kDecodeError if any decoding error happened.
// If the return value is not kSuccess, |video_frame| should be ignored by
// the caller.
- virtual Status DecryptAndDecodeFrame(const InputBuffer_1& encrypted_buffer,
+ virtual Status DecryptAndDecodeFrame(const InputBuffer_2& encrypted_buffer,
VideoFrame* video_frame) = 0;
// Decrypts the |encrypted_buffer| and decodes the decrypted buffer into
@@ -921,7 +1025,7 @@ class CDM_CLASS_API ContentDecryptionModule_9 {
// Returns kDecodeError if any decoding error happened.
// If the return value is not kSuccess, |audio_frames| should be ignored by
// the caller.
- virtual Status DecryptAndDecodeSamples(const InputBuffer_1& encrypted_buffer,
+ virtual Status DecryptAndDecodeSamples(const InputBuffer_2& encrypted_buffer,
AudioFrames* audio_frames) = 0;
// Called by the host after a platform challenge was initiated via
@@ -955,22 +1059,23 @@ class CDM_CLASS_API ContentDecryptionModule_9 {
virtual void Destroy() = 0;
protected:
- ContentDecryptionModule_9() {}
- virtual ~ContentDecryptionModule_9() {}
+ ContentDecryptionModule_10() {}
+ virtual ~ContentDecryptionModule_10() {}
};
-// ----- Note: This interface is still in development and not stable! -----
-//
+// ----- Note: CDM interface(s) below still in development and not stable! -----
+
// ContentDecryptionModule interface that all CDMs need to implement.
// The interface is versioned for backward compatibility.
// Note: ContentDecryptionModule implementations must use the allocator
// provided in CreateCdmInstance() to allocate any Buffer that needs to
// be passed back to the caller. Implementations must call Buffer::Destroy()
// when a Buffer is created that will never be returned to the caller.
-class CDM_CLASS_API ContentDecryptionModule_10 {
+class CDM_CLASS_API ContentDecryptionModule_11 {
public:
- static const int kVersion = 10;
- typedef Host_10 Host;
+ static const int kVersion = 11;
+ static const bool kIsStable = false;
+ typedef Host_11 Host;
// Initializes the CDM instance, providing information about permitted
// functionalities. The CDM must respond by calling Host::OnInitialized()
@@ -1176,36 +1281,13 @@ class CDM_CLASS_API ContentDecryptionModule_10 {
virtual void Destroy() = 0;
protected:
- ContentDecryptionModule_10() {}
- virtual ~ContentDecryptionModule_10() {}
-};
-
-// The latest stable ContentDecryptionModule interface.
-typedef ContentDecryptionModule_9 ContentDecryptionModule;
-
-// Represents a buffer created by Allocator implementations.
-class CDM_CLASS_API Buffer {
- public:
- // Destroys the buffer in the same context as it was created.
- virtual void Destroy() = 0;
-
- virtual uint32_t Capacity() const = 0;
- virtual uint8_t* Data() = 0;
- virtual void SetSize(uint32_t size) = 0;
- virtual uint32_t Size() const = 0;
-
- protected:
- Buffer() {}
- virtual ~Buffer() {}
-
- private:
- Buffer(const Buffer&);
- void operator=(const Buffer&);
+ ContentDecryptionModule_11() {}
+ virtual ~ContentDecryptionModule_11() {}
};
-class CDM_CLASS_API Host_8 {
+class CDM_CLASS_API Host_9 {
public:
- static const int kVersion = 8;
+ static const int kVersion = 9;
// Returns a Buffer* containing non-zero members upon success, or NULL on
// failure. The caller owns the Buffer* after this call. The buffer is not
@@ -1220,6 +1302,11 @@ class CDM_CLASS_API Host_8 {
// Returns the current wall time.
virtual Time GetCurrentWallTime() = 0;
+ // Called by the CDM when a key status is available in response to
+ // GetStatusForPolicy().
+ virtual void OnResolveKeyStatusPromise(uint32_t promise_id,
+ KeyStatus key_status) = 0;
+
// Called by the CDM when a session is created or loaded and the value for the
// MediaKeySession's sessionId attribute is available (|session_id|).
// This must be called before OnSessionMessage() or
@@ -1237,26 +1324,21 @@ class CDM_CLASS_API Host_8 {
// Called by the CDM when an error occurs as a result of one of the
// ContentDecryptionModule calls that accept a |promise_id|.
- // |error| must be specified, |error_message| and |system_code|
+ // |exception| must be specified. |error_message| and |system_code|
// are optional. |error_message_size| should not include null termination.
virtual void OnRejectPromise(uint32_t promise_id,
- Error error,
+ Exception exception,
uint32_t system_code,
const char* error_message,
uint32_t error_message_size) = 0;
// Called by the CDM when it has a message for session |session_id|.
// Size parameters should not include null termination.
- // |legacy_destination_url| is only for supporting the prefixed EME API and
- // is ignored by unprefixed EME. It should only be non-null if |message_type|
- // is kLicenseRenewal.
virtual void OnSessionMessage(const char* session_id,
uint32_t session_id_size,
MessageType message_type,
const char* message,
- uint32_t message_size,
- const char* legacy_destination_url,
- uint32_t legacy_destination_url_length) = 0;
+ uint32_t message_size) = 0;
// Called by the CDM when there has been a change in keys or their status for
// session |session_id|. |has_additional_usable_key| should be set if a
@@ -1288,21 +1370,6 @@ class CDM_CLASS_API Host_8 {
virtual void OnSessionClosed(const char* session_id,
uint32_t session_id_size) = 0;
- // Called by the CDM when an error occurs in session |session_id|
- // unrelated to one of the ContentDecryptionModule calls that accept a
- // |promise_id|. |error| must be specified, |error_message| and
- // |system_code| are optional. Length parameters should not include null
- // termination.
- // Note:
- // - This method is only for supporting prefixed EME API.
- // - This method will be ignored by unprefixed EME. All errors reported
- // in this method should probably also be reported by one of other methods.
- virtual void OnLegacySessionError(
- const char* session_id, uint32_t session_id_length,
- Error error,
- uint32_t system_code,
- const char* error_message, uint32_t error_message_length) = 0;
-
// The following are optional methods that may not be implemented on all
// platforms.
@@ -1337,14 +1404,23 @@ class CDM_CLASS_API Host_8 {
// CDM can call this method multiple times to operate on different files.
virtual FileIO* CreateFileIO(FileIOClient* client) = 0;
+ // Requests a specific version of the storage ID. A storage ID is a stable,
+ // device specific ID used by the CDM to securely store persistent data. The
+ // ID will be returned by the host via ContentDecryptionModule::OnStorageId().
+ // If |version| is 0, the latest version will be returned. All |version|s
+ // that are greater than or equal to 0x80000000 are reserved for the CDM and
+ // should not be supported or returned by the host. The CDM must not expose
+ // the ID outside the client device, even in encrypted form.
+ virtual void RequestStorageId(uint32_t version) = 0;
+
protected:
- Host_8() {}
- virtual ~Host_8() {}
+ Host_9() {}
+ virtual ~Host_9() {}
};
-class CDM_CLASS_API Host_9 {
+class CDM_CLASS_API Host_10 {
public:
- static const int kVersion = 9;
+ static const int kVersion = 10;
// Returns a Buffer* containing non-zero members upon success, or NULL on
// failure. The caller owns the Buffer* after this call. The buffer is not
@@ -1359,6 +1435,9 @@ class CDM_CLASS_API Host_9 {
// Returns the current wall time.
virtual Time GetCurrentWallTime() = 0;
+ // Called by the CDM with the result after the CDM instance was initialized.
+ virtual void OnInitialized(bool success) = 0;
+
// Called by the CDM when a key status is available in response to
// GetStatusForPolicy().
virtual void OnResolveKeyStatusPromise(uint32_t promise_id,
@@ -1471,13 +1550,13 @@ class CDM_CLASS_API Host_9 {
virtual void RequestStorageId(uint32_t version) = 0;
protected:
- Host_9() {}
- virtual ~Host_9() {}
+ Host_10() {}
+ virtual ~Host_10() {}
};
-class CDM_CLASS_API Host_10 {
+class CDM_CLASS_API Host_11 {
public:
- static const int kVersion = 10;
+ static const int kVersion = 11;
// Returns a Buffer* containing non-zero members upon success, or NULL on
// failure. The caller owns the Buffer* after this call. The buffer is not
@@ -1598,14 +1677,15 @@ class CDM_CLASS_API Host_10 {
virtual FileIO* CreateFileIO(FileIOClient* client) = 0;
// Requests a CdmProxy that proxies part of CDM functionalities to a different
- // entity, e.g. hardware CDM modules. A CDM instance can have at most one
+ // entity, e.g. a hardware CDM module. A CDM instance can have at most one
// CdmProxy throughout its lifetime, which must be requested and initialized
- // during CDM instance initialization time, i.e. before OnInitialized() is
- // called, to ensure proper connection of the CdmProxy and the media player
- // (e.g. hardware decoder). The CdmProxy is owned by the host and is
- // guaranteed to be valid throughout the CDM instance's lifetime. The CDM must
- // ensure that the |client| remain valid before the CDM instance is destroyed.
- // Returns null if CdmProxy is not supported, RequestCdmProxy() is called
+ // during CDM instance initialization time, i.e. in or after CDM::Initialize()
+ // and before OnInitialized() is called, to ensure proper connection of the
+ // CdmProxy and the media player (e.g. hardware decoder). The CdmProxy is
+ // owned by the host and is guaranteed to be valid throughout the CDM
+ // instance's lifetime. The CDM must ensure that the |client| remains valid
+ // before the CDM instance is destroyed. Returns null if CdmProxy is not
+ // supported, called before CDM::Initialize(), RequestCdmProxy() is called
// more than once, or called after the CDM instance has been initialized.
virtual CdmProxy* RequestCdmProxy(CdmProxyClient* client) = 0;
@@ -1619,83 +1699,8 @@ class CDM_CLASS_API Host_10 {
virtual void RequestStorageId(uint32_t version) = 0;
protected:
- Host_10() {}
- virtual ~Host_10() {}
-};
-
-// Represents a decrypted block that has not been decoded.
-class CDM_CLASS_API DecryptedBlock {
- public:
- virtual void SetDecryptedBuffer(Buffer* buffer) = 0;
- virtual Buffer* DecryptedBuffer() = 0;
-
- // TODO(tomfinegan): Figure out if timestamp is really needed. If it is not,
- // we can just pass Buffer pointers around.
- virtual void SetTimestamp(int64_t timestamp) = 0;
- virtual int64_t Timestamp() const = 0;
-
- protected:
- DecryptedBlock() {}
- virtual ~DecryptedBlock() {}
-};
-
-class CDM_CLASS_API VideoFrame {
- public:
- enum VideoPlane : uint32_t {
- kYPlane = 0,
- kUPlane = 1,
- kVPlane = 2,
- kMaxPlanes = 3,
- };
-
- virtual void SetFormat(VideoFormat format) = 0;
- virtual VideoFormat Format() const = 0;
-
- virtual void SetSize(cdm::Size size) = 0;
- virtual cdm::Size Size() const = 0;
-
- virtual void SetFrameBuffer(Buffer* frame_buffer) = 0;
- virtual Buffer* FrameBuffer() = 0;
-
- virtual void SetPlaneOffset(VideoPlane plane, uint32_t offset) = 0;
- virtual uint32_t PlaneOffset(VideoPlane plane) = 0;
-
- virtual void SetStride(VideoPlane plane, uint32_t stride) = 0;
- virtual uint32_t Stride(VideoPlane plane) = 0;
-
- virtual void SetTimestamp(int64_t timestamp) = 0;
- virtual int64_t Timestamp() const = 0;
-
- protected:
- VideoFrame() {}
- virtual ~VideoFrame() {}
-};
-
-// Represents decrypted and decoded audio frames. AudioFrames can contain
-// multiple audio output buffers, which are serialized into this format:
-//
-// |<------------------- serialized audio buffer ------------------->|
-// | int64_t timestamp | int64_t length | length bytes of audio data |
-//
-// For example, with three audio output buffers, the AudioFrames will look
-// like this:
-//
-// |<----------------- AudioFrames ------------------>|
-// | audio buffer 0 | audio buffer 1 | audio buffer 2 |
-class CDM_CLASS_API AudioFrames {
- public:
- virtual void SetFrameBuffer(Buffer* buffer) = 0;
- virtual Buffer* FrameBuffer() = 0;
-
- // The CDM must call this method, providing a valid format, when providing
- // frame buffers. Planar data should be stored end to end; e.g.,
- // |ch1 sample1||ch1 sample2|....|ch1 sample_last||ch2 sample1|...
- virtual void SetFormat(AudioFormat format) = 0;
- virtual AudioFormat Format() const = 0;
-
- protected:
- AudioFrames() {}
- virtual ~AudioFrames() {}
+ Host_11() {}
+ virtual ~Host_11() {}
};
} // namespace cdm
diff --git a/chromium/media/cdm/cbcs_decryptor.cc b/chromium/media/cdm/cbcs_decryptor.cc
new file mode 100644
index 00000000000..c1e2189d667
--- /dev/null
+++ b/chromium/media/cdm/cbcs_decryptor.cc
@@ -0,0 +1,184 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/cbcs_decryptor.h"
+
+#include <stdint.h>
+
+#include <algorithm>
+#include <string>
+#include <vector>
+
+#include "base/containers/span.h"
+#include "base/logging.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/numerics/checked_math.h"
+#include "crypto/symmetric_key.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/decrypt_config.h"
+#include "media/base/encryption_pattern.h"
+#include "media/base/subsample_entry.h"
+#include "media/cdm/aes_cbc_crypto.h"
+
+namespace media {
+
+namespace {
+
+constexpr size_t kAesBlockSizeInBytes = 16;
+
+// Decrypts |input_data| into |output_data|, using the pattern specified in
+// |pattern|. |pattern| only applies to full blocks. Any partial block at
+// the end is considered unencrypted. |output_data| must have enough room to
+// hold |input_data|.size() bytes.
+bool DecryptWithPattern(const crypto::SymmetricKey& key,
+ base::span<const uint8_t> iv,
+ const EncryptionPattern& pattern,
+ base::span<const uint8_t> input_data,
+ uint8_t* output_data) {
+ // The AES_CBC decryption is reset for each subsample.
+ AesCbcCrypto aes_cbc_crypto;
+ if (!aes_cbc_crypto.Initialize(key, iv))
+ return false;
+
+ // |total_blocks| is the number of blocks in the buffer, ignoring any
+ // partial block at the end. |remaining_bytes| is the number of bytes
+ // in the partial block at the end of the buffer, if any.
+ size_t total_blocks = input_data.size_bytes() / kAesBlockSizeInBytes;
+ size_t remaining_bytes = input_data.size_bytes() % kAesBlockSizeInBytes;
+
+ size_t crypt_byte_block =
+ base::strict_cast<size_t>(pattern.crypt_byte_block());
+ size_t skip_byte_block = base::strict_cast<size_t>(pattern.skip_byte_block());
+
+ // |crypt_byte_block| and |skip_byte_block| come from 4 bit values, so fail
+ // if these are too large.
+ if (crypt_byte_block >= 16 || skip_byte_block >= 16)
+ return false;
+
+ if (crypt_byte_block == 0 && skip_byte_block == 0) {
+ // From ISO/IEC 23001-7:2016(E), section 9.6.1:
+ // "When the fields default_crypt_byte_block and default_skip_byte_block
+ // in a version 1 Track Encryption Box ('tenc') are non-zero numbers,
+ // pattern encryption SHALL be applied."
+ // So for the pattern 0:0, assume that all blocks are encrypted.
+ crypt_byte_block = total_blocks;
+ }
+
+ // Apply the pattern to |input_data|.
+ // Example (using Pattern(2,3), Ex is encrypted, Ux unencrypted)
+ // input_data: |E1|E2|U3|U4|U5|E6|E7|U8|U9|U10|E11|
+ // We must decrypt 2 blocks, then simply copy the next 3 blocks, and
+ // repeat until the end. Note that the input does not have to contain
+ // a full pattern at the end (although see the comment below).
+ size_t blocks_processed = 0;
+ const uint8_t* src = input_data.data();
+ uint8_t* dest = output_data;
+ bool is_encrypted_blocks = false;
+ while (blocks_processed < total_blocks) {
+ is_encrypted_blocks = !is_encrypted_blocks;
+ size_t blocks_to_process =
+ std::min(is_encrypted_blocks ? crypt_byte_block : skip_byte_block,
+ total_blocks - blocks_processed);
+
+ if (blocks_to_process == 0)
+ continue;
+
+ size_t bytes_to_process = blocks_to_process * kAesBlockSizeInBytes;
+
+ // From ISO/IEC 23001-7:2016(E), section 9.6.1:
+ // "If the last Block pattern in a Subsample is incomplete, the partial
+ // pattern SHALL be followed until truncated by the BytesOfProtectedData
+ // size and any partial crypt_byte_block SHALL remain unencrypted."
+ // So if the last Block pattern is incomplete, it needs to have at least
+ // |crypt_byte_block| blocks to be considered encrypted. If it doesn't,
+ // it is treated as unencrypted and simply copied over.
+ if (is_encrypted_blocks && blocks_to_process == crypt_byte_block) {
+ if (!aes_cbc_crypto.Decrypt(base::make_span(src, bytes_to_process),
+ dest)) {
+ return false;
+ }
+ } else {
+ memcpy(dest, src, bytes_to_process);
+ }
+
+ blocks_processed += blocks_to_process;
+ src += bytes_to_process;
+ dest += bytes_to_process;
+ }
+
+ // Any partial block data remaining in this subsample is considered
+ // unencrypted so simply copy it into |dest|.
+ if (remaining_bytes > 0)
+ memcpy(dest, src, remaining_bytes);
+
+ return true;
+}
+
+} // namespace
+
+scoped_refptr<DecoderBuffer> DecryptCbcsBuffer(
+ const DecoderBuffer& input,
+ const crypto::SymmetricKey& key) {
+ size_t sample_size = input.data_size();
+ DCHECK(sample_size) << "No data to decrypt.";
+
+ const DecryptConfig* decrypt_config = input.decrypt_config();
+ DCHECK(decrypt_config) << "No need to call Decrypt() on unencrypted buffer.";
+ DCHECK_EQ(EncryptionMode::kCbcs, decrypt_config->encryption_mode());
+
+ DCHECK(decrypt_config->HasPattern());
+ const EncryptionPattern pattern =
+ decrypt_config->encryption_pattern().value();
+
+ // Decrypted data will be the same size as |input| size.
+ auto buffer = base::MakeRefCounted<DecoderBuffer>(sample_size);
+ uint8_t* output_data = buffer->writable_data();
+ buffer->set_timestamp(input.timestamp());
+ buffer->set_duration(input.duration());
+ buffer->set_is_key_frame(input.is_key_frame());
+ buffer->CopySideDataFrom(input.side_data(), input.side_data_size());
+
+ const std::vector<SubsampleEntry>& subsamples = decrypt_config->subsamples();
+ if (subsamples.empty()) {
+ // Assume the whole buffer is encrypted.
+ return DecryptWithPattern(
+ key, base::as_bytes(base::make_span(decrypt_config->iv())),
+ pattern, base::make_span(input.data(), sample_size), output_data)
+ ? buffer
+ : nullptr;
+ }
+
+ if (!VerifySubsamplesMatchSize(subsamples, sample_size)) {
+ DVLOG(1) << "Subsample sizes do not equal input size";
+ return nullptr;
+ }
+
+ const uint8_t* src = input.data();
+ uint8_t* dest = output_data;
+ for (const auto& subsample : subsamples) {
+ if (subsample.clear_bytes) {
+ DVLOG(4) << "Copying clear_bytes: " << subsample.clear_bytes;
+ memcpy(dest, src, subsample.clear_bytes);
+ src += subsample.clear_bytes;
+ dest += subsample.clear_bytes;
+ }
+
+ if (subsample.cypher_bytes) {
+ DVLOG(4) << "Processing cypher_bytes: " << subsample.cypher_bytes
+ << ", pattern(" << pattern.crypt_byte_block() << ","
+ << pattern.skip_byte_block() << ")";
+ if (!DecryptWithPattern(
+ key, base::as_bytes(base::make_span(decrypt_config->iv())),
+ pattern, base::make_span(src, subsample.cypher_bytes), dest)) {
+ return nullptr;
+ }
+ src += subsample.cypher_bytes;
+ dest += subsample.cypher_bytes;
+ }
+ }
+
+ return buffer;
+}
+
+} // namespace media
diff --git a/chromium/media/cdm/cbcs_decryptor.h b/chromium/media/cdm/cbcs_decryptor.h
new file mode 100644
index 00000000000..702eb7a0879
--- /dev/null
+++ b/chromium/media/cdm/cbcs_decryptor.h
@@ -0,0 +1,55 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CDM_CBCS_DECRYPTOR_H_
+#define MEDIA_CDM_CBCS_DECRYPTOR_H_
+
+#include "base/memory/ref_counted.h"
+#include "media/base/media_export.h"
+
+namespace crypto {
+class SymmetricKey;
+}
+
+namespace media {
+class DecoderBuffer;
+
+// This class implements pattern decryption as specified by
+// ISO/IEC 23001-7:2016, section 10.4 (https://www.iso.org),
+// using AES-CBC-128 decryption.
+//
+// Subsample encryption divides each input buffer into one or more contiguous
+// subsamples. Each subsample has an unprotected part (unencrypted) followed
+// by a protected part (encrypted), only one of which may be zero bytes in
+// length. For example:
+// | DecoderBuffer.data() |
+// | Subsample#1 | Subsample#2 | Subsample#3 |
+// |uuuuu|eeeeeeeeee|uuuu|eeeeeeeeeeee|uu|eeeeeeeeeeee|
+// Within the protected part of each subsample, the data is treated as a
+// chain of 16 byte cipher blocks, starting with the initialization vector
+// associated with the sample. The IV is applied to the first encrypted
+// cipher block of each subsample.
+//
+// A partial block at the end of a subsample (if any) is unencrypted.
+//
+// This supports pattern decryption, where a pattern of encrypted and clear
+// (skipped) blocks is used. The Pattern is specified with each DecoderBuffer
+// (in the DecryptConfig). Typically encrypted video tracks use a pattern of
+// (1,9) which indicates that one 16 byte block is encrypted followed by 9
+// blocks unencrypted, and then the pattern repeats through all the blocks in
+// the protected part. Tracks other than video usually use full-sample
+// encryption.
+//
+// If a pattern is not specified, the protected part will use full-sample
+// encryption.
+
+// Decrypts the encrypted buffer |input| using |key| and values found in
+// |input|->DecryptConfig. The key size must be 128 bits.
+MEDIA_EXPORT scoped_refptr<DecoderBuffer> DecryptCbcsBuffer(
+ const DecoderBuffer& input,
+ const crypto::SymmetricKey& key);
+
+} // namespace media
+
+#endif // MEDIA_CDM_CBCS_DECRYPTOR_H_
diff --git a/chromium/media/cdm/cbcs_decryptor_fuzzer.cc b/chromium/media/cdm/cbcs_decryptor_fuzzer.cc
new file mode 100644
index 00000000000..8b3bdad5ebe
--- /dev/null
+++ b/chromium/media/cdm/cbcs_decryptor_fuzzer.cc
@@ -0,0 +1,75 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdint.h>
+
+#include <array>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "crypto/symmetric_key.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/encryption_pattern.h"
+#include "media/base/subsample_entry.h"
+#include "media/cdm/cbcs_decryptor.h"
+
+const std::array<uint8_t, 16> kKey = {0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
+ 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+ 0x10, 0x11, 0x12, 0x13};
+
+const std::array<uint8_t, 16> kIv = {0x20, 0x21, 0x22, 0x23, 0x24, 0x25,
+ 0x26, 0x27, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00};
+
+// For disabling noisy logging.
+struct Environment {
+ Environment() { logging::SetMinLogLevel(logging::LOG_FATAL); }
+};
+
+Environment* env = new Environment();
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ // From the data provided:
+ // 1) Use the first byte to determine how much of the buffer is "clear".
+ // 2) Use the second byte to determine the pattern.
+ // 3) Rest of the buffer is the input data (which must be at least 1 byte).
+ // So the input buffer needs at least 3 bytes.
+ if (size < 3)
+ return 0;
+
+ const uint8_t clear_bytes = data[0];
+ const uint8_t encryption_pattern = data[1];
+ data += 2;
+ size -= 2;
+
+ static std::unique_ptr<crypto::SymmetricKey> key =
+ crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES,
+ std::string(std::begin(kKey), std::end(kKey)));
+
+ // |clear_bytes| is used to determine how much of the buffer is "clear".
+ // Since the code checks SubsampleEntries, use |clear_bytes| as the actual
+ // number of bytes clear, and the rest as encrypted. To avoid size_t problems,
+ // only set |subsamples| if |clear_bytes| <= |size|. If |subsamples| is
+ // empty, the complete buffer is treated as encrypted.
+ std::vector<media::SubsampleEntry> subsamples;
+ if (clear_bytes <= size)
+ subsamples.push_back({clear_bytes, size - clear_bytes});
+
+ // |encryption_pattern| is used to determine the encryption pattern. Since
+ // |crypt_byte_block| must be > 0, use 1 for it. |skip_byte_block| can be 0.
+ // This will try patterns (1,0), (1,1), ... (1,9), which should be sufficient.
+ media::EncryptionPattern pattern(1, encryption_pattern % 10);
+
+ auto encrypted_buffer = media::DecoderBuffer::CopyFrom(data, size);
+
+ // Key_ID is never used.
+ encrypted_buffer->set_decrypt_config(media::DecryptConfig::CreateCbcsConfig(
+ "key_id", std::string(std::begin(kIv), std::end(kIv)), subsamples,
+ pattern));
+
+ media::DecryptCbcsBuffer(*encrypted_buffer, *key);
+ return 0;
+}
diff --git a/chromium/media/cdm/cbcs_decryptor_unittest.cc b/chromium/media/cdm/cbcs_decryptor_unittest.cc
new file mode 100644
index 00000000000..da2f48a0cd1
--- /dev/null
+++ b/chromium/media/cdm/cbcs_decryptor_unittest.cc
@@ -0,0 +1,408 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/cbcs_decryptor.h"
+
+#include <array>
+#include <memory>
+
+#include "base/containers/span.h"
+#include "base/optional.h"
+#include "base/stl_util.h"
+#include "base/time/time.h"
+#include "crypto/encryptor.h"
+#include "crypto/symmetric_key.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/decrypt_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+namespace {
+
+// Pattern decryption uses 16-byte blocks.
+constexpr size_t kBlockSize = 16;
+
+// Keys and IVs have to be 128 bits.
+const std::array<uint8_t, 16> kKey = {0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
+ 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+ 0x10, 0x11, 0x12, 0x13};
+
+const std::array<uint8_t, 16> kIv = {0x20, 0x21, 0x22, 0x23, 0x24, 0x25,
+ 0x26, 0x27, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00};
+
+const std::array<uint8_t, kBlockSize> kOneBlock = {'a', 'b', 'c', 'd', 'e', 'f',
+ 'g', 'h', 'i', 'j', 'k', 'l',
+ 'm', 'n', 'o', 'p'};
+
+const std::array<uint8_t, 6> kPartialBlock = {'a', 'b', 'c', 'd', 'e', 'f'};
+static_assert(base::size(kPartialBlock) != kBlockSize, "kPartialBlock wrong");
+
+std::string MakeString(const std::vector<uint8_t>& chars) {
+ return std::string(chars.begin(), chars.end());
+}
+
+// Combine multiple std::vector<uint8_t> into one.
+std::vector<uint8_t> Combine(const std::vector<std::vector<uint8_t>>& inputs) {
+ std::vector<uint8_t> result;
+ for (const auto& input : inputs)
+ result.insert(result.end(), input.begin(), input.end());
+
+ return result;
+}
+
+// Extract the |n|th block of |input|. The first block is number 1.
+std::vector<uint8_t> GetBlock(size_t n, const std::vector<uint8_t>& input) {
+ DCHECK_LE(n, input.size() / kBlockSize);
+ auto it = input.begin() + ((n - 1) * kBlockSize);
+ return std::vector<uint8_t>(it, it + kBlockSize);
+}
+
+// Returns a std::vector<uint8_t> containing |count| copies of |input|.
+std::vector<uint8_t> Repeat(const std::vector<uint8_t>& input, size_t count) {
+ std::vector<uint8_t> result;
+ for (size_t i = 0; i < count; ++i)
+ result.insert(result.end(), input.begin(), input.end());
+ return result;
+}
+
+} // namespace
+
+class CbcsDecryptorTest : public testing::Test {
+ public:
+ CbcsDecryptorTest()
+ : key_(crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES,
+ std::string(std::begin(kKey), std::end(kKey)))),
+ iv_(std::begin(kIv), std::end(kIv)),
+ one_block_(std::begin(kOneBlock), std::end(kOneBlock)),
+ partial_block_(std::begin(kPartialBlock), std::end(kPartialBlock)) {}
+
+ // Encrypt |original| using AES-CBC encryption with |key| and |iv|.
+ std::vector<uint8_t> Encrypt(const std::vector<uint8_t>& original,
+ const crypto::SymmetricKey& key,
+ const std::string& iv) {
+ // This code uses crypto::Encryptor to encrypt |original| rather than
+ // calling EVP_EncryptInit_ex() / EVP_EncryptUpdate() / etc. This is done
+ // for simplicity, as the crypto:: code wraps all the calls up nicely.
+ // However, for AES-CBC encryption, the crypto:: code does add padding to
+ // the output, which is simply stripped off.
+ crypto::Encryptor encryptor;
+ EXPECT_TRUE(encryptor.Init(&key, crypto::Encryptor::CBC, iv));
+
+ std::string ciphertext;
+ EXPECT_TRUE(encryptor.Encrypt(MakeString(original), &ciphertext));
+
+ // CBC encryption adds a block of padding at the end, so discard it.
+ DCHECK_GT(ciphertext.size(), original.size());
+ ciphertext.resize(original.size());
+
+ return std::vector<uint8_t>(ciphertext.begin(), ciphertext.end());
+ }
+
+ // Returns a 'cbcs' DecoderBuffer using the data and other parameters.
+ scoped_refptr<DecoderBuffer> CreateEncryptedBuffer(
+ const std::vector<uint8_t>& data,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsample_entries,
+ base::Optional<EncryptionPattern> encryption_pattern) {
+ EXPECT_FALSE(data.empty());
+ EXPECT_FALSE(iv.empty());
+
+ auto encrypted_buffer = DecoderBuffer::CopyFrom(data.data(), data.size());
+
+ // Key_ID is never used.
+ encrypted_buffer->set_decrypt_config(DecryptConfig::CreateCbcsConfig(
+ "key_id", iv, subsample_entries, encryption_pattern));
+ return encrypted_buffer;
+ }
+
+ // Calls DecryptCbcsBuffer() to decrypt |encrypted| using |key|,
+ // and then returns the data in the decrypted buffer.
+ std::vector<uint8_t> DecryptWithKey(scoped_refptr<DecoderBuffer> encrypted,
+ const crypto::SymmetricKey& key) {
+ auto decrypted = DecryptCbcsBuffer(*encrypted, key);
+
+ std::vector<uint8_t> decrypted_data;
+ if (decrypted.get()) {
+ EXPECT_TRUE(decrypted->data_size());
+ decrypted_data.assign(decrypted->data(),
+ decrypted->data() + decrypted->data_size());
+ }
+
+ return decrypted_data;
+ }
+
+ // Constants for testing.
+ std::unique_ptr<crypto::SymmetricKey> key_;
+ const std::string iv_;
+ const std::vector<uint8_t> one_block_;
+ const std::vector<uint8_t> partial_block_;
+};
+
+TEST_F(CbcsDecryptorTest, OneBlock) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+ DCHECK_EQ(kBlockSize, encrypted_block.size());
+
+ // Only 1 subsample, all encrypted data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(1, 9));
+ EXPECT_EQ(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, AdditionalData) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+ DCHECK_EQ(kBlockSize, encrypted_block.size());
+
+ // Only 1 subsample, all encrypted data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(1, 9));
+ encrypted_buffer->set_timestamp(base::TimeDelta::FromDays(2));
+ encrypted_buffer->set_duration(base::TimeDelta::FromMinutes(5));
+ encrypted_buffer->set_is_key_frame(true);
+ encrypted_buffer->CopySideDataFrom(encrypted_block.data(),
+ encrypted_block.size());
+
+ auto decrypted_buffer = DecryptCbcsBuffer(*encrypted_buffer, *key_);
+ EXPECT_EQ(encrypted_buffer->timestamp(), decrypted_buffer->timestamp());
+ EXPECT_EQ(encrypted_buffer->duration(), decrypted_buffer->duration());
+ EXPECT_EQ(encrypted_buffer->end_of_stream(),
+ decrypted_buffer->end_of_stream());
+ EXPECT_EQ(encrypted_buffer->is_key_frame(), decrypted_buffer->is_key_frame());
+ EXPECT_EQ(encrypted_buffer->side_data_size(),
+ decrypted_buffer->side_data_size());
+ EXPECT_EQ(base::make_span(encrypted_buffer->side_data(),
+ encrypted_buffer->side_data_size()),
+ base::make_span(decrypted_buffer->side_data(),
+ decrypted_buffer->side_data_size()));
+}
+
+TEST_F(CbcsDecryptorTest, DifferentPattern) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+ DCHECK_EQ(kBlockSize, encrypted_block.size());
+
+ // Only 1 subsample, all encrypted data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(1, 0));
+ EXPECT_EQ(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, EmptyPattern) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+ DCHECK_EQ(kBlockSize, encrypted_block.size());
+
+ // Only 1 subsample, all encrypted data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ // Pattern 0:0 treats the buffer as all encrypted.
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(0, 0));
+ EXPECT_EQ(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, PatternTooLarge) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+ DCHECK_EQ(kBlockSize, encrypted_block.size());
+
+ // Only 1 subsample, all encrypted data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ // Pattern 100:0 is too large, so decryption will fail.
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(100, 0));
+ EXPECT_EQ(std::vector<uint8_t>(), DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, NoSubsamples) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+ DCHECK_EQ(kBlockSize, encrypted_block.size());
+
+ std::vector<SubsampleEntry> subsamples = {};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(1, 9));
+ EXPECT_EQ(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, BadSubsamples) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ // Subsample size > data size.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size() + 1}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(1, 0));
+ EXPECT_EQ(std::vector<uint8_t>(), DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, InvalidIv) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ // Use an invalid IV for decryption. Call should succeed, but return
+ // something other than the original data.
+ std::string invalid_iv(iv_.size(), 'a');
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, invalid_iv, subsamples, EncryptionPattern(1, 0));
+ EXPECT_NE(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, InvalidKey) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ // Use a different key for decryption. Call should succeed, but return
+ // something other than the original data.
+ std::unique_ptr<crypto::SymmetricKey> bad_key = crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES, std::string(base::size(kKey), 'b'));
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(1, 0));
+ EXPECT_NE(one_block_, DecryptWithKey(encrypted_buffer, *bad_key));
+}
+
+TEST_F(CbcsDecryptorTest, PartialBlock) {
+ // Only 1 subsample, all "encrypted" data. However, as it's not a full block,
+ // it will be treated as unencrypted.
+ std::vector<SubsampleEntry> subsamples = {{0, partial_block_.size()}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(partial_block_, iv_, subsamples,
+ EncryptionPattern(1, 0));
+ EXPECT_EQ(partial_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, SingleBlockWithExtraData) {
+ // Create some data that is longer than a single block. The full block will
+ // be encrypted, but the extra data at the end will be considered unencrypted.
+ auto encrypted_block =
+ Combine({Encrypt(one_block_, *key_, iv_), partial_block_});
+ auto expected_result = Combine({one_block_, partial_block_});
+
+ // Only 1 subsample, all "encrypted" data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(1, 0));
+ EXPECT_EQ(expected_result, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, SkipBlock) {
+ // Only 1 subsample, but all unencrypted data.
+ std::vector<SubsampleEntry> subsamples = {{one_block_.size(), 0}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(one_block_, iv_, subsamples,
+ EncryptionPattern(1, 0));
+ EXPECT_EQ(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, MultipleBlocks) {
+ // Encrypt 2 copies of |one_block_| together using kKey and kIv.
+ auto encrypted_block = Encrypt(Repeat(one_block_, 2), *key_, iv_);
+ DCHECK_EQ(2 * kBlockSize, encrypted_block.size());
+
+ // 1 subsample, 4 blocks in (1,1) pattern.
+ // Encrypted blocks come from |encrypted_block|.
+ // data: | enc1 | clear | enc2 | clear |
+ // subsamples: | subsample#1 |
+ // |eeeeeeeeeeeeeeeeeeeeeeeeeeeee|
+ auto input_data = Combine({GetBlock(1, encrypted_block), one_block_,
+ GetBlock(2, encrypted_block), one_block_});
+ auto expected_result = Repeat(one_block_, 4);
+ std::vector<SubsampleEntry> subsamples = {{0, 4 * kBlockSize}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(input_data, iv_, subsamples,
+ EncryptionPattern(1, 1));
+ EXPECT_EQ(expected_result, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, PartialPattern) {
+ // Encrypt 4 copies of |one_block_| together using kKey and kIv.
+ auto encrypted_block = Encrypt(Repeat(one_block_, 4), *key_, iv_);
+ DCHECK_EQ(4 * kBlockSize, encrypted_block.size());
+
+ // 1 subsample, 4 blocks in (8,2) pattern. As there are not 8 blocks, the
+ // whole buffer will be considered unencrypted.
+ std::vector<SubsampleEntry> subsamples = {{0, 4 * kBlockSize}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(
+ encrypted_block, iv_, subsamples, EncryptionPattern(8, 2));
+ EXPECT_EQ(encrypted_block, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, SkipBlocks) {
+ // Encrypt 5 blocks together using kKey and kIv.
+ auto encrypted_block = Encrypt(Repeat(one_block_, 5), *key_, iv_);
+ DCHECK_EQ(5 * kBlockSize, encrypted_block.size());
+
+ // 1 subsample, 1 unencrypted block followed by 7 blocks in (2,1) pattern.
+ // Encrypted blocks come from |encrypted_block|.
+ // data: | clear | enc1 | enc2 | clear | enc3 | enc4 | clear | enc5 |
+ // subsamples: | subsample#1 |
+ // |uuuuuuu eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee|
+ // Note that the last part only contains one encrypted block. Since it
+ // doesn't contain a full 2 blocks, it will not be decrypted.
+ auto input_data = Combine(
+ {one_block_, GetBlock(1, encrypted_block), GetBlock(2, encrypted_block),
+ one_block_, GetBlock(3, encrypted_block), GetBlock(4, encrypted_block),
+ one_block_, GetBlock(5, encrypted_block)});
+ auto expected_result =
+ Combine({Repeat(one_block_, 7), GetBlock(5, encrypted_block)});
+ std::vector<SubsampleEntry> subsamples = {{kBlockSize, 7 * kBlockSize}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(input_data, iv_, subsamples,
+ EncryptionPattern(2, 1));
+ EXPECT_EQ(expected_result, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, MultipleSubsamples) {
+ // Encrypt |one_block_| using kKey and kIv.
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+ DCHECK_EQ(kBlockSize, encrypted_block.size());
+
+ // 3 subsamples, each 1 block of |encrypted_block|.
+ // data: | encrypted | encrypted | encrypted |
+ // subsamples: | subsample#1 | subsample#2 | subsample#3 |
+ // |eeeeeeeeeeeee|eeeeeeeeeeeee|eeeeeeeeeeeee|
+ auto input_data = Repeat(encrypted_block, 3);
+ auto expected_result = Repeat(one_block_, 3);
+ std::vector<SubsampleEntry> subsamples = {
+ {0, kBlockSize}, {0, kBlockSize}, {0, kBlockSize}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(input_data, iv_, subsamples,
+ EncryptionPattern(1, 0));
+ EXPECT_EQ(expected_result, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CbcsDecryptorTest, MultipleSubsamplesWithClearBytes) {
+ // Encrypt |one_block_| using kKey and kIv.
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+ DCHECK_EQ(kBlockSize, encrypted_block.size());
+
+ // Combine into alternating clear/encrypted blocks in 3 subsamples. Split
+ // the second and third clear blocks into part of encrypted data of the
+ // previous block (which as a partial block will be considered unencrypted).
+ // data: | clear | encrypted | clear | encrypted | clear | encrypted |
+ // subsamples: | subsample#1 | subsample#2 | subsample#3 |
+ // |uuuuuuu eeeeeeeeeeee|uuuuuu eeeeeeeeeeeeeeee|uu eeeeeeeeeee|
+ auto input_data = Combine({one_block_, encrypted_block, one_block_,
+ encrypted_block, one_block_, encrypted_block});
+ auto expected_result = Repeat(one_block_, 6);
+ std::vector<SubsampleEntry> subsamples = {{kBlockSize, kBlockSize + 1},
+ {kBlockSize - 1, kBlockSize + 10},
+ {kBlockSize - 10, kBlockSize}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(input_data, iv_, subsamples,
+ EncryptionPattern(1, 0));
+ EXPECT_EQ(expected_result, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+} // namespace media
diff --git a/chromium/media/cdm/cdm_adapter.cc b/chromium/media/cdm/cdm_adapter.cc
index 322cd988b04..6e9cdfb8c6a 100644
--- a/chromium/media/cdm/cdm_adapter.cc
+++ b/chromium/media/cdm/cdm_adapter.cc
@@ -11,7 +11,6 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -19,6 +18,7 @@
#include "base/trace_event/trace_event.h"
#include "components/crash/core/common/crash_key.h"
#include "media/base/audio_decoder_config.h"
+#include "media/base/callback_registry.h"
#include "media/base/cdm_initialized_promise.h"
#include "media/base/cdm_key_information.h"
#include "media/base/channel_layout.h"
@@ -31,9 +31,9 @@
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
#include "media/base/video_types.h"
+#include "media/base/video_util.h"
#include "media/cdm/cdm_auxiliary_helper.h"
#include "media/cdm/cdm_helpers.h"
-#include "media/cdm/cdm_module.h"
#include "media/cdm/cdm_wrapper.h"
#include "ui/gfx/geometry/rect.h"
#include "url/origin.h"
@@ -49,6 +49,13 @@ constexpr int kSizeKBMin = 1;
constexpr int kSizeKBMax = 512 * 1024; // 512MB
constexpr int kSizeKBBuckets = 100;
+// Only support version 1 of Storage Id. However, the "latest" version can also
+// be requested.
+constexpr uint32_t kRequestLatestStorageIdVersion = 0;
+constexpr uint32_t kCurrentStorageIdVersion = 1;
+static_assert(kCurrentStorageIdVersion < 0x80000000,
+ "Versions 0x80000000 and above are reserved.");
+
cdm::HdcpVersion ToCdmHdcpVersion(HdcpVersion hdcp_version) {
switch (hdcp_version) {
case media::HdcpVersion::kHdcpVersionNone:
@@ -121,29 +128,6 @@ CdmPromise::Exception ToMediaExceptionType(cdm::Exception exception) {
return CdmPromise::Exception::INVALID_STATE_ERROR;
}
-cdm::Exception ToCdmExceptionType(cdm::Error error) {
- switch (error) {
- case cdm::kNotSupportedError:
- return cdm::kExceptionNotSupportedError;
- case cdm::kInvalidStateError:
- return cdm::kExceptionInvalidStateError;
- case cdm::kInvalidAccessError:
- return cdm::kExceptionTypeError;
- case cdm::kQuotaExceededError:
- return cdm::kExceptionQuotaExceededError;
-
- // TODO(jrummell): Remove these once CDM_8 is no longer supported.
- // https://crbug.com/737296.
- case cdm::kUnknownError:
- case cdm::kClientError:
- case cdm::kOutputError:
- return cdm::kExceptionNotSupportedError;
- }
-
- NOTREACHED() << "Unexpected cdm::Error " << error;
- return cdm::kExceptionInvalidStateError;
-}
-
CdmMessageType ToMediaMessageType(cdm::MessageType message_type) {
switch (message_type) {
case cdm::kLicenseRequest:
@@ -338,9 +322,22 @@ cdm::EncryptionScheme ToCdmEncryptionScheme(const EncryptionScheme& scheme) {
return cdm::EncryptionScheme::kCenc;
break;
case EncryptionScheme::CIPHER_MODE_AES_CBC:
- if (scheme.pattern().IsInEffect())
- return cdm::EncryptionScheme::kCbcs;
- break;
+ // Pattern should be required for 'cbcs' but is currently optional.
+ return cdm::EncryptionScheme::kCbcs;
+ }
+
+ NOTREACHED();
+ return cdm::EncryptionScheme::kUnencrypted;
+}
+
+cdm::EncryptionScheme ToCdmEncryptionScheme(const EncryptionMode& mode) {
+ switch (mode) {
+ case EncryptionMode::kUnencrypted:
+ return cdm::EncryptionScheme::kUnencrypted;
+ case EncryptionMode::kCenc:
+ return cdm::EncryptionScheme::kCenc;
+ case EncryptionMode::kCbcs:
+ return cdm::EncryptionScheme::kCbcs;
}
NOTREACHED();
@@ -407,11 +404,13 @@ void ToCdmInputBuffer(const DecoderBuffer& encrypted_buffer,
input_buffer->subsamples = subsamples->data();
input_buffer->num_subsamples = num_subsamples;
- // TODO(crbug.com/658026): Add encryption scheme to DecoderBuffer.
- input_buffer->encryption_scheme = (decrypt_config->is_encrypted())
- ? cdm::EncryptionScheme::kCenc
- : cdm::EncryptionScheme::kUnencrypted;
- input_buffer->pattern = {0, 0};
+ input_buffer->encryption_scheme =
+ ToCdmEncryptionScheme(decrypt_config->encryption_mode());
+ if (decrypt_config->HasPattern()) {
+ input_buffer->pattern = {
+ decrypt_config->encryption_pattern()->crypt_byte_block(),
+ decrypt_config->encryption_pattern()->skip_byte_block()};
+ }
}
void* GetCdmHost(int host_interface_version, void* user_data) {
@@ -419,32 +418,21 @@ void* GetCdmHost(int host_interface_version, void* user_data) {
return nullptr;
static_assert(
- cdm::ContentDecryptionModule::Host::kVersion == cdm::Host_9::kVersion,
- "update the code below");
-
- // Ensure IsSupportedCdmHostVersion matches implementation of this function.
- // Always update this DCHECK when updating this function.
- // If this check fails, update this function and DCHECK or update
- // IsSupportedCdmHostVersion.
-
- // TODO(xhwang): Static assert these at compile time.
- const int kMinVersion = cdm::ContentDecryptionModule_8::kVersion;
- const int kMaxVersion = cdm::ContentDecryptionModule_10::kVersion;
- DCHECK(!IsSupportedCdmInterfaceVersion(kMinVersion - 1));
- for (int version = kMinVersion; version <= kMaxVersion; ++version)
- DCHECK(IsSupportedCdmInterfaceVersion(version));
- DCHECK(!IsSupportedCdmInterfaceVersion(kMaxVersion + 1));
+ CheckSupportedCdmHostVersions(cdm::Host_9::kVersion,
+ cdm::Host_11::kVersion),
+ "Mismatch between GetCdmHost() and IsSupportedCdmHostVersion()");
+
DCHECK(IsSupportedCdmHostVersion(host_interface_version));
CdmAdapter* cdm_adapter = static_cast<CdmAdapter*>(user_data);
DVLOG(1) << "Create CDM Host with version " << host_interface_version;
switch (host_interface_version) {
- case cdm::Host_8::kVersion:
- return static_cast<cdm::Host_8*>(cdm_adapter);
case cdm::Host_9::kVersion:
return static_cast<cdm::Host_9*>(cdm_adapter);
case cdm::Host_10::kVersion:
return static_cast<cdm::Host_10*>(cdm_adapter);
+ case cdm::Host_11::kVersion:
+ return static_cast<cdm::Host_11*>(cdm_adapter);
default:
NOTREACHED() << "Unexpected host interface version "
<< host_interface_version;
@@ -482,6 +470,7 @@ void CdmAdapter::Create(
const std::string& key_system,
const url::Origin& security_origin,
const CdmConfig& cdm_config,
+ CreateCdmFunc create_cdm_func,
std::unique_ptr<CdmAuxiliaryHelper> helper,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -495,8 +484,8 @@ void CdmAdapter::Create(
DCHECK(!session_expiration_update_cb.is_null());
scoped_refptr<CdmAdapter> cdm =
- new CdmAdapter(key_system, security_origin, cdm_config, std::move(helper),
- session_message_cb, session_closed_cb,
+ new CdmAdapter(key_system, security_origin, cdm_config, create_cdm_func,
+ std::move(helper), session_message_cb, session_closed_cb,
session_keys_change_cb, session_expiration_update_cb);
// |cdm| ownership passed to the promise.
@@ -507,6 +496,7 @@ CdmAdapter::CdmAdapter(
const std::string& key_system,
const url::Origin& security_origin,
const CdmConfig& cdm_config,
+ CreateCdmFunc create_cdm_func,
std::unique_ptr<CdmAuxiliaryHelper> helper,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -515,26 +505,32 @@ CdmAdapter::CdmAdapter(
: key_system_(key_system),
origin_string_(security_origin.Serialize()),
cdm_config_(cdm_config),
+ create_cdm_func_(create_cdm_func),
+ helper_(std::move(helper)),
session_message_cb_(session_message_cb),
session_closed_cb_(session_closed_cb),
session_keys_change_cb_(session_keys_change_cb),
session_expiration_update_cb_(session_expiration_update_cb),
- helper_(std::move(helper)),
task_runner_(base::ThreadTaskRunnerHandle::Get()),
pool_(new AudioBufferMemoryPool()),
weak_factory_(this) {
+ DVLOG(1) << __func__;
+
DCHECK(!key_system_.empty());
- DCHECK(!session_message_cb_.is_null());
- DCHECK(!session_closed_cb_.is_null());
- DCHECK(!session_keys_change_cb_.is_null());
- DCHECK(!session_expiration_update_cb_.is_null());
+ DCHECK(create_cdm_func_);
DCHECK(helper_);
+ DCHECK(session_message_cb_);
+ DCHECK(session_closed_cb_);
+ DCHECK(session_keys_change_cb_);
+ DCHECK(session_expiration_update_cb_);
helper_->SetFileReadCB(
base::Bind(&CdmAdapter::OnFileRead, weak_factory_.GetWeakPtr()));
}
CdmAdapter::~CdmAdapter() {
+ DVLOG(1) << __func__;
+
// Reject any outstanding promises and close all the existing sessions.
cdm_promise_adapter_.Clear();
@@ -547,13 +543,7 @@ CdmAdapter::~CdmAdapter() {
CdmWrapper* CdmAdapter::CreateCdmInstance(const std::string& key_system) {
DCHECK(task_runner_->BelongsToCurrentThread());
- CreateCdmFunc create_cdm_func = CdmModule::GetInstance()->GetCreateCdmFunc();
- if (!create_cdm_func) {
- LOG(ERROR) << "Failed to get CreateCdmFunc!";
- return nullptr;
- }
-
- CdmWrapper* cdm = CdmWrapper::Create(create_cdm_func, key_system.data(),
+ CdmWrapper* cdm = CdmWrapper::Create(create_cdm_func_, key_system.data(),
key_system.size(), GetCdmHost, this);
DVLOG(1) << "CDM instance for " + key_system + (cdm ? "" : " could not be") +
" created.";
@@ -563,14 +553,15 @@ CdmWrapper* CdmAdapter::CreateCdmInstance(const std::string& key_system) {
// instead of a sparse histogram is okay. The following DCHECK asserts this.
DCHECK(cdm->GetInterfaceVersion() <= 30);
UMA_HISTOGRAM_ENUMERATION("Media.EME.CdmInterfaceVersion",
- cdm->GetInterfaceVersion(),
- cdm::ContentDecryptionModule::kVersion + 1);
+ cdm->GetInterfaceVersion(), 30);
}
return cdm;
}
void CdmAdapter::Initialize(std::unique_ptr<media::SimpleCdmPromise> promise) {
+ DVLOG(1) << __func__;
+
cdm_.reset(CreateCdmInstance(key_system_));
if (!cdm_) {
promise->reject(CdmPromise::Exception::INVALID_STATE_ERROR, 0,
@@ -684,8 +675,20 @@ CdmContext* CdmAdapter::GetCdmContext() {
return this;
}
+std::unique_ptr<CallbackRegistration> CdmAdapter::RegisterNewKeyCB(
+ base::RepeatingClosure new_key_cb) {
+ NOTIMPLEMENTED();
+ return nullptr;
+}
+
Decryptor* CdmAdapter::GetDecryptor() {
DCHECK(task_runner_->BelongsToCurrentThread());
+
+ // When using HW secure codecs, we cannot and should not use the CDM instance
+ // to do decrypt and/or decode. Instead, we should use the CdmProxy.
+ if (cdm_config_.use_hw_secure_codecs)
+ return nullptr;
+
return this;
}
@@ -806,7 +809,7 @@ void CdmAdapter::InitializeVideoDecoder(const VideoDecoderConfig& config,
return;
}
- natural_size_ = config.natural_size();
+ pixel_aspect_ratio_ = config.GetPixelAspectRatio();
if (status == cdm::kDeferredInitialization) {
DVLOG(1) << "Deferred initialization in " << __func__;
@@ -874,8 +877,9 @@ void CdmAdapter::DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
return;
}
- scoped_refptr<VideoFrame> decoded_frame =
- video_frame->TransformToVideoFrame(natural_size_);
+ gfx::Rect visible_rect(video_frame->Size().width, video_frame->Size().height);
+ scoped_refptr<VideoFrame> decoded_frame = video_frame->TransformToVideoFrame(
+ GetNaturalSize(visible_rect, pixel_aspect_ratio_));
if (!decoded_frame) {
DLOG(ERROR) << __func__ << ": TransformToVideoFrame failed.";
video_decode_cb.Run(Decryptor::kError, nullptr);
@@ -901,7 +905,7 @@ void CdmAdapter::DeinitializeDecoder(StreamType stream_type) {
audio_channel_layout_ = CHANNEL_LAYOUT_NONE;
break;
case Decryptor::kVideo:
- natural_size_ = gfx::Size();
+ pixel_aspect_ratio_ = 0.0;
break;
}
}
@@ -972,17 +976,6 @@ void CdmAdapter::OnRejectPromise(uint32_t promise_id,
std::string(error_message, error_message_size));
}
-void CdmAdapter::OnRejectPromise(uint32_t promise_id,
- cdm::Error error,
- uint32_t system_code,
- const char* error_message,
- uint32_t error_message_size) {
- // cdm::Host_8 version. Remove when CDM_8 no longer supported.
- // https://crbug.com/737296.
- OnRejectPromise(promise_id, ToCdmExceptionType(error), system_code,
- error_message, error_message_size);
-}
-
void CdmAdapter::OnSessionMessage(const char* session_id,
uint32_t session_id_size,
cdm::MessageType message_type,
@@ -996,19 +989,6 @@ void CdmAdapter::OnSessionMessage(const char* session_id,
std::vector<uint8_t>(message_ptr, message_ptr + message_size));
}
-void CdmAdapter::OnSessionMessage(const char* session_id,
- uint32_t session_id_size,
- cdm::MessageType message_type,
- const char* message,
- uint32_t message_size,
- const char* /* legacy_destination_url */,
- uint32_t /* legacy_destination_url_size */) {
- // cdm::Host_8 version. Remove when CDM_8 no longer supported.
- // https://crbug.com/737296.
- OnSessionMessage(session_id, session_id_size, message_type, message,
- message_size);
-}
-
void CdmAdapter::OnSessionKeysChange(const char* session_id,
uint32_t session_id_size,
bool has_additional_usable_key,
@@ -1054,23 +1034,20 @@ void CdmAdapter::OnSessionClosed(const char* session_id,
session_closed_cb_.Run(std::string(session_id, session_id_size));
}
-void CdmAdapter::OnLegacySessionError(const char* session_id,
- uint32_t session_id_size,
- cdm::Error error,
- uint32_t system_code,
- const char* error_message,
- uint32_t error_message_size) {
- // cdm::Host_8 version. Remove when CDM_8 no longer supported.
- // https://crbug.com/737296.
- DCHECK(task_runner_->BelongsToCurrentThread());
-}
-
void CdmAdapter::SendPlatformChallenge(const char* service_id,
uint32_t service_id_size,
const char* challenge,
uint32_t challenge_size) {
DCHECK(task_runner_->BelongsToCurrentThread());
+ if (!cdm_config_.allow_distinctive_identifier) {
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindRepeating(&CdmAdapter::OnChallengePlatformDone,
+ weak_factory_.GetWeakPtr(), false, "", "", ""));
+ return;
+ }
+
helper_->ChallengePlatform(std::string(service_id, service_id_size),
std::string(challenge, challenge_size),
base::Bind(&CdmAdapter::OnChallengePlatformDone,
@@ -1213,13 +1190,25 @@ cdm::FileIO* CdmAdapter::CreateFileIO(cdm::FileIOClient* client) {
DVLOG(3) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
+ if (!cdm_config_.allow_persistent_state) {
+ DVLOG(1) << __func__ << ": Persistent state not allowed.";
+ return nullptr;
+ }
+
return helper_->CreateCdmFileIO(client);
}
void CdmAdapter::RequestStorageId(uint32_t version) {
- if (version >= 0x80000000) {
- // Versions 0x80000000 and above are reserved.
- cdm_->OnStorageId(version, nullptr, 0);
+ if (!cdm_config_.allow_persistent_state ||
+ !(version == kCurrentStorageIdVersion ||
+ version == kRequestLatestStorageIdVersion)) {
+ DVLOG(1) << __func__ << ": Persistent state not allowed ("
+ << cdm_config_.allow_persistent_state
+ << ") or invalid storage ID version (" << version << ").";
+ task_runner_->PostTask(
+ FROM_HERE, base::BindRepeating(&CdmAdapter::OnStorageIdObtained,
+ weak_factory_.GetWeakPtr(), version,
+ std::vector<uint8_t>()));
return;
}
diff --git a/chromium/media/cdm/cdm_adapter.h b/chromium/media/cdm/cdm_adapter.h
index 0e5433159f1..eb26e036b5e 100644
--- a/chromium/media/cdm/cdm_adapter.h
+++ b/chromium/media/cdm/cdm_adapter.h
@@ -39,10 +39,16 @@ class CdmWrapper;
class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
public CdmContext,
public Decryptor,
- public cdm::Host_8,
public cdm::Host_9,
- public cdm::Host_10 {
+ public cdm::Host_10,
+ public cdm::Host_11 {
public:
+ using CreateCdmFunc = void* (*)(int cdm_interface_version,
+ const char* key_system,
+ uint32_t key_system_size,
+ GetCdmHostFunc get_cdm_host_func,
+ void* user_data);
+
// Creates the CDM and initialize it using |key_system| and |cdm_config|.
// |allocator| is to be used whenever the CDM needs memory and to create
// VideoFrames. |file_io_provider| is to be used whenever the CDM needs access
@@ -52,6 +58,7 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
const std::string& key_system,
const url::Origin& security_origin,
const CdmConfig& cdm_config,
+ CreateCdmFunc create_cdm_func,
std::unique_ptr<CdmAuxiliaryHelper> helper,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -86,6 +93,8 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
CdmContext* GetCdmContext() final;
// CdmContext implementation.
+ std::unique_ptr<CallbackRegistration> RegisterNewKeyCB(
+ base::RepeatingClosure new_key_cb) final;
Decryptor* GetDecryptor() final;
int GetCdmId() const final;
@@ -150,32 +159,15 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
// cdm::Host_10 specific implementation.
void OnInitialized(bool success) override;
- cdm::CdmProxy* RequestCdmProxy(cdm::CdmProxyClient* client) override;
- // cdm::Host_8 specific implementation.
- void OnRejectPromise(uint32_t promise_id,
- cdm::Error error,
- uint32_t system_code,
- const char* error_message,
- uint32_t error_message_size) override;
- void OnSessionMessage(const char* session_id,
- uint32_t session_id_size,
- cdm::MessageType message_type,
- const char* message,
- uint32_t message_size,
- const char* legacy_destination_url,
- uint32_t legacy_destination_url_size) override;
- void OnLegacySessionError(const char* session_id,
- uint32_t session_id_size,
- cdm::Error error,
- uint32_t system_code,
- const char* error_message,
- uint32_t error_message_size) override;
+ // cdm::Host_11 specific implementation.
+ cdm::CdmProxy* RequestCdmProxy(cdm::CdmProxyClient* client) override;
private:
CdmAdapter(const std::string& key_system,
const url::Origin& security_origin,
const CdmConfig& cdm_config,
+ CreateCdmFunc create_cdm_func,
std::unique_ptr<CdmAuxiliaryHelper> helper,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -228,15 +220,17 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
const std::string origin_string_;
const CdmConfig cdm_config_;
+ CreateCdmFunc create_cdm_func_;
+
+ // Helper that provides additional functionality for the CDM.
+ std::unique_ptr<CdmAuxiliaryHelper> helper_;
+
// Callbacks for firing session events.
SessionMessageCB session_message_cb_;
SessionClosedCB session_closed_cb_;
SessionKeysChangeCB session_keys_change_cb_;
SessionExpirationUpdateCB session_expiration_update_cb_;
- // Helper that provides additional functionality for the CDM.
- std::unique_ptr<CdmAuxiliaryHelper> helper_;
-
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
scoped_refptr<AudioBufferMemoryPool> pool_;
@@ -255,9 +249,8 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
int audio_samples_per_second_ = 0;
ChannelLayout audio_channel_layout_ = CHANNEL_LAYOUT_NONE;
- // Keep track of video frame natural size from the latest configuration
- // as the CDM doesn't provide it.
- gfx::Size natural_size_;
+ // Keep track of aspect ratio from the latest configuration.
+ double pixel_aspect_ratio_ = 0.0;
// Tracks whether an output protection query and a positive query result (no
// unprotected external link) have been reported to UMA.
diff --git a/chromium/media/cdm/cdm_adapter_factory.cc b/chromium/media/cdm/cdm_adapter_factory.cc
index 379576ee45f..8826fa5ec12 100644
--- a/chromium/media/cdm/cdm_adapter_factory.cc
+++ b/chromium/media/cdm/cdm_adapter_factory.cc
@@ -8,6 +8,7 @@
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/cdm_factory.h"
#include "media/cdm/cdm_adapter.h"
+#include "media/cdm/cdm_module.h"
#include "url/origin.h"
namespace media {
@@ -28,7 +29,7 @@ void CdmAdapterFactory::Create(
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb,
const CdmCreatedCB& cdm_created_cb) {
- DVLOG(1) << __FUNCTION__ << ": key_system=" << key_system;
+ DVLOG(1) << __func__ << ": key_system=" << key_system;
if (security_origin.unique()) {
LOG(ERROR) << "Invalid Origin: " << security_origin;
@@ -37,6 +38,15 @@ void CdmAdapterFactory::Create(
return;
}
+ CdmAdapter::CreateCdmFunc create_cdm_func =
+ CdmModule::GetInstance()->GetCreateCdmFunc();
+ if (!create_cdm_func) {
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::Bind(cdm_created_cb, nullptr, "CreateCdmFunc not available."));
+ return;
+ }
+
std::unique_ptr<CdmAuxiliaryHelper> cdm_helper = helper_creation_cb_.Run();
if (!cdm_helper) {
base::ThreadTaskRunnerHandle::Get()->PostTask(
@@ -45,7 +55,7 @@ void CdmAdapterFactory::Create(
return;
}
- CdmAdapter::Create(key_system, security_origin, cdm_config,
+ CdmAdapter::Create(key_system, security_origin, cdm_config, create_cdm_func,
std::move(cdm_helper), session_message_cb,
session_closed_cb, session_keys_change_cb,
session_expiration_update_cb, cdm_created_cb);
diff --git a/chromium/media/cdm/cdm_adapter_unittest.cc b/chromium/media/cdm/cdm_adapter_unittest.cc
index 0ced276cdf4..8ca37d995f8 100644
--- a/chromium/media/cdm/cdm_adapter_unittest.cc
+++ b/chromium/media/cdm/cdm_adapter_unittest.cc
@@ -8,10 +8,11 @@
#include <memory>
#include "base/bind.h"
+#include "base/command_line.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/test/scoped_feature_list.h"
+#include "base/strings/string_number_conversions.h"
#include "base/test/scoped_task_environment.h"
#include "media/base/cdm_callback_promise.h"
#include "media/base/cdm_key_information.h"
@@ -21,12 +22,19 @@
#include "media/cdm/api/content_decryption_module.h"
#include "media/cdm/cdm_module.h"
#include "media/cdm/external_clear_key_test_helper.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
+#include "media/cdm/library_cdm/mock_library_cdm.h"
#include "media/cdm/mock_helpers.h"
#include "media/cdm/simple_cdm_allocator.h"
#include "media/media_buildflags.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+using ::testing::Invoke;
+using ::testing::IsNull;
+using ::testing::NotNull;
+using ::testing::Return;
+using ::testing::Values;
using ::testing::SaveArg;
using ::testing::StrictMock;
using ::testing::_;
@@ -39,6 +47,11 @@ MATCHER(IsNullTime, "") {
return arg.is_null();
}
+MATCHER(IsNullPlatformChallengeResponse, "") {
+ // Only check the |signed_data| for simplicity.
+ return !arg.signed_data;
+}
+
// TODO(jrummell): These tests are a subset of those in aes_decryptor_unittest.
// Refactor aes_decryptor_unittest.cc to handle AesDecryptor directly and
// via CdmAdapter once CdmAdapter supports decrypting functionality. There
@@ -47,6 +60,8 @@ MATCHER(IsNullTime, "") {
namespace media {
+namespace {
+
// Random key ID used to create a session.
const uint8_t kKeyId[] = {
// base64 equivalent is AQIDBAUGBwgJCgsMDQ4PEA
@@ -84,42 +99,50 @@ const char kKeyAsJWK[] =
" \"type\": \"temporary\""
"}";
-// Tests CdmAdapter with the following parameter:
-// - bool: whether experimental CDM interface should be enabled.
-class CdmAdapterTest : public testing::Test,
- public testing::WithParamInterface<bool> {
+class MockFileIOClient : public cdm::FileIOClient {
public:
- enum ExpectedResult { SUCCESS, FAILURE };
+ MockFileIOClient() = default;
+ ~MockFileIOClient() override = default;
- bool UseExperimentalCdmInterface() { return GetParam(); }
+ MOCK_METHOD1(OnOpenComplete, void(Status));
+ MOCK_METHOD3(OnReadComplete, void(Status, const uint8_t*, uint32_t));
+ MOCK_METHOD1(OnWriteComplete, void(Status));
+};
- CdmAdapterTest() {
- // Enable use of External Clear Key CDM.
- if (UseExperimentalCdmInterface()) {
- scoped_feature_list_.InitWithFeatures(
- {media::kSupportExperimentalCdmInterface}, {});
- }
+} // namespace
-#if BUILDFLAG(ENABLE_CDM_HOST_VERIFICATION)
- CdmModule::GetInstance()->Initialize(helper_.LibraryPath(), {});
-#else
- CdmModule::GetInstance()->Initialize(helper_.LibraryPath());
-#endif // BUILDFLAG(ENABLE_CDM_HOST_VERIFICATION)
+// Tests CdmAdapter with the following parameter:
+// - int: CDM interface version to test.
+class CdmAdapterTestBase : public testing::Test,
+ public testing::WithParamInterface<int> {
+ public:
+ enum ExpectedResult { SUCCESS, FAILURE };
+
+ CdmAdapterTestBase() {
+ base::CommandLine::ForCurrentProcess()->AppendSwitchASCII(
+ switches::kOverrideEnabledCdmInterfaceVersion,
+ base::IntToString(GetCdmInterfaceVersion()));
}
- ~CdmAdapterTest() override { CdmModule::ResetInstanceForTesting(); }
+ ~CdmAdapterTestBase() override { CdmModule::ResetInstanceForTesting(); }
protected:
+ virtual std::string GetKeySystemName() = 0;
+ virtual CdmAdapter::CreateCdmFunc GetCreateCdmFunc() = 0;
+
+ int GetCdmInterfaceVersion() { return GetParam(); }
+
// Initializes the adapter. |expected_result| tests that the call succeeds
// or generates an error.
- void InitializeAndExpect(ExpectedResult expected_result) {
- CdmConfig cdm_config; // default settings of false are sufficient.
+ void InitializeWithCdmConfigAndExpect(const CdmConfig& cdm_config,
+ ExpectedResult expected_result) {
std::unique_ptr<CdmAllocator> allocator(new SimpleCdmAllocator());
- std::unique_ptr<CdmAuxiliaryHelper> cdm_helper(
- new MockCdmAuxiliaryHelper(std::move(allocator)));
- CdmAdapter::Create(helper_.KeySystemName(),
+ std::unique_ptr<StrictMock<MockCdmAuxiliaryHelper>> cdm_helper(
+ new StrictMock<MockCdmAuxiliaryHelper>(std::move(allocator)));
+ cdm_helper_ = cdm_helper.get();
+ CdmAdapter::Create(GetKeySystemName(),
url::Origin::Create(GURL("http://foo.com")), cdm_config,
- std::move(cdm_helper),
+ GetCreateCdmFunc(), std::move(cdm_helper),
base::Bind(&MockCdmClient::OnSessionMessage,
base::Unretained(&cdm_client_)),
base::Bind(&MockCdmClient::OnSessionClosed,
@@ -128,11 +151,71 @@ class CdmAdapterTest : public testing::Test,
base::Unretained(&cdm_client_)),
base::Bind(&MockCdmClient::OnSessionExpirationUpdate,
base::Unretained(&cdm_client_)),
- base::Bind(&CdmAdapterTest::OnCdmCreated,
+ base::Bind(&CdmAdapterTestBase::OnCdmCreated,
base::Unretained(this), expected_result));
RunUntilIdle();
+ ASSERT_EQ(expected_result == SUCCESS, !!cdm_);
+ }
+
+ void InitializeAndExpect(ExpectedResult expected_result) {
+ // Default settings of false are sufficient for most tests.
+ CdmConfig cdm_config;
+ InitializeWithCdmConfigAndExpect(cdm_config, expected_result);
+ }
+
+ void OnCdmCreated(ExpectedResult expected_result,
+ const scoped_refptr<ContentDecryptionModule>& cdm,
+ const std::string& error_message) {
+ if (cdm) {
+ ASSERT_EQ(expected_result, SUCCESS)
+ << "CDM creation succeeded unexpectedly.";
+ CdmAdapter* cdm_adapter = static_cast<CdmAdapter*>(cdm.get());
+ ASSERT_EQ(GetCdmInterfaceVersion(), cdm_adapter->GetInterfaceVersion());
+ cdm_ = cdm;
+ } else {
+ ASSERT_EQ(expected_result, FAILURE) << error_message;
+ }
+ }
+
+ void RunUntilIdle() { scoped_task_environment_.RunUntilIdle(); }
+
+ StrictMock<MockCdmClient> cdm_client_;
+ StrictMock<MockCdmAuxiliaryHelper>* cdm_helper_ = nullptr;
+
+ // Keep track of the loaded CDM.
+ scoped_refptr<ContentDecryptionModule> cdm_;
+
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(CdmAdapterTestBase);
+};
+
+class CdmAdapterTestWithClearKeyCdm : public CdmAdapterTestBase {
+ public:
+ ~CdmAdapterTestWithClearKeyCdm() {
+ // Clear |cdm_| before we destroy |helper_|.
+ cdm_ = nullptr;
+ RunUntilIdle();
}
+ void SetUp() override {
+ CdmAdapterTestBase::SetUp();
+
+#if BUILDFLAG(ENABLE_CDM_HOST_VERIFICATION)
+ CdmModule::GetInstance()->Initialize(helper_.LibraryPath(), {});
+#else
+ CdmModule::GetInstance()->Initialize(helper_.LibraryPath());
+#endif // BUILDFLAG(ENABLE_CDM_HOST_VERIFICATION)
+ }
+
+ // CdmAdapterTestBase implementation.
+ std::string GetKeySystemName() override { return helper_.KeySystemName(); }
+ CdmAdapter::CreateCdmFunc GetCreateCdmFunc() override {
+ return CdmModule::GetInstance()->GetCreateCdmFunc();
+ }
+
+ protected:
// Creates a new session using |key_id|. |session_id_| will be set
// when the promise is resolved. |expected_result| tests that
// CreateSessionAndGenerateRequest() succeeds or generates an error.
@@ -191,23 +274,13 @@ class CdmAdapterTest : public testing::Test,
std::string SessionId() { return session_id_; }
private:
- void OnCdmCreated(ExpectedResult expected_result,
- const scoped_refptr<ContentDecryptionModule>& cdm,
- const std::string& error_message) {
- if (cdm) {
- ASSERT_EQ(expected_result, SUCCESS)
- << "CDM creation succeeded unexpectedly.";
-
- CdmAdapter* cdm_adapter = static_cast<CdmAdapter*>(cdm.get());
-
- ASSERT_EQ(UseExperimentalCdmInterface(),
- cdm_adapter->GetInterfaceVersion() >
- cdm::ContentDecryptionModule::kVersion);
- cdm_ = cdm;
- } else {
- ASSERT_EQ(expected_result, FAILURE) << error_message;
- }
- }
+ // Methods used for promise resolved/rejected.
+ MOCK_METHOD0(OnResolve, void());
+ MOCK_METHOD1(OnResolveWithSession, void(const std::string& session_id));
+ MOCK_METHOD3(OnReject,
+ void(CdmPromise::Exception exception_code,
+ uint32_t system_code,
+ const std::string& error_message));
// Create a promise. |expected_result| is used to indicate how the promise
// should be fulfilled.
@@ -220,8 +293,10 @@ class CdmAdapterTest : public testing::Test,
}
std::unique_ptr<SimpleCdmPromise> promise(new CdmCallbackPromise<>(
- base::Bind(&CdmAdapterTest::OnResolve, base::Unretained(this)),
- base::Bind(&CdmAdapterTest::OnReject, base::Unretained(this))));
+ base::Bind(&CdmAdapterTestWithClearKeyCdm::OnResolve,
+ base::Unretained(this)),
+ base::Bind(&CdmAdapterTestWithClearKeyCdm::OnReject,
+ base::Unretained(this))));
return promise;
}
@@ -238,52 +313,69 @@ class CdmAdapterTest : public testing::Test,
std::unique_ptr<NewSessionCdmPromise> promise(
new CdmCallbackPromise<std::string>(
- base::Bind(&CdmAdapterTest::OnResolveWithSession,
+ base::Bind(&CdmAdapterTestWithClearKeyCdm::OnResolveWithSession,
base::Unretained(this)),
- base::Bind(&CdmAdapterTest::OnReject, base::Unretained(this))));
+ base::Bind(&CdmAdapterTestWithClearKeyCdm::OnReject,
+ base::Unretained(this))));
return promise;
}
- void RunUntilIdle() { base::RunLoop().RunUntilIdle(); }
-
- // Methods used for promise resolved/rejected.
- MOCK_METHOD0(OnResolve, void());
- MOCK_METHOD1(OnResolveWithSession, void(const std::string& session_id));
- MOCK_METHOD3(OnReject,
- void(CdmPromise::Exception exception_code,
- uint32_t system_code,
- const std::string& error_message));
-
- StrictMock<MockCdmClient> cdm_client_;
-
- // Helper class to load/unload External Clear Key Library.
+ // Helper class to load/unload Clear Key CDM Library.
+ // TODO(xhwang): CdmModule does CDM loading/unloading by itself. So it seems
+ // we don't need to use ExternalClearKeyTestHelper. Simplify this if possible.
ExternalClearKeyTestHelper helper_;
- // Keep track of the loaded CDM.
- scoped_refptr<ContentDecryptionModule> cdm_;
-
// |session_id_| is the latest result of calling CreateSession().
std::string session_id_;
+};
- base::test::ScopedTaskEnvironment scoped_task_environment_;
- base::test::ScopedFeatureList scoped_feature_list_;
+class CdmAdapterTestWithMockCdm : public CdmAdapterTestBase {
+ public:
+ ~CdmAdapterTestWithMockCdm() override {
+ // Makes sure Destroy() is called on CdmAdapter destruction.
+ EXPECT_CALL(*mock_library_cdm_, DestroyCalled());
+ cdm_ = nullptr;
+ RunUntilIdle();
+ }
- DISALLOW_COPY_AND_ASSIGN(CdmAdapterTest);
+ // CdmAdapterTestBase implementation.
+ std::string GetKeySystemName() override { return "x-com.mock"; }
+ CdmAdapter::CreateCdmFunc GetCreateCdmFunc() override {
+ return CreateMockLibraryCdm;
+ }
+
+ protected:
+ void InitializeWithCdmConfig(const CdmConfig& cdm_config) {
+ // TODO(xhwang): Add tests for failure cases.
+ InitializeWithCdmConfigAndExpect(cdm_config, SUCCESS);
+ mock_library_cdm_ = MockLibraryCdm::GetInstance();
+ ASSERT_TRUE(mock_library_cdm_);
+ cdm_host_proxy_ = mock_library_cdm_->GetCdmHostProxy();
+ ASSERT_TRUE(cdm_host_proxy_);
+ }
+
+ MockLibraryCdm* mock_library_cdm_ = nullptr;
+ CdmHostProxy* cdm_host_proxy_ = nullptr;
};
-INSTANTIATE_TEST_CASE_P(StableCdmInterface,
- CdmAdapterTest,
- testing::Values(false));
+// Instantiate test cases
+
+INSTANTIATE_TEST_CASE_P(CDM_9, CdmAdapterTestWithClearKeyCdm, Values(9));
+INSTANTIATE_TEST_CASE_P(CDM_10, CdmAdapterTestWithClearKeyCdm, Values(10));
+INSTANTIATE_TEST_CASE_P(CDM_11, CdmAdapterTestWithClearKeyCdm, Values(11));
+
+INSTANTIATE_TEST_CASE_P(CDM_9, CdmAdapterTestWithMockCdm, Values(9));
+INSTANTIATE_TEST_CASE_P(CDM_10, CdmAdapterTestWithMockCdm, Values(10));
+INSTANTIATE_TEST_CASE_P(CDM_11, CdmAdapterTestWithMockCdm, Values(11));
-INSTANTIATE_TEST_CASE_P(ExperimentalCdmInterface,
- CdmAdapterTest,
- testing::Values(true));
+// CdmAdapterTestWithClearKeyCdm Tests
-TEST_P(CdmAdapterTest, Initialize) {
+TEST_P(CdmAdapterTestWithClearKeyCdm, Initialize) {
InitializeAndExpect(SUCCESS);
}
-TEST_P(CdmAdapterTest, BadLibraryPath) {
+// TODO(xhwang): This belongs to CdmModuleTest.
+TEST_P(CdmAdapterTestWithClearKeyCdm, BadLibraryPath) {
CdmModule::ResetInstanceForTesting();
#if BUILDFLAG(ENABLE_CDM_HOST_VERIFICATION)
@@ -294,17 +386,17 @@ TEST_P(CdmAdapterTest, BadLibraryPath) {
base::FilePath(FILE_PATH_LITERAL("no_library_here")));
#endif // BUILDFLAG(ENABLE_CDM_HOST_VERIFICATION)
- InitializeAndExpect(FAILURE);
+ ASSERT_FALSE(GetCreateCdmFunc());
}
-TEST_P(CdmAdapterTest, CreateWebmSession) {
+TEST_P(CdmAdapterTestWithClearKeyCdm, CreateWebmSession) {
InitializeAndExpect(SUCCESS);
std::vector<uint8_t> key_id(kKeyId, kKeyId + arraysize(kKeyId));
CreateSessionAndExpect(EmeInitDataType::WEBM, key_id, SUCCESS);
}
-TEST_P(CdmAdapterTest, CreateKeyIdsSession) {
+TEST_P(CdmAdapterTestWithClearKeyCdm, CreateKeyIdsSession) {
InitializeAndExpect(SUCCESS);
// Don't include the trailing /0 from the string in the data passed in.
@@ -313,7 +405,7 @@ TEST_P(CdmAdapterTest, CreateKeyIdsSession) {
CreateSessionAndExpect(EmeInitDataType::KEYIDS, key_id, SUCCESS);
}
-TEST_P(CdmAdapterTest, CreateCencSession) {
+TEST_P(CdmAdapterTestWithClearKeyCdm, CreateCencSession) {
InitializeAndExpect(SUCCESS);
std::vector<uint8_t> key_id(kKeyIdAsPssh,
@@ -321,7 +413,7 @@ TEST_P(CdmAdapterTest, CreateCencSession) {
CreateSessionAndExpect(EmeInitDataType::CENC, key_id, SUCCESS);
}
-TEST_P(CdmAdapterTest, CreateSessionWithBadData) {
+TEST_P(CdmAdapterTestWithClearKeyCdm, CreateSessionWithBadData) {
InitializeAndExpect(SUCCESS);
// Use |kKeyId| but specify KEYIDS format.
@@ -329,7 +421,7 @@ TEST_P(CdmAdapterTest, CreateSessionWithBadData) {
CreateSessionAndExpect(EmeInitDataType::KEYIDS, key_id, FAILURE);
}
-TEST_P(CdmAdapterTest, LoadSession) {
+TEST_P(CdmAdapterTestWithClearKeyCdm, LoadSession) {
InitializeAndExpect(SUCCESS);
// LoadSession() is not supported by AesDecryptor.
@@ -337,7 +429,7 @@ TEST_P(CdmAdapterTest, LoadSession) {
CreateSessionAndExpect(EmeInitDataType::KEYIDS, key_id, FAILURE);
}
-TEST_P(CdmAdapterTest, UpdateSession) {
+TEST_P(CdmAdapterTestWithClearKeyCdm, UpdateSession) {
InitializeAndExpect(SUCCESS);
std::vector<uint8_t> key_id(kKeyId, kKeyId + arraysize(kKeyId));
@@ -346,7 +438,7 @@ TEST_P(CdmAdapterTest, UpdateSession) {
UpdateSessionAndExpect(SessionId(), kKeyAsJWK, SUCCESS, true);
}
-TEST_P(CdmAdapterTest, UpdateSessionWithBadData) {
+TEST_P(CdmAdapterTestWithClearKeyCdm, UpdateSessionWithBadData) {
InitializeAndExpect(SUCCESS);
std::vector<uint8_t> key_id(kKeyId, kKeyId + arraysize(kKeyId));
@@ -355,4 +447,126 @@ TEST_P(CdmAdapterTest, UpdateSessionWithBadData) {
UpdateSessionAndExpect(SessionId(), "random data", FAILURE, true);
}
+// CdmAdapterTestWithMockCdm Tests
+
+// ChallengePlatform() will ask the helper to send platform challenge.
+TEST_P(CdmAdapterTestWithMockCdm, ChallengePlatform) {
+ CdmConfig cdm_config;
+ cdm_config.allow_distinctive_identifier = true;
+ InitializeWithCdmConfig(cdm_config);
+
+ std::string service_id = "service_id";
+ std::string challenge = "challenge";
+ EXPECT_CALL(*cdm_helper_, ChallengePlatformCalled(service_id, challenge))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_library_cdm_, OnPlatformChallengeResponse(_));
+ cdm_host_proxy_->SendPlatformChallenge(service_id.data(), service_id.size(),
+ challenge.data(), challenge.size());
+ RunUntilIdle();
+}
+
+// ChallengePlatform() will always fail if |allow_distinctive_identifier| is
+// false.
+TEST_P(CdmAdapterTestWithMockCdm,
+ ChallengePlatform_DistinctiveIdentifierNotAllowed) {
+ CdmConfig cdm_config;
+ InitializeWithCdmConfig(cdm_config);
+
+ EXPECT_CALL(*mock_library_cdm_,
+ OnPlatformChallengeResponse(IsNullPlatformChallengeResponse()));
+ std::string service_id = "service_id";
+ std::string challenge = "challenge";
+ cdm_host_proxy_->SendPlatformChallenge(service_id.data(), service_id.size(),
+ challenge.data(), challenge.size());
+ RunUntilIdle();
+}
+
+// CreateFileIO() will ask helper to create FileIO.
+TEST_P(CdmAdapterTestWithMockCdm, CreateFileIO) {
+ CdmConfig cdm_config;
+ cdm_config.allow_persistent_state = true;
+ InitializeWithCdmConfig(cdm_config);
+
+ MockFileIOClient file_io_client;
+ EXPECT_CALL(*cdm_helper_, CreateCdmFileIO(_));
+ cdm_host_proxy_->CreateFileIO(&file_io_client);
+ RunUntilIdle();
+}
+
+// CreateFileIO() will always fail if |allow_persistent_state| is false.
+TEST_P(CdmAdapterTestWithMockCdm, CreateFileIO_PersistentStateNotAllowed) {
+ CdmConfig cdm_config;
+ InitializeWithCdmConfig(cdm_config);
+
+ // When |allow_persistent_state| is false, should return null immediately
+ // without asking helper to create FileIO.
+ MockFileIOClient file_io_client;
+ ASSERT_FALSE(cdm_host_proxy_->CreateFileIO(&file_io_client));
+ RunUntilIdle();
+}
+
+// RequestStorageId() with version 0 (latest) is supported.
+TEST_P(CdmAdapterTestWithMockCdm, RequestStorageId_Version_0) {
+ CdmConfig cdm_config;
+ cdm_config.allow_persistent_state = true;
+ InitializeWithCdmConfig(cdm_config);
+
+ std::vector<uint8_t> storage_id = {1, 2, 3};
+ EXPECT_CALL(*cdm_helper_, GetStorageIdCalled(0)).WillOnce(Return(storage_id));
+ EXPECT_CALL(*mock_library_cdm_, OnStorageId(0, NotNull(), 3));
+ cdm_host_proxy_->RequestStorageId(0);
+ RunUntilIdle();
+}
+
+// RequestStorageId() with version 1 is supported.
+TEST_P(CdmAdapterTestWithMockCdm, RequestStorageId_Version_1) {
+ CdmConfig cdm_config;
+ cdm_config.allow_persistent_state = true;
+ InitializeWithCdmConfig(cdm_config);
+
+ std::vector<uint8_t> storage_id = {1, 2, 3};
+ EXPECT_CALL(*cdm_helper_, GetStorageIdCalled(1)).WillOnce(Return(storage_id));
+ EXPECT_CALL(*mock_library_cdm_, OnStorageId(1, NotNull(), 3));
+ cdm_host_proxy_->RequestStorageId(1);
+ RunUntilIdle();
+}
+
+// RequestStorageId() with version 2 is not supported.
+TEST_P(CdmAdapterTestWithMockCdm, RequestStorageId_Version_2) {
+ CdmConfig cdm_config;
+ cdm_config.allow_persistent_state = true;
+ InitializeWithCdmConfig(cdm_config);
+
+ EXPECT_CALL(*mock_library_cdm_, OnStorageId(2, IsNull(), 0));
+ cdm_host_proxy_->RequestStorageId(2);
+ RunUntilIdle();
+}
+
+// RequestStorageId() will always fail if |allow_persistent_state| is false.
+TEST_P(CdmAdapterTestWithMockCdm, RequestStorageId_PersistentStateNotAllowed) {
+ CdmConfig cdm_config;
+ InitializeWithCdmConfig(cdm_config);
+
+ EXPECT_CALL(*mock_library_cdm_, OnStorageId(1, IsNull(), 0));
+ cdm_host_proxy_->RequestStorageId(1);
+ RunUntilIdle();
+}
+
+TEST_P(CdmAdapterTestWithMockCdm, GetDecryptor) {
+ CdmConfig cdm_config;
+ InitializeWithCdmConfig(cdm_config);
+ auto* cdm_context = cdm_->GetCdmContext();
+ ASSERT_TRUE(cdm_context);
+ EXPECT_TRUE(cdm_context->GetDecryptor());
+}
+
+TEST_P(CdmAdapterTestWithMockCdm, GetDecryptor_UseHwSecureCodecs) {
+ CdmConfig cdm_config;
+ cdm_config.use_hw_secure_codecs = true;
+ InitializeWithCdmConfig(cdm_config);
+ auto* cdm_context = cdm_->GetCdmContext();
+ ASSERT_TRUE(cdm_context);
+ EXPECT_FALSE(cdm_context->GetDecryptor());
+}
+
} // namespace media
diff --git a/chromium/media/cdm/cdm_wrapper.h b/chromium/media/cdm/cdm_wrapper.h
index 1d35d814279..34f13a742ae 100644
--- a/chromium/media/cdm/cdm_wrapper.h
+++ b/chromium/media/cdm/cdm_wrapper.h
@@ -21,13 +21,9 @@ namespace media {
namespace {
-bool IsExperimentalCdmInterfaceSupported() {
- return base::FeatureList::IsEnabled(media::kSupportExperimentalCdmInterface);
-}
-
bool IsEncryptionSchemeSupportedByLegacyCdms(
const cdm::EncryptionScheme& scheme) {
- // CDM_8 and CDM_9 don't check the encryption scheme, so do it here.
+ // CDM_9 doesn't check the encryption scheme, so do it here.
return scheme == cdm::EncryptionScheme::kUnencrypted ||
scheme == cdm::EncryptionScheme::kCenc;
}
@@ -174,27 +170,32 @@ class CdmWrapper {
// Template class that does the CdmWrapper -> CdmInterface conversion. Default
// implementations are provided. Any methods that need special treatment should
// be specialized.
-template <class CdmInterface>
+template <int CdmInterfaceVersion>
class CdmWrapperImpl : public CdmWrapper {
public:
+ using CdmInterface =
+ typename CdmInterfaceTraits<CdmInterfaceVersion>::CdmInterface;
+ static_assert(CdmInterfaceVersion == CdmInterface::kVersion,
+ "CDM interface version mismatch.");
+
static CdmWrapper* Create(CreateCdmFunc create_cdm_func,
const char* key_system,
uint32_t key_system_size,
GetCdmHostFunc get_cdm_host_func,
void* user_data) {
void* cdm_instance =
- create_cdm_func(CdmInterface::kVersion, key_system, key_system_size,
+ create_cdm_func(CdmInterfaceVersion, key_system, key_system_size,
get_cdm_host_func, user_data);
if (!cdm_instance)
return nullptr;
- return new CdmWrapperImpl<CdmInterface>(
+ return new CdmWrapperImpl<CdmInterfaceVersion>(
static_cast<CdmInterface*>(cdm_instance));
}
~CdmWrapperImpl() override { cdm_->Destroy(); }
- int GetInterfaceVersion() override { return CdmInterface::kVersion; }
+ int GetInterfaceVersion() override { return CdmInterfaceVersion; }
bool Initialize(bool allow_distinctive_identifier,
bool allow_persistent_state,
@@ -304,7 +305,7 @@ class CdmWrapperImpl : public CdmWrapper {
void OnStorageId(uint32_t version,
const uint8_t* storage_id,
- uint32_t storage_id_size) {
+ uint32_t storage_id_size) override {
cdm_->OnStorageId(version, storage_id, storage_id_size);
}
@@ -320,17 +321,15 @@ class CdmWrapperImpl : public CdmWrapper {
// TODO(crbug.com/799219): Remove when CDM_9 no longer supported.
template <>
-bool CdmWrapperImpl<cdm::ContentDecryptionModule_9>::Initialize(
- bool allow_distinctive_identifier,
- bool allow_persistent_state,
- bool /* use_hw_secure_codecs*/) {
+bool CdmWrapperImpl<9>::Initialize(bool allow_distinctive_identifier,
+ bool allow_persistent_state,
+ bool /* use_hw_secure_codecs*/) {
cdm_->Initialize(allow_distinctive_identifier, allow_persistent_state);
return false;
}
template <>
-cdm::Status
-CdmWrapperImpl<cdm::ContentDecryptionModule_9>::InitializeAudioDecoder(
+cdm::Status CdmWrapperImpl<9>::InitializeAudioDecoder(
const cdm::AudioDecoderConfig_2& audio_decoder_config) {
if (!IsEncryptionSchemeSupportedByLegacyCdms(
audio_decoder_config.encryption_scheme))
@@ -341,8 +340,7 @@ CdmWrapperImpl<cdm::ContentDecryptionModule_9>::InitializeAudioDecoder(
}
template <>
-cdm::Status
-CdmWrapperImpl<cdm::ContentDecryptionModule_9>::InitializeVideoDecoder(
+cdm::Status CdmWrapperImpl<9>::InitializeVideoDecoder(
const cdm::VideoDecoderConfig_2& video_decoder_config) {
if (!IsEncryptionSchemeSupportedByLegacyCdms(
video_decoder_config.encryption_scheme))
@@ -353,7 +351,7 @@ CdmWrapperImpl<cdm::ContentDecryptionModule_9>::InitializeVideoDecoder(
}
template <>
-cdm::Status CdmWrapperImpl<cdm::ContentDecryptionModule_9>::Decrypt(
+cdm::Status CdmWrapperImpl<9>::Decrypt(
const cdm::InputBuffer_2& encrypted_buffer,
cdm::DecryptedBlock* decrypted_buffer) {
if (!IsEncryptionSchemeSupportedByLegacyCdms(
@@ -364,8 +362,7 @@ cdm::Status CdmWrapperImpl<cdm::ContentDecryptionModule_9>::Decrypt(
}
template <>
-cdm::Status
-CdmWrapperImpl<cdm::ContentDecryptionModule_9>::DecryptAndDecodeFrame(
+cdm::Status CdmWrapperImpl<9>::DecryptAndDecodeFrame(
const cdm::InputBuffer_2& encrypted_buffer,
cdm::VideoFrame* video_frame) {
if (!IsEncryptionSchemeSupportedByLegacyCdms(
@@ -377,94 +374,7 @@ CdmWrapperImpl<cdm::ContentDecryptionModule_9>::DecryptAndDecodeFrame(
}
template <>
-cdm::Status
-CdmWrapperImpl<cdm::ContentDecryptionModule_9>::DecryptAndDecodeSamples(
- const cdm::InputBuffer_2& encrypted_buffer,
- cdm::AudioFrames* audio_frames) {
- if (!IsEncryptionSchemeSupportedByLegacyCdms(
- encrypted_buffer.encryption_scheme))
- return cdm::kDecryptError;
-
- return cdm_->DecryptAndDecodeSamples(ToInputBuffer_1(encrypted_buffer),
- audio_frames);
-}
-
-// Specialization for cdm::ContentDecryptionModule_8 methods.
-// TODO(crbug.com/737296): Remove when CDM_8 no longer supported.
-
-template <>
-bool CdmWrapperImpl<cdm::ContentDecryptionModule_8>::Initialize(
- bool allow_distinctive_identifier,
- bool allow_persistent_state,
- bool /* use_hw_secure_codecs*/) {
- cdm_->Initialize(allow_distinctive_identifier, allow_persistent_state);
- return false;
-}
-
-template <>
-bool CdmWrapperImpl<cdm::ContentDecryptionModule_8>::GetStatusForPolicy(
- uint32_t /* promise_id */,
- cdm::HdcpVersion /* min_hdcp_version */) {
- return false;
-}
-
-template <>
-void CdmWrapperImpl<cdm::ContentDecryptionModule_8>::OnStorageId(
- uint32_t version,
- const uint8_t* storage_id,
- uint32_t storage_id_size) {}
-
-template <>
-cdm::Status
-CdmWrapperImpl<cdm::ContentDecryptionModule_8>::InitializeAudioDecoder(
- const cdm::AudioDecoderConfig_2& audio_decoder_config) {
- if (!IsEncryptionSchemeSupportedByLegacyCdms(
- audio_decoder_config.encryption_scheme))
- return cdm::kInitializationError;
-
- return cdm_->InitializeAudioDecoder(
- ToAudioDecoderConfig_1(audio_decoder_config));
-}
-
-template <>
-cdm::Status
-CdmWrapperImpl<cdm::ContentDecryptionModule_8>::InitializeVideoDecoder(
- const cdm::VideoDecoderConfig_2& video_decoder_config) {
- if (!IsEncryptionSchemeSupportedByLegacyCdms(
- video_decoder_config.encryption_scheme))
- return cdm::kInitializationError;
-
- return cdm_->InitializeVideoDecoder(
- ToVideoDecoderConfig_1(video_decoder_config));
-}
-
-template <>
-cdm::Status CdmWrapperImpl<cdm::ContentDecryptionModule_8>::Decrypt(
- const cdm::InputBuffer_2& encrypted_buffer,
- cdm::DecryptedBlock* decrypted_buffer) {
- if (!IsEncryptionSchemeSupportedByLegacyCdms(
- encrypted_buffer.encryption_scheme))
- return cdm::kDecryptError;
-
- return cdm_->Decrypt(ToInputBuffer_1(encrypted_buffer), decrypted_buffer);
-}
-
-template <>
-cdm::Status
-CdmWrapperImpl<cdm::ContentDecryptionModule_8>::DecryptAndDecodeFrame(
- const cdm::InputBuffer_2& encrypted_buffer,
- cdm::VideoFrame* video_frame) {
- if (!IsEncryptionSchemeSupportedByLegacyCdms(
- encrypted_buffer.encryption_scheme))
- return cdm::kDecryptError;
-
- return cdm_->DecryptAndDecodeFrame(ToInputBuffer_1(encrypted_buffer),
- video_frame);
-}
-
-template <>
-cdm::Status
-CdmWrapperImpl<cdm::ContentDecryptionModule_8>::DecryptAndDecodeSamples(
+cdm::Status CdmWrapperImpl<9>::DecryptAndDecodeSamples(
const cdm::InputBuffer_2& encrypted_buffer,
cdm::AudioFrames* audio_frames) {
if (!IsEncryptionSchemeSupportedByLegacyCdms(
@@ -475,66 +385,46 @@ CdmWrapperImpl<cdm::ContentDecryptionModule_8>::DecryptAndDecodeSamples(
audio_frames);
}
+// static
CdmWrapper* CdmWrapper::Create(CreateCdmFunc create_cdm_func,
const char* key_system,
uint32_t key_system_size,
GetCdmHostFunc get_cdm_host_func,
void* user_data) {
- // cdm::ContentDecryptionModule::kVersion is always the latest stable version.
- static_assert(cdm::ContentDecryptionModule::kVersion ==
- cdm::ContentDecryptionModule_9::kVersion,
- "update the code below");
-
- // Ensure IsSupportedCdmInterfaceVersion() matches this implementation.
- // Always update this DCHECK when updating this function.
- // If this check fails, update this function and DCHECK or update
- // IsSupportedCdmInterfaceVersion().
- // TODO(xhwang): Static assert these at compile time.
- const int kMinVersion = cdm::ContentDecryptionModule_8::kVersion;
- const int kMaxVersion = cdm::ContentDecryptionModule_10::kVersion;
- DCHECK(!IsSupportedCdmInterfaceVersion(kMinVersion - 1));
- for (int version = kMinVersion; version <= kMaxVersion; ++version)
- DCHECK(IsSupportedCdmInterfaceVersion(version));
- DCHECK(!IsSupportedCdmInterfaceVersion(kMaxVersion + 1));
+ static_assert(CheckSupportedCdmInterfaceVersions(9, 11),
+ "Mismatch between CdmWrapper::Create() and "
+ "IsSupportedCdmInterfaceVersion()");
// Try to create the CDM using the latest CDM interface version.
// This is only attempted if requested.
CdmWrapper* cdm_wrapper = nullptr;
// TODO(xhwang): Check whether we can use static loops to simplify this code.
- if (IsExperimentalCdmInterfaceSupported()) {
- cdm_wrapper = CdmWrapperImpl<cdm::ContentDecryptionModule_10>::Create(
- create_cdm_func, key_system, key_system_size, get_cdm_host_func,
- user_data);
+
+ // Try to use the latest supported and enabled CDM interface first. If it's
+ // not supported by the CDM, try to create the CDM using older supported
+ // versions.
+ if (IsSupportedAndEnabledCdmInterfaceVersion(11)) {
+ cdm_wrapper =
+ CdmWrapperImpl<11>::Create(create_cdm_func, key_system, key_system_size,
+ get_cdm_host_func, user_data);
}
- // If |cdm_wrapper| is NULL, try to create the CDM using older supported
- // versions of the CDM interface here.
- if (!cdm_wrapper) {
- cdm_wrapper = CdmWrapperImpl<cdm::ContentDecryptionModule_9>::Create(
- create_cdm_func, key_system, key_system_size, get_cdm_host_func,
- user_data);
+ if (!cdm_wrapper && IsSupportedAndEnabledCdmInterfaceVersion(10)) {
+ cdm_wrapper =
+ CdmWrapperImpl<10>::Create(create_cdm_func, key_system, key_system_size,
+ get_cdm_host_func, user_data);
}
- // If |cdm_wrapper| is NULL, try to create the CDM using older supported
- // versions of the CDM interface here.
- if (!cdm_wrapper) {
- cdm_wrapper = CdmWrapperImpl<cdm::ContentDecryptionModule_8>::Create(
- create_cdm_func, key_system, key_system_size, get_cdm_host_func,
- user_data);
+ if (!cdm_wrapper && IsSupportedAndEnabledCdmInterfaceVersion(9)) {
+ cdm_wrapper =
+ CdmWrapperImpl<9>::Create(create_cdm_func, key_system, key_system_size,
+ get_cdm_host_func, user_data);
}
return cdm_wrapper;
}
-// When updating the CdmAdapter, ensure you've updated the CdmWrapper to contain
-// stub implementations for new or modified methods that the older CDM interface
-// does not have.
-// Also update supported_cdm_versions.h.
-static_assert(cdm::ContentDecryptionModule::kVersion ==
- cdm::ContentDecryptionModule_9::kVersion,
- "ensure cdm wrapper templates have old version support");
-
} // namespace media
#endif // MEDIA_CDM_CDM_WRAPPER_H_
diff --git a/chromium/media/cdm/cenc_decryptor.cc b/chromium/media/cdm/cenc_decryptor.cc
new file mode 100644
index 00000000000..ae0442f7a2c
--- /dev/null
+++ b/chromium/media/cdm/cenc_decryptor.cc
@@ -0,0 +1,152 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/cenc_decryptor.h"
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "base/logging.h"
+#include "base/strings/string_piece.h"
+#include "crypto/encryptor.h"
+#include "crypto/symmetric_key.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/decrypt_config.h"
+#include "media/base/subsample_entry.h"
+
+namespace media {
+
+namespace {
+
+enum ClearBytesBufferSel { kSrcContainsClearBytes, kDstContainsClearBytes };
+
+// Copy the cypher bytes as specified by |subsamples| from |src| to |dst|.
+// If |sel| == kSrcContainsClearBytes, then |src| is expected to contain any
+// clear bytes specified by |subsamples| and will be skipped. This is used
+// when copying all the protected data out of a sample. If |sel| ==
+// kDstContainsClearBytes, then any clear bytes mentioned in |subsamples|
+// will be skipped in |dst|. This is used when copying the decrypted bytes
+// back into the buffer, replacing the encrypted portions.
+void CopySubsamples(const std::vector<SubsampleEntry>& subsamples,
+ const ClearBytesBufferSel sel,
+ const uint8_t* src,
+ uint8_t* dst) {
+ for (size_t i = 0; i < subsamples.size(); i++) {
+ const SubsampleEntry& subsample = subsamples[i];
+ if (sel == kSrcContainsClearBytes) {
+ src += subsample.clear_bytes;
+ } else {
+ dst += subsample.clear_bytes;
+ }
+ memcpy(dst, src, subsample.cypher_bytes);
+ src += subsample.cypher_bytes;
+ dst += subsample.cypher_bytes;
+ }
+}
+
+// TODO(crbug.com/840983): This should be done in DecoderBuffer so that
+// additional fields are more easily handled.
+void CopyExtraSettings(const DecoderBuffer& input, DecoderBuffer* output) {
+ output->set_timestamp(input.timestamp());
+ output->set_duration(input.duration());
+ output->set_is_key_frame(input.is_key_frame());
+ output->CopySideDataFrom(input.side_data(), input.side_data_size());
+}
+
+} // namespace
+
+scoped_refptr<DecoderBuffer> DecryptCencBuffer(
+ const DecoderBuffer& input,
+ const crypto::SymmetricKey& key) {
+ const char* sample = reinterpret_cast<const char*>(input.data());
+ const size_t sample_size = input.data_size();
+ DCHECK(sample_size) << "No data to decrypt.";
+
+ const DecryptConfig* decrypt_config = input.decrypt_config();
+ DCHECK(decrypt_config) << "No need to call Decrypt() on unencrypted buffer.";
+ DCHECK_EQ(EncryptionMode::kCenc, decrypt_config->encryption_mode());
+
+ const std::string& iv = decrypt_config->iv();
+ DCHECK_EQ(iv.size(), static_cast<size_t>(DecryptConfig::kDecryptionKeySize));
+
+ crypto::Encryptor encryptor;
+ if (!encryptor.Init(&key, crypto::Encryptor::CTR, "")) {
+ DVLOG(1) << "Could not initialize decryptor.";
+ return nullptr;
+ }
+
+ if (!encryptor.SetCounter(iv)) {
+ DVLOG(1) << "Could not set counter block.";
+ return nullptr;
+ }
+
+ const std::vector<SubsampleEntry>& subsamples = decrypt_config->subsamples();
+ if (subsamples.empty()) {
+ std::string decrypted_text;
+ base::StringPiece encrypted_text(sample, sample_size);
+ if (!encryptor.Decrypt(encrypted_text, &decrypted_text)) {
+ DVLOG(1) << "Could not decrypt data.";
+ return nullptr;
+ }
+
+ // TODO(xhwang): Find a way to avoid this data copy.
+ auto output = DecoderBuffer::CopyFrom(
+ reinterpret_cast<const uint8_t*>(decrypted_text.data()),
+ decrypted_text.size());
+ CopyExtraSettings(input, output.get());
+ return output;
+ }
+
+ if (!VerifySubsamplesMatchSize(subsamples, sample_size)) {
+ DVLOG(1) << "Subsample sizes do not equal input size";
+ return nullptr;
+ }
+
+ // Compute the size of the encrypted portion. Overflow, etc. checked by
+ // the call to VerifySubsamplesMatchSize().
+ size_t total_encrypted_size = 0;
+ for (const auto& subsample : subsamples)
+ total_encrypted_size += subsample.cypher_bytes;
+
+ // No need to decrypt if there is no encrypted data.
+ if (total_encrypted_size == 0) {
+ auto output = DecoderBuffer::CopyFrom(input.data(), sample_size);
+ CopyExtraSettings(input, output.get());
+ return output;
+ }
+
+ // The encrypted portions of all subsamples must form a contiguous block,
+ // such that an encrypted subsample that ends away from a block boundary is
+ // immediately followed by the start of the next encrypted subsample. We
+ // copy all encrypted subsamples to a contiguous buffer, decrypt them, then
+ // copy the decrypted bytes over the encrypted bytes in the output.
+ // TODO(strobe): attempt to reduce number of memory copies
+ std::unique_ptr<uint8_t[]> encrypted_bytes(new uint8_t[total_encrypted_size]);
+ CopySubsamples(subsamples, kSrcContainsClearBytes,
+ reinterpret_cast<const uint8_t*>(sample),
+ encrypted_bytes.get());
+
+ base::StringPiece encrypted_text(
+ reinterpret_cast<const char*>(encrypted_bytes.get()),
+ total_encrypted_size);
+ std::string decrypted_text;
+ if (!encryptor.Decrypt(encrypted_text, &decrypted_text)) {
+ DVLOG(1) << "Could not decrypt data.";
+ return nullptr;
+ }
+ DCHECK_EQ(decrypted_text.size(), encrypted_text.size());
+
+ scoped_refptr<DecoderBuffer> output = DecoderBuffer::CopyFrom(
+ reinterpret_cast<const uint8_t*>(sample), sample_size);
+ CopySubsamples(subsamples, kDstContainsClearBytes,
+ reinterpret_cast<const uint8_t*>(decrypted_text.data()),
+ output->writable_data());
+ CopyExtraSettings(input, output.get());
+ return output;
+}
+
+} // namespace media
diff --git a/chromium/media/cdm/cenc_decryptor.h b/chromium/media/cdm/cenc_decryptor.h
new file mode 100644
index 00000000000..f71072dd3c7
--- /dev/null
+++ b/chromium/media/cdm/cenc_decryptor.h
@@ -0,0 +1,46 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CDM_CENC_DECRYPTOR_H_
+#define MEDIA_CDM_CENC_DECRYPTOR_H_
+
+#include "base/memory/ref_counted.h"
+#include "media/base/media_export.h"
+
+namespace crypto {
+class SymmetricKey;
+}
+
+namespace media {
+class DecoderBuffer;
+
+// This class implements 'cenc' AES-CTR scheme as specified by
+// ISO/IEC 23001-7:2016, section 10.1 (https://www.iso.org). Decryption
+// uses the Advanced Encryption Standard specified by AES [FIPS-197,
+// https://www.nist.gov] using 128-bit keys in Counter Mode (AES-CTR-128),
+// as specified in Block Cipher Modes [NIST 800-38A, https://www.nist.gov].
+//
+// Each input buffer is divided into one or more contiguous subsamples. Each
+// subsample has an unprotected part (unencrypted) followed by a protected part
+// (encrypted), only one of which may be zero bytes in length. For example:
+// | DecoderBuffer.data() |
+// | Subsample#1 | Subsample#2 | Subsample#3 |
+// |uuuuu|eeeeeeeeee|uuuu|eeeeeeeeeeee|uu|eeeeeeeeeeee|
+// Subsample encryption encrypts all the bytes in the protected part of the
+// sample. The protected byte sequences of all subsamples are treated as a
+// logically continuous chain of 16 byte cipher blocks, even when they are
+// separated by unprotected data.
+//
+// If no subsamples are specified, the whole input buffer will be treated as
+// protected data.
+
+// Decrypts the encrypted buffer |input| using |key| and values found in
+// |input|->DecryptConfig. The key size must be 128 bits.
+MEDIA_EXPORT scoped_refptr<DecoderBuffer> DecryptCencBuffer(
+ const DecoderBuffer& input,
+ const crypto::SymmetricKey& key);
+
+} // namespace media
+
+#endif // MEDIA_CDM_CENC_DECRYPTOR_H_
diff --git a/chromium/media/cdm/cenc_decryptor_fuzzer.cc b/chromium/media/cdm/cenc_decryptor_fuzzer.cc
new file mode 100644
index 00000000000..3574f130822
--- /dev/null
+++ b/chromium/media/cdm/cenc_decryptor_fuzzer.cc
@@ -0,0 +1,66 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdint.h>
+
+#include <array>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "crypto/symmetric_key.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/subsample_entry.h"
+#include "media/cdm/cenc_decryptor.h"
+
+const std::array<uint8_t, 16> kKey = {0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
+ 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+ 0x10, 0x11, 0x12, 0x13};
+
+const std::array<uint8_t, 16> kIv = {0x20, 0x21, 0x22, 0x23, 0x24, 0x25,
+ 0x26, 0x27, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00};
+
+// For disabling noisy logging.
+struct Environment {
+ Environment() { logging::SetMinLogLevel(logging::LOG_FATAL); }
+};
+
+Environment* env = new Environment();
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ // From the data provided:
+ // 1) Use the first byte to determine how much of the buffer is "clear".
+ // 2) Rest of the buffer is the input data (which must be at least 1 byte).
+ // So the input buffer needs at least 2 bytes.
+ if (size < 2)
+ return 0;
+
+ const uint8_t clear_bytes = data[0];
+ data += 1;
+ size -= 1;
+
+ static std::unique_ptr<crypto::SymmetricKey> key =
+ crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES,
+ std::string(std::begin(kKey), std::end(kKey)));
+
+ // |clear_bytes| is used to determine how much of the buffer is "clear".
+ // Since the code checks SubsampleEntries, use |clear_bytes| as the actual
+ // number of bytes clear, and the rest as encrypted. To avoid size_t problems,
+ // only set |subsamples| if |clear_bytes| <= |size|. If |subsamples| is
+ // empty, the complete buffer is treated as encrypted.
+ std::vector<media::SubsampleEntry> subsamples;
+ if (clear_bytes <= size)
+ subsamples.push_back({clear_bytes, size - clear_bytes});
+
+ auto encrypted_buffer = media::DecoderBuffer::CopyFrom(data, size);
+
+ // Key_ID is never used.
+ encrypted_buffer->set_decrypt_config(media::DecryptConfig::CreateCencConfig(
+ "key_id", std::string(std::begin(kIv), std::end(kIv)), subsamples));
+
+ media::DecryptCencBuffer(*encrypted_buffer, *key);
+ return 0;
+}
diff --git a/chromium/media/cdm/cenc_decryptor_unittest.cc b/chromium/media/cdm/cenc_decryptor_unittest.cc
new file mode 100644
index 00000000000..2fa6b6900e1
--- /dev/null
+++ b/chromium/media/cdm/cenc_decryptor_unittest.cc
@@ -0,0 +1,269 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/cenc_decryptor.h"
+
+#include <stdint.h>
+
+#include <string>
+#include <vector>
+
+#include "base/containers/span.h"
+#include "base/time/time.h"
+#include "crypto/encryptor.h"
+#include "crypto/symmetric_key.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/decrypt_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+namespace {
+
+// Keys and IVs have to be 128 bits.
+const uint8_t kKey[] = {0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
+ 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13};
+static_assert(arraysize(kKey) * 8 == 128, "kKey must be 128 bits");
+
+const uint8_t kIv[] = {0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
+static_assert(arraysize(kIv) * 8 == 128, "kIv must be 128 bits");
+
+const uint8_t kOneBlock[] = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
+ 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p'};
+
+const uint8_t kPartialBlock[] = {'a', 'b', 'c', 'd', 'e', 'f'};
+
+std::string MakeString(const std::vector<uint8_t>& chars) {
+ return std::string(chars.begin(), chars.end());
+}
+
+// Combine multiple std::vector<uint8_t> into one.
+std::vector<uint8_t> Combine(const std::vector<std::vector<uint8_t>>& inputs) {
+ std::vector<uint8_t> result;
+ for (const auto& input : inputs)
+ result.insert(result.end(), input.begin(), input.end());
+
+ return result;
+}
+
+// Returns a std::vector<uint8_t> containing |count| copies of |input|.
+std::vector<uint8_t> Repeat(const std::vector<uint8_t>& input, size_t count) {
+ std::vector<uint8_t> result;
+ for (size_t i = 0; i < count; ++i)
+ result.insert(result.end(), input.begin(), input.end());
+ return result;
+}
+
+} // namespace
+
+// These tests only test decryption logic.
+class CencDecryptorTest : public testing::Test {
+ public:
+ CencDecryptorTest()
+ : key_(crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES,
+ std::string(kKey, kKey + arraysize(kKey)))),
+ iv_(kIv, kIv + arraysize(kIv)),
+ one_block_(kOneBlock, kOneBlock + arraysize(kOneBlock)),
+ partial_block_(kPartialBlock,
+ kPartialBlock + arraysize(kPartialBlock)) {}
+
+  // Encrypt |original| using AES-CTR encryption with |key| and |iv|.
+ std::vector<uint8_t> Encrypt(const std::vector<uint8_t>& original,
+ const crypto::SymmetricKey& key,
+ const std::string& iv) {
+ crypto::Encryptor encryptor;
+ EXPECT_TRUE(encryptor.Init(&key, crypto::Encryptor::CTR, ""));
+ EXPECT_TRUE(encryptor.SetCounter(iv));
+
+ std::string ciphertext;
+ EXPECT_TRUE(encryptor.Encrypt(MakeString(original), &ciphertext));
+ DCHECK_EQ(ciphertext.size(), original.size());
+
+ return std::vector<uint8_t>(ciphertext.begin(), ciphertext.end());
+ }
+
+ // Returns a 'cenc' DecoderBuffer using the data and other parameters.
+ scoped_refptr<DecoderBuffer> CreateEncryptedBuffer(
+ const std::vector<uint8_t>& data,
+ const std::string& iv,
+ const std::vector<SubsampleEntry>& subsample_entries) {
+ EXPECT_FALSE(data.empty());
+ EXPECT_FALSE(iv.empty());
+
+ scoped_refptr<DecoderBuffer> encrypted_buffer =
+ DecoderBuffer::CopyFrom(data.data(), data.size());
+
+ // Key_ID is never used.
+ encrypted_buffer->set_decrypt_config(
+ DecryptConfig::CreateCencConfig("key_id", iv, subsample_entries));
+ return encrypted_buffer;
+ }
+
+ // Calls DecryptCencBuffer() to decrypt |encrypted| using |key|, and then
+ // returns the decrypted buffer (empty if decryption fails).
+ std::vector<uint8_t> DecryptWithKey(scoped_refptr<DecoderBuffer> encrypted,
+ const crypto::SymmetricKey& key) {
+ scoped_refptr<DecoderBuffer> decrypted = DecryptCencBuffer(*encrypted, key);
+
+ std::vector<uint8_t> decrypted_data;
+ if (decrypted.get()) {
+ EXPECT_TRUE(decrypted->data_size());
+ decrypted_data.assign(decrypted->data(),
+ decrypted->data() + decrypted->data_size());
+ }
+
+ return decrypted_data;
+ }
+
+ // Constants for testing.
+ const std::unique_ptr<crypto::SymmetricKey> key_;
+ const std::string iv_;
+ const std::vector<uint8_t> one_block_;
+ const std::vector<uint8_t> partial_block_;
+};
+
+TEST_F(CencDecryptorTest, OneBlock) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ // Only 1 subsample, all encrypted data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ auto encrypted_buffer =
+ CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
+ EXPECT_EQ(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CencDecryptorTest, ExtraData) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ // Only 1 subsample, all encrypted data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ auto encrypted_buffer =
+ CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
+ encrypted_buffer->set_timestamp(base::TimeDelta::FromDays(2));
+ encrypted_buffer->set_duration(base::TimeDelta::FromMinutes(5));
+ encrypted_buffer->set_is_key_frame(true);
+ encrypted_buffer->CopySideDataFrom(encrypted_block.data(),
+ encrypted_block.size());
+
+ auto decrypted_buffer = DecryptCencBuffer(*encrypted_buffer, *key_);
+ EXPECT_EQ(encrypted_buffer->timestamp(), decrypted_buffer->timestamp());
+ EXPECT_EQ(encrypted_buffer->duration(), decrypted_buffer->duration());
+ EXPECT_EQ(encrypted_buffer->end_of_stream(),
+ decrypted_buffer->end_of_stream());
+ EXPECT_EQ(encrypted_buffer->is_key_frame(), decrypted_buffer->is_key_frame());
+ EXPECT_EQ(encrypted_buffer->side_data_size(),
+ decrypted_buffer->side_data_size());
+ EXPECT_EQ(base::make_span(encrypted_buffer->side_data(),
+ encrypted_buffer->side_data_size()),
+ base::make_span(decrypted_buffer->side_data(),
+ decrypted_buffer->side_data_size()));
+}
+
+TEST_F(CencDecryptorTest, NoSubsamples) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ // No subsamples specified.
+ std::vector<SubsampleEntry> subsamples = {};
+
+ auto encrypted_buffer =
+ CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
+ EXPECT_EQ(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CencDecryptorTest, BadSubsamples) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ // Subsample size > data size.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size() + 1}};
+
+ auto encrypted_buffer =
+ CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
+ EXPECT_EQ(std::vector<uint8_t>(), DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CencDecryptorTest, InvalidIv) {
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ // Use an invalid IV for decryption. Call should succeed, but return
+ // something other than the original data.
+ std::string invalid_iv(iv_.size(), 'a');
+ auto encrypted_buffer =
+ CreateEncryptedBuffer(encrypted_block, invalid_iv, subsamples);
+ EXPECT_NE(one_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CencDecryptorTest, InvalidKey) {
+ std::unique_ptr<crypto::SymmetricKey> bad_key = crypto::SymmetricKey::Import(
+ crypto::SymmetricKey::AES, std::string(arraysize(kKey), 'b'));
+ auto encrypted_block = Encrypt(one_block_, *key_, iv_);
+
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ // Use a different key for decryption. Call should succeed, but return
+ // something other than the original data.
+ auto encrypted_buffer =
+ CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
+ EXPECT_NE(one_block_, DecryptWithKey(encrypted_buffer, *bad_key));
+}
+
+TEST_F(CencDecryptorTest, PartialBlock) {
+ auto encrypted_block = Encrypt(partial_block_, *key_, iv_);
+
+ // Only 1 subsample, all encrypted data.
+ std::vector<SubsampleEntry> subsamples = {{0, encrypted_block.size()}};
+
+ auto encrypted_buffer =
+ CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
+ EXPECT_EQ(partial_block_, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CencDecryptorTest, MultipleSubsamples) {
+ // Encrypt 3 copies of |one_block_| together.
+ auto encrypted_block = Encrypt(Repeat(one_block_, 3), *key_, iv_);
+
+ // Treat as 3 subsamples.
+ std::vector<SubsampleEntry> subsamples = {
+ {0, one_block_.size()}, {0, one_block_.size()}, {0, one_block_.size()}};
+
+ auto encrypted_buffer =
+ CreateEncryptedBuffer(encrypted_block, iv_, subsamples);
+ EXPECT_EQ(Repeat(one_block_, 3), DecryptWithKey(encrypted_buffer, *key_));
+}
+
+TEST_F(CencDecryptorTest, MultipleSubsamplesWithClearBytes) {
+ // Create a buffer that looks like:
+ // subsamples: | subsample#1 | subsample#2 | subsample#3 |
+ // | clear | encrypted | clear | encrypted | clear |
+ // source: | one | partial* | partial | one* | partial |
+ // where * means the source is encrypted
+ auto encrypted_block =
+ Encrypt(Combine({partial_block_, one_block_}), *key_, iv_);
+ std::vector<uint8_t> encrypted_partial_block(
+ encrypted_block.begin(), encrypted_block.begin() + partial_block_.size());
+ EXPECT_EQ(encrypted_partial_block.size(), partial_block_.size());
+ std::vector<uint8_t> encrypted_one_block(
+ encrypted_block.begin() + partial_block_.size(), encrypted_block.end());
+ EXPECT_EQ(encrypted_one_block.size(), one_block_.size());
+
+ auto input_data =
+ Combine({one_block_, encrypted_partial_block, partial_block_,
+ encrypted_one_block, partial_block_});
+ auto expected_result = Combine(
+ {one_block_, partial_block_, partial_block_, one_block_, partial_block_});
+ std::vector<SubsampleEntry> subsamples = {
+ {one_block_.size(), partial_block_.size()},
+ {partial_block_.size(), one_block_.size()},
+ {partial_block_.size(), 0}};
+
+ auto encrypted_buffer = CreateEncryptedBuffer(input_data, iv_, subsamples);
+ EXPECT_EQ(expected_result, DecryptWithKey(encrypted_buffer, *key_));
+}
+
+} // namespace media
diff --git a/chromium/media/cdm/external_clear_key_test_helper.cc b/chromium/media/cdm/external_clear_key_test_helper.cc
index 8c9cea4f106..53fb602306d 100644
--- a/chromium/media/cdm/external_clear_key_test_helper.cc
+++ b/chromium/media/cdm/external_clear_key_test_helper.cc
@@ -33,7 +33,7 @@ void ExternalClearKeyTestHelper::LoadLibrary() {
// Determine the location of the CDM. It is expected to be in the same
// directory as the current module.
base::FilePath cdm_base_path;
- ASSERT_TRUE(PathService::Get(base::DIR_MODULE, &cdm_base_path));
+ ASSERT_TRUE(base::PathService::Get(base::DIR_MODULE, &cdm_base_path));
cdm_base_path = cdm_base_path.Append(
GetPlatformSpecificDirectory(kClearKeyCdmBaseDirectory));
library_path_ = cdm_base_path.AppendASCII(
diff --git a/chromium/media/cdm/library_cdm/BUILD.gn b/chromium/media/cdm/library_cdm/BUILD.gn
new file mode 100644
index 00000000000..2512ca9ff31
--- /dev/null
+++ b/chromium/media/cdm/library_cdm/BUILD.gn
@@ -0,0 +1,37 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//media/media_options.gni")
+
+if (enable_library_cdms) {
+ source_set("test_support") {
+ testonly = true
+
+ visibility = [ "//media/cdm:unit_tests" ]
+
+ sources = [
+ "mock_library_cdm.cc",
+ "mock_library_cdm.h",
+ ]
+
+ deps = [
+ ":cdm_host_proxy",
+ "//base",
+ "//media/cdm:cdm_api",
+ "//testing/gmock",
+ ]
+ }
+
+ source_set("cdm_host_proxy") {
+ sources = [
+ "cdm_host_proxy.h",
+ "cdm_host_proxy_impl.h",
+ ]
+
+ deps = [
+ "//base",
+ "//media/cdm:cdm_api",
+ ]
+ }
+}
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h b/chromium/media/cdm/library_cdm/cdm_host_proxy.h
index 7025bd2c5f7..ccbda3b6913 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h
+++ b/chromium/media/cdm/library_cdm/cdm_host_proxy.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_HOST_PROXY_H_
-#define MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_HOST_PROXY_H_
+#ifndef MEDIA_CDM_LIBRARY_CDM_CDM_HOST_PROXY_H_
+#define MEDIA_CDM_LIBRARY_CDM_CDM_HOST_PROXY_H_
#include "media/cdm/api/content_decryption_module.h"
@@ -60,4 +60,4 @@ class CdmHostProxy {
} // namespace media
-#endif // MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_HOST_PROXY_H_
+#endif // MEDIA_CDM_LIBRARY_CDM_CDM_HOST_PROXY_H_
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy_impl.h b/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h
index d1c0fa64fcb..49c2f2d1614 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy_impl.h
+++ b/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h
@@ -2,13 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_HOST_PROXY_IMPL_H_
-#define MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_HOST_PROXY_IMPL_H_
+#ifndef MEDIA_CDM_LIBRARY_CDM_CDM_HOST_PROXY_IMPL_H_
+#define MEDIA_CDM_LIBRARY_CDM_CDM_HOST_PROXY_IMPL_H_
+
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "base/logging.h"
#include "base/macros.h"
-#include "media/cdm/api/content_decryption_module.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h"
namespace media {
@@ -138,6 +138,15 @@ cdm::CdmProxy* CdmHostProxyImpl<cdm::Host_9>::RequestCdmProxy(
return nullptr;
}
+// Specialization for cdm::Host_10 methods.
+
+template <>
+cdm::CdmProxy* CdmHostProxyImpl<cdm::Host_10>::RequestCdmProxy(
+ cdm::CdmProxyClient* /* client */) {
+ NOTREACHED() << "cdm::ContentDecryptionModule_10 CDM should never call this.";
+ return nullptr;
+}
+
} // namespace media
-#endif // MEDIA_CDM_LIBRARY_CDM_CLEAR_KEY_CDM_CDM_HOST_PROXY_IMPL_H_
+#endif // MEDIA_CDM_LIBRARY_CDM_CDM_HOST_PROXY_IMPL_H_
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn b/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
index 1b20b84a184..c6fe179fc73 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
@@ -14,8 +14,6 @@ loadable_module("clear_key_cdm") {
"cdm_file_adapter.h",
"cdm_file_io_test.cc",
"cdm_file_io_test.h",
- "cdm_host_proxy.h",
- "cdm_host_proxy_impl.h",
"cdm_proxy_test.cc",
"cdm_proxy_test.h",
"cdm_video_decoder.cc",
@@ -38,6 +36,7 @@ loadable_module("clear_key_cdm") {
"//media", # For media::AudioTimestampHelper
"//media:shared_memory_support", # For media::AudioBus.
"//media/cdm:cdm_api", # For content_decryption_module.h
+ "//media/cdm/library_cdm:cdm_host_proxy",
"//url",
]
@@ -51,6 +50,15 @@ loadable_module("clear_key_cdm") {
defines += [ "CLEAR_KEY_CDM_USE_FFMPEG_DECODER" ]
deps += [ "//third_party/ffmpeg" ]
}
+
+ if (media_use_libvpx) {
+ sources += [
+ "libvpx_cdm_video_decoder.cc",
+ "libvpx_cdm_video_decoder.h",
+ ]
+ defines += [ "CLEAR_KEY_CDM_USE_LIBVPX_DECODER" ]
+ deps += [ "//third_party/libvpx" ]
+ }
}
source_set("clear_key_cdm_proxy") {
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.cc
index d5450efb021..33994ad8aa0 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.cc
@@ -8,7 +8,7 @@
#include "base/bind.h"
#include "base/logging.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
namespace media {
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.cc
index aad4ba54a7a..152539f4bd6 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.cc
@@ -9,7 +9,7 @@
#include "base/logging.h"
#include "base/macros.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "media/cdm/library_cdm/clear_key_cdm/cdm_proxy_common.h"
namespace media {
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
index 982bbfcb962..27c7940acc8 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
@@ -21,11 +21,12 @@
#include "media/base/cdm_key_information.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
+#include "media/base/encryption_pattern.h"
#include "media/cdm/api/content_decryption_module_ext.h"
#include "media/cdm/json_web_key.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
+#include "media/cdm/library_cdm/cdm_host_proxy_impl.h"
#include "media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy_impl.h"
#include "media/cdm/library_cdm/clear_key_cdm/cdm_proxy_test.h"
#include "media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.h"
#include "media/media_buildflags.h"
@@ -36,7 +37,6 @@
#include "base/path_service.h"
#include "media/base/media.h"
#include "media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.h"
-#include "media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_video_decoder.h"
#if !defined COMPONENT_BUILD
static base::AtExitManager g_at_exit_manager;
@@ -95,26 +95,32 @@ static scoped_refptr<media::DecoderBuffer> CopyDecoderBufferFrom(
output_buffer->set_timestamp(
base::TimeDelta::FromMicroseconds(input_buffer.timestamp));
- // TODO(xhwang): Unify how to check whether a buffer is encrypted.
- // See http://crbug.com/675003
- if (input_buffer.iv_size != 0) {
- DCHECK_GT(input_buffer.key_id_size, 0u);
- std::vector<media::SubsampleEntry> subsamples;
- for (uint32_t i = 0; i < input_buffer.num_subsamples; ++i) {
- subsamples.push_back(
- media::SubsampleEntry(input_buffer.subsamples[i].clear_bytes,
- input_buffer.subsamples[i].cipher_bytes));
- }
+ if (input_buffer.encryption_scheme == cdm::EncryptionScheme::kUnencrypted)
+ return output_buffer;
- std::unique_ptr<media::DecryptConfig> decrypt_config(
- new media::DecryptConfig(
- std::string(reinterpret_cast<const char*>(input_buffer.key_id),
- input_buffer.key_id_size),
- std::string(reinterpret_cast<const char*>(input_buffer.iv),
- input_buffer.iv_size),
- subsamples));
+ DCHECK_GT(input_buffer.iv_size, 0u);
+ DCHECK_GT(input_buffer.key_id_size, 0u);
+ std::vector<media::SubsampleEntry> subsamples;
+ for (uint32_t i = 0; i < input_buffer.num_subsamples; ++i) {
+ subsamples.push_back(
+ media::SubsampleEntry(input_buffer.subsamples[i].clear_bytes,
+ input_buffer.subsamples[i].cipher_bytes));
+ }
- output_buffer->set_decrypt_config(std::move(decrypt_config));
+ const std::string key_id_string(
+ reinterpret_cast<const char*>(input_buffer.key_id),
+ input_buffer.key_id_size);
+ const std::string iv_string(reinterpret_cast<const char*>(input_buffer.iv),
+ input_buffer.iv_size);
+ if (input_buffer.encryption_scheme == cdm::EncryptionScheme::kCenc) {
+ output_buffer->set_decrypt_config(media::DecryptConfig::CreateCencConfig(
+ key_id_string, iv_string, subsamples));
+ } else {
+ DCHECK_EQ(input_buffer.encryption_scheme, cdm::EncryptionScheme::kCbcs);
+ output_buffer->set_decrypt_config(media::DecryptConfig::CreateCbcsConfig(
+ key_id_string, iv_string, subsamples,
+ media::EncryptionPattern(input_buffer.pattern.crypt_byte_block,
+ input_buffer.pattern.skip_byte_block)));
}
return output_buffer;
@@ -263,9 +269,10 @@ void* CreateCdmInstance(int cdm_interface_version,
return nullptr;
}
- // We support both CDM_9 and CDM_10.
+ // We support CDM_9, CDM_10 and CDM_11.
using CDM_9 = cdm::ContentDecryptionModule_9;
using CDM_10 = cdm::ContentDecryptionModule_10;
+ using CDM_11 = cdm::ContentDecryptionModule_11;
if (cdm_interface_version == CDM_9::kVersion) {
CDM_9::Host* host = static_cast<CDM_9::Host*>(
@@ -288,6 +295,17 @@ void* CreateCdmInstance(int cdm_interface_version,
new media::ClearKeyCdm(host, key_system_string));
}
+ if (cdm_interface_version == CDM_11::kVersion) {
+ CDM_11::Host* host = static_cast<CDM_11::Host*>(
+ get_cdm_host_func(CDM_11::Host::kVersion, user_data));
+ if (!host)
+ return nullptr;
+
+ DVLOG(1) << __func__ << ": Create ClearKeyCdm with CDM_11::Host.";
+ return static_cast<CDM_11*>(
+ new media::ClearKeyCdm(host, key_system_string));
+ }
+
return nullptr;
}
@@ -353,36 +371,6 @@ namespace media {
namespace {
-bool IsSupportedConfigEncryptionScheme(cdm::EncryptionScheme scheme) {
- // TODO(crbug.com/658026): Support other decryption schemes.
- switch (scheme) {
- case cdm::EncryptionScheme::kUnencrypted:
- case cdm::EncryptionScheme::kCenc:
- return true;
- case cdm::EncryptionScheme::kCbcs:
- return false;
- }
-
- NOTREACHED();
- return false;
-}
-
-bool IsSupportedBufferEncryptionScheme(cdm::EncryptionScheme scheme,
- cdm::Pattern pattern) {
- // TODO(crbug.com/658026): Support other decryption schemes.
- switch (scheme) {
- case cdm::EncryptionScheme::kUnencrypted:
- return true;
- case cdm::EncryptionScheme::kCenc:
- return pattern.crypt_byte_block == 0 && pattern.skip_byte_block == 0;
- case cdm::EncryptionScheme::kCbcs:
- return false;
- }
-
- NOTREACHED();
- return false;
-}
-
cdm::InputBuffer_2 ToInputBuffer_2(cdm::InputBuffer_1 encrypted_buffer) {
cdm::InputBuffer_2 buffer = {};
buffer.data = encrypted_buffer.data;
@@ -698,11 +686,6 @@ cdm::Status ClearKeyCdm::InitializeAudioDecoder(
if (key_system_ == kExternalClearKeyDecryptOnlyKeySystem)
return cdm::kInitializationError;
- if (!IsSupportedConfigEncryptionScheme(
- audio_decoder_config.encryption_scheme)) {
- return cdm::kInitializationError;
- }
-
#if defined(CLEAR_KEY_CDM_USE_FFMPEG_DECODER)
if (!audio_decoder_)
audio_decoder_.reset(
@@ -735,11 +718,6 @@ cdm::Status ClearKeyCdm::InitializeVideoDecoder(
if (key_system_ == kExternalClearKeyDecryptOnlyKeySystem)
return cdm::kInitializationError;
- if (!IsSupportedConfigEncryptionScheme(
- video_decoder_config.encryption_scheme)) {
- return cdm::kInitializationError;
- }
-
if (video_decoder_ && video_decoder_->is_initialized()) {
DCHECK(!video_decoder_->is_initialized());
return cdm::kInitializationError;
@@ -885,17 +863,10 @@ cdm::Status ClearKeyCdm::DecryptToMediaDecoderBuffer(
scoped_refptr<DecoderBuffer>* decrypted_buffer) {
DCHECK(decrypted_buffer);
- if (!IsSupportedBufferEncryptionScheme(encrypted_buffer.encryption_scheme,
- encrypted_buffer.pattern)) {
- return cdm::kDecryptError;
- }
-
scoped_refptr<DecoderBuffer> buffer = CopyDecoderBufferFrom(encrypted_buffer);
- // TODO(xhwang): Unify how to check whether a buffer is encrypted.
- // See http://crbug.com/675003
- if (buffer->end_of_stream() || !buffer->decrypt_config() ||
- !buffer->decrypt_config()->is_encrypted()) {
+ // EOS and unencrypted streams can be returned as-is.
+ if (buffer->end_of_stream() || !buffer->decrypt_config()) {
*decrypted_buffer = std::move(buffer);
return cdm::kSuccess;
}
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
index 143fd20946c..b9c78fa9665 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.h
@@ -17,6 +17,7 @@
#include "base/synchronization/lock.h"
#include "media/base/cdm_key_information.h"
#include "media/base/cdm_promise.h"
+#include "media/cdm/api/content_decryption_module.h"
#include "media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.h"
namespace media {
@@ -32,7 +33,8 @@ const int64_t kInitialTimerDelayMs = 200;
// Clear key implementation of the cdm::ContentDecryptionModule interfaces.
class ClearKeyCdm : public cdm::ContentDecryptionModule_9,
- public cdm::ContentDecryptionModule_10 {
+ public cdm::ContentDecryptionModule_10,
+ public cdm::ContentDecryptionModule_11 {
public:
template <typename HostInterface>
ClearKeyCdm(HostInterface* host, const std::string& key_system);
@@ -53,7 +55,7 @@ class ClearKeyCdm : public cdm::ContentDecryptionModule_9,
const cdm::InputBuffer_1& encrypted_buffer,
cdm::AudioFrames* audio_frames) override;
- // cdm::ContentDecryptionModule_10 implementation.
+ // cdm::ContentDecryptionModule_10/11 implementation.
void Initialize(bool allow_distinctive_identifier,
bool allow_persistent_state,
bool use_hw_secure_codecs) override;
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc
index 953b89d0bee..c251354b535 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.cc
@@ -4,18 +4,20 @@
#include "media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h"
+#include "base/bind_helpers.h"
#include "base/logging.h"
+#include "media/base/content_decryption_module.h"
#include "media/cdm/library_cdm/clear_key_cdm/cdm_proxy_common.h"
namespace media {
-ClearKeyCdmProxy::ClearKeyCdmProxy() {}
+ClearKeyCdmProxy::ClearKeyCdmProxy() : weak_factory_(this) {}
ClearKeyCdmProxy::~ClearKeyCdmProxy() {}
-// TODO(xhwang): Returns a non-null pointer and add a test covering this path.
base::WeakPtr<CdmContext> ClearKeyCdmProxy::GetCdmContext() {
- return nullptr;
+ DVLOG(1) << __func__;
+ return weak_factory_.GetWeakPtr();
}
void ClearKeyCdmProxy::Initialize(Client* client, InitializeCB init_cb) {
@@ -31,7 +33,7 @@ void ClearKeyCdmProxy::Process(Function function,
const std::vector<uint8_t>& input_data,
uint32_t expected_output_data_size,
ProcessCB process_cb) {
- DVLOG(1) << __func__;
+ DVLOG(2) << __func__;
if (crypto_session_id != kClearKeyCdmProxyCryptoSessionId ||
!std::equal(input_data.begin(), input_data.end(),
@@ -50,7 +52,7 @@ void ClearKeyCdmProxy::Process(Function function,
void ClearKeyCdmProxy::CreateMediaCryptoSession(
const std::vector<uint8_t>& input_data,
CreateMediaCryptoSessionCB create_media_crypto_session_cb) {
- DVLOG(1) << __func__;
+ DVLOG(2) << __func__;
if (!std::equal(input_data.begin(), input_data.end(),
kClearKeyCdmProxyInputData.begin(),
@@ -70,4 +72,16 @@ void ClearKeyCdmProxy::SetKey(uint32_t crypto_session_id,
void ClearKeyCdmProxy::RemoveKey(uint32_t crypto_session_id,
const std::vector<uint8_t>& key_id) {}
+Decryptor* ClearKeyCdmProxy::GetDecryptor() {
+ DVLOG(1) << __func__;
+
+ if (!aes_decryptor_) {
+ aes_decryptor_ = base::MakeRefCounted<AesDecryptor>(
+ base::DoNothing(), base::DoNothing(), base::DoNothing(),
+ base::DoNothing());
+ }
+
+ return aes_decryptor_.get();
+}
+
} // namespace media
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h
index 5ec686eae32..20f3d99cb35 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h
@@ -7,12 +7,16 @@
#include "base/callback.h"
#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/weak_ptr.h"
+#include "media/base/cdm_context.h"
+#include "media/cdm/aes_decryptor.h"
#include "media/cdm/cdm_proxy.h"
namespace media {
// CdmProxy implementation for Clear Key CDM to test CDM Proxy support.
-class ClearKeyCdmProxy : public CdmProxy {
+class ClearKeyCdmProxy : public CdmProxy, public CdmContext {
public:
ClearKeyCdmProxy();
~ClearKeyCdmProxy() final;
@@ -34,7 +38,14 @@ class ClearKeyCdmProxy : public CdmProxy {
void RemoveKey(uint32_t crypto_session_id,
const std::vector<uint8_t>& key_id) final;
+ // CdmContext implementation.
+ Decryptor* GetDecryptor() final;
+
private:
+ scoped_refptr<AesDecryptor> aes_decryptor_;
+
+ base::WeakPtrFactory<ClearKeyCdmProxy> weak_factory_;
+
DISALLOW_COPY_AND_ASSIGN(ClearKeyCdmProxy);
};
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.h b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.h
index 7e20629152c..d41dce7745d 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.h
@@ -17,8 +17,8 @@
#include "base/memory/weak_ptr.h"
#include "media/base/content_decryption_module.h"
#include "media/cdm/aes_decryptor.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "media/cdm/library_cdm/clear_key_cdm/cdm_file_adapter.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h"
namespace media {
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
index 6ed901d4ab0..a043005d57a 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
@@ -16,7 +16,7 @@
#include "media/base/audio_timestamp_helper.h"
#include "media/base/data_buffer.h"
#include "media/base/limits.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_decoding_loop.h"
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_video_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_video_decoder.cc
index a945fd17044..5234f3f6334 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_video_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_video_decoder.cc
@@ -9,7 +9,7 @@
#include "base/callback.h"
#include "base/logging.h"
#include "media/base/limits.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_decoding_loop.h"
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.cc
index 99305db3ddd..11ebda6690a 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.cc
@@ -6,7 +6,7 @@
#include "base/logging.h"
#include "media/base/limits.h"
-#include "media/cdm/library_cdm/clear_key_cdm/cdm_host_proxy.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "third_party/libvpx/source/libvpx/vpx/vp8dx.h"
#include "third_party/libvpx/source/libvpx/vpx/vpx_decoder.h"
@@ -25,7 +25,8 @@ LibvpxCdmVideoDecoder::~LibvpxCdmVideoDecoder() {
Deinitialize();
}
-bool LibvpxCdmVideoDecoder::Initialize(const cdm::VideoDecoderConfig& config) {
+bool LibvpxCdmVideoDecoder::Initialize(
+ const cdm::VideoDecoderConfig_2& config) {
DVLOG(1) << "Initialize()";
if (!IsValidOutputConfig(config.format, config.coded_size)) {
@@ -44,8 +45,10 @@ bool LibvpxCdmVideoDecoder::Initialize(const cdm::VideoDecoderConfig& config) {
vpx_config.h = config.coded_size.height;
vpx_config.threads = kDecodeThreads;
- vpx_codec_err_t status =
- vpx_codec_dec_init(vpx_codec_, vpx_codec_vp8_dx(), &vpx_config, 0);
+ vpx_codec_err_t status = vpx_codec_dec_init(
+ vpx_codec_,
+ config.codec == cdm::kCodecVp9 ? vpx_codec_vp9_dx() : vpx_codec_vp8_dx(),
+ &vpx_config, 0);
if (status != VPX_CODEC_OK) {
LOG(ERROR) << "InitializeLibvpx(): vpx_codec_dec_init failed, ret="
<< status;
@@ -87,7 +90,7 @@ cdm::Status LibvpxCdmVideoDecoder::DecodeFrame(const uint8_t* compressed_frame,
int32_t compressed_frame_size,
int64_t timestamp,
cdm::VideoFrame* decoded_frame) {
- DVLOG(1) << "DecodeFrame()";
+ DVLOG(3) << __func__ << ": frame size = " << compressed_frame_size;
DCHECK(decoded_frame);
// Pass |compressed_frame| to libvpx.
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.h b/chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.h
index 1d202fc9b07..c71084ae0a9 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/libvpx_cdm_video_decoder.h
@@ -25,7 +25,7 @@ class LibvpxCdmVideoDecoder : public CdmVideoDecoder {
~LibvpxCdmVideoDecoder() override;
// CdmVideoDecoder implementation.
- bool Initialize(const cdm::VideoDecoderConfig& config) override;
+ bool Initialize(const cdm::VideoDecoderConfig_2& config) override;
void Deinitialize() override;
void Reset() override;
cdm::Status DecodeFrame(const uint8_t* compressed_frame,
diff --git a/chromium/media/cdm/library_cdm/mock_library_cdm.cc b/chromium/media/cdm/library_cdm/mock_library_cdm.cc
new file mode 100644
index 00000000000..c5803b54392
--- /dev/null
+++ b/chromium/media/cdm/library_cdm/mock_library_cdm.cc
@@ -0,0 +1,100 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/library_cdm/mock_library_cdm.h"
+
+#include <string>
+
+#include "base/logging.h"
+#include "media/cdm/library_cdm/cdm_host_proxy.h"
+#include "media/cdm/library_cdm/cdm_host_proxy_impl.h"
+
+namespace media {
+
+namespace {
+static MockLibraryCdm* g_mock_library_cdm = nullptr;
+} // namespace
+
+// static
+MockLibraryCdm* MockLibraryCdm::GetInstance() {
+ return g_mock_library_cdm;
+}
+
+template <typename HostInterface>
+MockLibraryCdm::MockLibraryCdm(HostInterface* host,
+ const std::string& key_system)
+ : cdm_host_proxy_(new CdmHostProxyImpl<HostInterface>(host)) {}
+
+MockLibraryCdm::~MockLibraryCdm() {
+ DCHECK(g_mock_library_cdm);
+ g_mock_library_cdm = nullptr;
+}
+
+CdmHostProxy* MockLibraryCdm::GetCdmHostProxy() {
+ return cdm_host_proxy_.get();
+}
+
+void MockLibraryCdm::Initialize(bool allow_distinctive_identifier,
+ bool allow_persistent_state) {
+ cdm_host_proxy_->OnInitialized(true);
+}
+
+void MockLibraryCdm::Initialize(bool allow_distinctive_identifier,
+ bool allow_persistent_state,
+ bool use_hw_secure_codecs) {
+ cdm_host_proxy_->OnInitialized(true);
+}
+
+void* CreateMockLibraryCdm(int cdm_interface_version,
+ const char* key_system,
+ uint32_t key_system_size,
+ GetCdmHostFunc get_cdm_host_func,
+ void* user_data) {
+ DVLOG(1) << __func__;
+ DCHECK(!g_mock_library_cdm);
+
+ std::string key_system_string(key_system, key_system_size);
+
+ // We support CDM_9, CDM_10 and CDM_11.
+ using CDM_9 = cdm::ContentDecryptionModule_9;
+ using CDM_10 = cdm::ContentDecryptionModule_10;
+ using CDM_11 = cdm::ContentDecryptionModule_11;
+
+ if (cdm_interface_version == CDM_9::kVersion) {
+ CDM_9::Host* host = static_cast<CDM_9::Host*>(
+ get_cdm_host_func(CDM_9::Host::kVersion, user_data));
+ if (!host)
+ return nullptr;
+
+ DVLOG(1) << __func__ << ": Create ClearKeyCdm with CDM_9::Host.";
+ g_mock_library_cdm = new MockLibraryCdm(host, key_system_string);
+ return static_cast<CDM_9*>(g_mock_library_cdm);
+ }
+
+ if (cdm_interface_version == CDM_10::kVersion) {
+ CDM_10::Host* host = static_cast<CDM_10::Host*>(
+ get_cdm_host_func(CDM_10::Host::kVersion, user_data));
+ if (!host)
+ return nullptr;
+
+ DVLOG(1) << __func__ << ": Create ClearKeyCdm with CDM_10::Host.";
+ g_mock_library_cdm = new MockLibraryCdm(host, key_system_string);
+ return static_cast<CDM_10*>(g_mock_library_cdm);
+ }
+
+ if (cdm_interface_version == CDM_11::kVersion) {
+ CDM_11::Host* host = static_cast<CDM_11::Host*>(
+ get_cdm_host_func(CDM_11::Host::kVersion, user_data));
+ if (!host)
+ return nullptr;
+
+ DVLOG(1) << __func__ << ": Create ClearKeyCdm with CDM_11::Host.";
+ g_mock_library_cdm = new MockLibraryCdm(host, key_system_string);
+ return static_cast<CDM_11*>(g_mock_library_cdm);
+ }
+
+ return nullptr;
+}
+
+} // namespace media
diff --git a/chromium/media/cdm/library_cdm/mock_library_cdm.h b/chromium/media/cdm/library_cdm/mock_library_cdm.h
new file mode 100644
index 00000000000..029e9ba0f85
--- /dev/null
+++ b/chromium/media/cdm/library_cdm/mock_library_cdm.h
@@ -0,0 +1,148 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CDM_LIBRARY_CDM_MOCK_LIBRARY_CDM_H_
+#define MEDIA_CDM_LIBRARY_CDM_MOCK_LIBRARY_CDM_H_
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+
+#include "media/cdm/api/content_decryption_module.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace media {
+
+class CdmHostProxy;
+
+// Mock implementation of the cdm::ContentDecryptionModule interfaces.
+class MockLibraryCdm : public cdm::ContentDecryptionModule_9,
+ public cdm::ContentDecryptionModule_10,
+ public cdm::ContentDecryptionModule_11 {
+ public:
+ // Provides easy access to the MockLibraryCdm instance for testing to avoid
+ // going through multiple layers to get it (e.g. CdmAdapter -> CdmWrapper ->
+ // CdmWrapperImpl). It does impose a limitation that we cannot have more than
+ // one MockLibraryCdm instances at the same time, which is fine in most
+ // testing cases.
+ static MockLibraryCdm* GetInstance();
+
+ template <typename HostInterface>
+ MockLibraryCdm(HostInterface* host, const std::string& key_system);
+
+ CdmHostProxy* GetCdmHostProxy();
+
+ // cdm::ContentDecryptionModule_9 implementation.
+ void Initialize(bool allow_distinctive_identifier,
+ bool allow_persistent_state) override;
+ MOCK_METHOD1(
+ InitializeAudioDecoder,
+ cdm::Status(const cdm::AudioDecoderConfig_1& audio_decoder_config));
+ MOCK_METHOD1(
+ InitializeVideoDecoder,
+ cdm::Status(const cdm::VideoDecoderConfig_1& video_decoder_config));
+ MOCK_METHOD2(Decrypt,
+ cdm::Status(const cdm::InputBuffer_1& encrypted_buffer,
+ cdm::DecryptedBlock* decrypted_block));
+ MOCK_METHOD2(DecryptAndDecodeFrame,
+ cdm::Status(const cdm::InputBuffer_1& encrypted_buffer,
+ cdm::VideoFrame* video_frame));
+ MOCK_METHOD2(DecryptAndDecodeSamples,
+ cdm::Status(const cdm::InputBuffer_1& encrypted_buffer,
+ cdm::AudioFrames* audio_frames));
+
+ // cdm::ContentDecryptionModule_10/11 implementation.
+ void Initialize(bool allow_distinctive_identifier,
+ bool allow_persistent_state,
+ bool use_hw_secure_codecs) override;
+ MOCK_METHOD1(
+ InitializeAudioDecoder,
+ cdm::Status(const cdm::AudioDecoderConfig_2& audio_decoder_config));
+ MOCK_METHOD1(
+ InitializeVideoDecoder,
+ cdm::Status(const cdm::VideoDecoderConfig_2& video_decoder_config));
+ MOCK_METHOD2(Decrypt,
+ cdm::Status(const cdm::InputBuffer_2& encrypted_buffer,
+ cdm::DecryptedBlock* decrypted_block));
+ MOCK_METHOD2(DecryptAndDecodeFrame,
+ cdm::Status(const cdm::InputBuffer_2& encrypted_buffer,
+ cdm::VideoFrame* video_frame));
+ MOCK_METHOD2(DecryptAndDecodeSamples,
+ cdm::Status(const cdm::InputBuffer_2& encrypted_buffer,
+ cdm::AudioFrames* audio_frames));
+
+ // Common cdm::ContentDecryptionModule_* implementation.
+ MOCK_METHOD2(GetStatusForPolicy,
+ void(uint32_t promise_id, const cdm::Policy& policy));
+ MOCK_METHOD5(CreateSessionAndGenerateRequest,
+ void(uint32_t promise_id,
+ cdm::SessionType session_type,
+ cdm::InitDataType init_data_type,
+ const uint8_t* init_data,
+ uint32_t init_data_size));
+ MOCK_METHOD4(LoadSession,
+ void(uint32_t promise_id,
+ cdm::SessionType session_type,
+ const char* session_id,
+ uint32_t session_id_length));
+ MOCK_METHOD5(UpdateSession,
+ void(uint32_t promise_id,
+ const char* session_id,
+ uint32_t session_id_length,
+ const uint8_t* response,
+ uint32_t response_size));
+ MOCK_METHOD3(CloseSession,
+ void(uint32_t promise_id,
+ const char* session_id,
+ uint32_t session_id_length));
+ MOCK_METHOD3(RemoveSession,
+ void(uint32_t promise_id,
+ const char* session_id,
+ uint32_t session_id_length));
+ MOCK_METHOD3(SetServerCertificate,
+ void(uint32_t promise_id,
+ const uint8_t* server_certificate_data,
+ uint32_t server_certificate_data_size));
+ MOCK_METHOD1(TimerExpired, void(void* context));
+ MOCK_METHOD1(DeinitializeDecoder, void(cdm::StreamType decoder_type));
+ MOCK_METHOD1(ResetDecoder, void(cdm::StreamType decoder_type));
+ MOCK_METHOD1(OnPlatformChallengeResponse,
+ void(const cdm::PlatformChallengeResponse& response));
+ MOCK_METHOD3(OnQueryOutputProtectionStatus,
+ void(cdm::QueryResult result,
+ uint32_t link_mask,
+ uint32_t output_protection_mask));
+ MOCK_METHOD3(OnStorageId,
+ void(uint32_t version,
+ const uint8_t* storage_id,
+ uint32_t storage_id_size));
+
+ // It could be tricky to expect Destroy() to be called and then delete
+ // MockLibraryCdm directly in the test. So call "delete this" in this class,
+ // same as a normal CDM implementation would do, but also add DestroyCalled()
+ // so that it's easy to ensure Destroy() is actually called.
+ MOCK_METHOD0(DestroyCalled, void());
+ void Destroy() override {
+ DestroyCalled();
+ delete this;
+ }
+
+ private:
+ // Can only be destructed through Destroy().
+ ~MockLibraryCdm() override;
+
+ std::unique_ptr<CdmHostProxy> cdm_host_proxy_;
+};
+
+// Helper function to create MockLibraryCdm.
+void* CreateMockLibraryCdm(int cdm_interface_version,
+ const char* key_system,
+ uint32_t key_system_size,
+ GetCdmHostFunc get_cdm_host_func,
+ void* user_data);
+
+} // namespace media
+
+#endif // MEDIA_CDM_LIBRARY_CDM_MOCK_LIBRARY_CDM_H_
diff --git a/chromium/media/cdm/mock_helpers.cc b/chromium/media/cdm/mock_helpers.cc
index fcd85efeaec..0e8b68e6287 100644
--- a/chromium/media/cdm/mock_helpers.cc
+++ b/chromium/media/cdm/mock_helpers.cc
@@ -14,12 +14,6 @@ MockCdmAuxiliaryHelper::~MockCdmAuxiliaryHelper() = default;
void MockCdmAuxiliaryHelper::SetFileReadCB(FileReadCB file_read_cb) {}
-cdm::FileIO* MockCdmAuxiliaryHelper::CreateCdmFileIO(
- cdm::FileIOClient* client) {
- NOTREACHED();
- return nullptr;
-}
-
cdm::Buffer* MockCdmAuxiliaryHelper::CreateCdmBuffer(size_t capacity) {
return allocator_->CreateCdmBuffer(capacity);
}
diff --git a/chromium/media/cdm/mock_helpers.h b/chromium/media/cdm/mock_helpers.h
index abf3587ae64..5c812113220 100644
--- a/chromium/media/cdm/mock_helpers.h
+++ b/chromium/media/cdm/mock_helpers.h
@@ -26,7 +26,7 @@ class MockCdmAuxiliaryHelper : public CdmAuxiliaryHelper {
// CdmAuxiliaryHelper implementation.
void SetFileReadCB(FileReadCB file_read_cb) override;
- cdm::FileIO* CreateCdmFileIO(cdm::FileIOClient* client) override;
+ MOCK_METHOD1(CreateCdmFileIO, cdm::FileIO*(cdm::FileIOClient* client));
cdm::Buffer* CreateCdmBuffer(size_t capacity) override;
std::unique_ptr<VideoFrameImpl> CreateCdmVideoFrame() override;
diff --git a/chromium/media/cdm/simple_cdm_allocator_unittest.cc b/chromium/media/cdm/simple_cdm_allocator_unittest.cc
index 9cc1352c64a..5f9d1d77fe1 100644
--- a/chromium/media/cdm/simple_cdm_allocator_unittest.cc
+++ b/chromium/media/cdm/simple_cdm_allocator_unittest.cc
@@ -22,14 +22,14 @@ class TestCdmBuffer : public cdm::Buffer {
}
// cdm::Buffer implementation.
- void Destroy() {
+ void Destroy() override {
DestroyCalled();
delete this;
}
- uint32_t Capacity() const { return buffer_.size(); }
- uint8_t* Data() { return buffer_.data(); }
- void SetSize(uint32_t size) { size_ = size > Capacity() ? 0 : size; }
- uint32_t Size() const { return size_; }
+ uint32_t Capacity() const override { return buffer_.size(); }
+ uint8_t* Data() override { return buffer_.data(); }
+ void SetSize(uint32_t size) override { size_ = size > Capacity() ? 0 : size; }
+ uint32_t Size() const override { return size_; }
private:
TestCdmBuffer(uint32_t capacity) : buffer_(capacity), size_(0) {
diff --git a/chromium/media/cdm/supported_cdm_versions.cc b/chromium/media/cdm/supported_cdm_versions.cc
index 1fdf5b64066..971895257f6 100644
--- a/chromium/media/cdm/supported_cdm_versions.cc
+++ b/chromium/media/cdm/supported_cdm_versions.cc
@@ -4,48 +4,44 @@
#include "media/cdm/supported_cdm_versions.h"
-#include "media/cdm/api/content_decryption_module.h"
+#include "base/command_line.h"
+#include "base/optional.h"
+#include "base/strings/string_number_conversions.h"
+#include "media/base/media_switches.h"
namespace media {
-bool IsSupportedCdmModuleVersion(int version) {
- switch (version) {
- // Latest.
- case CDM_MODULE_VERSION:
- return true;
- default:
- return false;
- }
-}
+namespace {
+
+// Returns the overridden supported CDM interface version specified on command
+// line, which can be null if not specified.
+base::Optional<int> GetSupportedCdmInterfaceVersionOverrideFromCommandLine() {
+ auto* command_line = base::CommandLine::ForCurrentProcess();
+ if (!command_line)
+ return base::nullopt;
+
+ auto version_string = command_line->GetSwitchValueASCII(
+ switches::kOverrideEnabledCdmInterfaceVersion);
-bool IsSupportedCdmInterfaceVersion(int version) {
- static_assert(cdm::ContentDecryptionModule::kVersion ==
- cdm::ContentDecryptionModule_9::kVersion,
- "update the code below");
- switch (version) {
- // Supported versions in decreasing order.
- case cdm::ContentDecryptionModule_10::kVersion:
- case cdm::ContentDecryptionModule_9::kVersion:
- case cdm::ContentDecryptionModule_8::kVersion:
- return true;
- default:
- return false;
- }
+ int version = 0;
+ if (!base::StringToInt(version_string, &version))
+ return base::nullopt;
+ else
+ return version;
}
-bool IsSupportedCdmHostVersion(int version) {
- static_assert(cdm::ContentDecryptionModule::Host::kVersion ==
- cdm::ContentDecryptionModule_9::Host::kVersion,
- "update the code below");
- switch (version) {
- // Supported versions in decreasing order.
- case cdm::Host_10::kVersion:
- case cdm::Host_9::kVersion:
- case cdm::Host_8::kVersion:
- return true;
- default:
- return false;
- }
+} // namespace
+
+bool IsSupportedAndEnabledCdmInterfaceVersion(int version) {
+ if (!IsSupportedCdmInterfaceVersion(version))
+ return false;
+
+ auto version_override =
+ GetSupportedCdmInterfaceVersionOverrideFromCommandLine();
+ if (version_override)
+ return version == version_override;
+
+ return IsCdmInterfaceVersionEnabledByDefault(version);
}
} // namespace media
diff --git a/chromium/media/cdm/supported_cdm_versions.h b/chromium/media/cdm/supported_cdm_versions.h
index dd572b91ccb..857a3ab6336 100644
--- a/chromium/media/cdm/supported_cdm_versions.h
+++ b/chromium/media/cdm/supported_cdm_versions.h
@@ -5,15 +5,155 @@
#ifndef MEDIA_CDM_SUPPORTED_CDM_VERSIONS_H_
#define MEDIA_CDM_SUPPORTED_CDM_VERSIONS_H_
+#include <array>
+
#include "media/base/media_export.h"
+#include "media/cdm/api/content_decryption_module.h"
+
+// A library CDM interface is "supported" if it's implemented by CdmAdapter and
+// CdmWrapper. Typically multiple CDM interfaces are supported:
+// - The latest stable CDM interface.
+// - Previous stable CDM interface(s), for supporting older CDMs.
+// - Experimental CDM interface(s), for development.
+//
+// A library CDM interface is "enabled" if it's enabled at runtime, e.g. being
+// able to be registered and creating CDM instances. Experimental CDM interfaces
+// must not be enabled by default.
+//
+// Whether a CDM interface is enabled can also be overridden by using command
+// line switch switches::kOverrideEnabledCdmInterfaceVersion for finer control
+// in a test environment or for local debugging, including enabling experimental
+// CDM interfaces.
namespace media {
-MEDIA_EXPORT bool IsSupportedCdmModuleVersion(int version);
+struct SupportedVersion {
+ int version;
+ bool enabled;
+};
+
+constexpr std::array<SupportedVersion, 3> kSupportedCdmInterfaceVersions = {{
+ {9, true},
+ {10, true},
+ {11, false},
+}};
+
+// In most cases CdmInterface::kVersion == CdmInterface::Host::kVersion. However
+// this is not guaranteed. For example, a newer CDM interface may use an
+// existing CDM host. So we keep CDM host support separate from CDM interface
+// support. In CdmInterfaceTraits we also static assert that for supported CDM
+// interface, CdmInterface::Host::kVersion must also be supported.
+constexpr int kMinSupportedCdmHostVersion = 9;
+constexpr int kMaxSupportedCdmHostVersion = 11;
+
+constexpr bool IsSupportedCdmModuleVersion(int version) {
+ return version == CDM_MODULE_VERSION;
+}
+
+// Returns whether the CDM interface of |version| is supported in the
+// implementation.
+constexpr bool IsSupportedCdmInterfaceVersion(int version) {
+ for (size_t i = 0; i < kSupportedCdmInterfaceVersions.size(); ++i) {
+ if (kSupportedCdmInterfaceVersions[i].version == version)
+ return true;
+ }
+
+ return false;
+}
+
+// Returns whether the CDM host interface of |version| is supported in the
+// implementation. Currently there's no way to disable a supported CDM host
+// interface at run time.
+constexpr bool IsSupportedCdmHostVersion(int version) {
+ return kMinSupportedCdmHostVersion <= version &&
+ version <= kMaxSupportedCdmHostVersion;
+}
+
+// Returns whether the CDM interface of |version| is enabled by default.
+constexpr bool IsCdmInterfaceVersionEnabledByDefault(int version) {
+ for (size_t i = 0; i < kSupportedCdmInterfaceVersions.size(); ++i) {
+ if (kSupportedCdmInterfaceVersions[i].version == version)
+ return kSupportedCdmInterfaceVersions[i].enabled;
+ }
+
+ return false;
+}
+
+// Returns whether the CDM interface of |version| is supported in the
+// implementation and enabled at runtime.
+MEDIA_EXPORT bool IsSupportedAndEnabledCdmInterfaceVersion(int version);
+
+typedef bool (*VersionCheckFunc)(int version);
+
+// Returns true if all versions in the range [min_version, max_version] and no
+// versions outside the range are supported, and false otherwise.
+constexpr bool CheckVersions(VersionCheckFunc check_func,
+ int min_version,
+ int max_version) {
+ // For simplicity, only check one version out of the range boundary.
+ if (check_func(min_version - 1) || check_func(max_version + 1))
+ return false;
+
+ for (int version = min_version; version <= max_version; ++version) {
+ if (!check_func(version))
+ return false;
+ }
+
+ return true;
+}
+
+// Ensures CDM interface versions in and only in the range [min_version,
+// max_version] are supported in the implementation.
+constexpr bool CheckSupportedCdmInterfaceVersions(int min_version,
+ int max_version) {
+ return CheckVersions(IsSupportedCdmInterfaceVersion, min_version,
+ max_version);
+}
+
+// Ensures CDM host interface versions in and only in the range [min_version,
+// max_version] are supported in the implementation.
+constexpr bool CheckSupportedCdmHostVersions(int min_version, int max_version) {
+ return CheckVersions(IsSupportedCdmHostVersion, min_version, max_version);
+}
+
+// Traits for CDM Interfaces
+template <int CdmInterfaceVersion>
+struct CdmInterfaceTraits {};
+
+template <>
+struct CdmInterfaceTraits<9> {
+ using CdmInterface = cdm::ContentDecryptionModule_9;
+ static_assert(CdmInterface::kVersion == 9, "CDM interface version mismatch");
+ static_assert(IsSupportedCdmHostVersion(CdmInterface::Host::kVersion),
+ "Host not supported");
+ // CDM_9 is already stable and enabled by default.
+ // TODO(xhwang): After CDM_9 support is removed, consider using a macro to
+ // help define CdmInterfaceTraits specializations.
+};
-MEDIA_EXPORT bool IsSupportedCdmInterfaceVersion(int version);
+template <>
+struct CdmInterfaceTraits<10> {
+ using CdmInterface = cdm::ContentDecryptionModule_10;
+ static_assert(CdmInterface::kVersion == 10, "CDM interface version mismatch");
+ static_assert(IsSupportedCdmHostVersion(CdmInterface::Host::kVersion),
+ "Host not supported");
+ static_assert(
+ CdmInterface::kIsStable ||
+ !IsCdmInterfaceVersionEnabledByDefault(CdmInterface::kVersion),
+ "Experimental CDM interface should not be enabled by default");
+};
-MEDIA_EXPORT bool IsSupportedCdmHostVersion(int version);
+template <>
+struct CdmInterfaceTraits<11> {
+ using CdmInterface = cdm::ContentDecryptionModule_11;
+ static_assert(CdmInterface::kVersion == 11, "CDM interface version mismatch");
+ static_assert(IsSupportedCdmHostVersion(CdmInterface::Host::kVersion),
+ "Host not supported");
+ static_assert(
+ CdmInterface::kIsStable ||
+ !IsCdmInterfaceVersionEnabledByDefault(CdmInterface::kVersion),
+ "Experimental CDM interface should not be enabled by default");
+};
} // namespace media
diff --git a/chromium/media/device_monitors/device_monitor_mac.h b/chromium/media/device_monitors/device_monitor_mac.h
index b7c85f9b601..26f8353082c 100644
--- a/chromium/media/device_monitors/device_monitor_mac.h
+++ b/chromium/media/device_monitors/device_monitor_mac.h
@@ -8,6 +8,7 @@
#include <memory>
#include "base/macros.h"
+#include "base/single_thread_task_runner.h"
#include "base/system_monitor/system_monitor.h"
#include "base/threading/thread_checker.h"
#include "media/base/media_export.h"
@@ -18,9 +19,7 @@ class DeviceMonitorMacImpl;
namespace media {
-class AudioDeviceListenerMac;
-
-// Class to track audio/video devices removal or addition via callback to
+// Class to track video devices removal or addition via callback to
// base::SystemMonitor ProcessDevicesChanged(). A single object of this class
// is created from the browser main process and lives as long as this one.
class MEDIA_EXPORT DeviceMonitorMac {
@@ -31,7 +30,7 @@ class MEDIA_EXPORT DeviceMonitorMac {
scoped_refptr<base::SingleThreadTaskRunner> device_task_runner);
~DeviceMonitorMac();
- // Registers the observers for the audio/video device removal, connection and
+ // Registers the observers for the video device removal, connection and
// suspension. The AVFoundation library is also loaded and initialised if the
// OS supports it.
void StartMonitoring();
@@ -44,7 +43,6 @@ class MEDIA_EXPORT DeviceMonitorMac {
private:
scoped_refptr<base::SingleThreadTaskRunner> device_task_runner_;
std::unique_ptr<DeviceMonitorMacImpl> device_monitor_impl_;
- std::unique_ptr<AudioDeviceListenerMac> audio_device_listener_;
// |thread_checker_| is used to check that constructor and StartMonitoring()
// are called in the correct thread, the UI thread, that also owns the object.
diff --git a/chromium/media/device_monitors/device_monitor_mac.mm b/chromium/media/device_monitors/device_monitor_mac.mm
index 8830d933e0d..fff4482bae4 100644
--- a/chromium/media/device_monitors/device_monitor_mac.mm
+++ b/chromium/media/device_monitors/device_monitor_mac.mm
@@ -14,7 +14,6 @@
#include "base/macros.h"
#include "base/task_runner_util.h"
#include "base/threading/thread_checker.h"
-#include "media/audio/mac/audio_device_listener_mac.h"
namespace {
@@ -444,15 +443,6 @@ void DeviceMonitorMac::StartMonitoring() {
DVLOG(1) << "Monitoring via AVFoundation";
device_monitor_impl_ =
std::make_unique<AVFoundationMonitorImpl>(this, device_task_runner_);
- audio_device_listener_ = std::make_unique<AudioDeviceListenerMac>(
- base::BindRepeating([] {
- if (base::SystemMonitor::Get()) {
- base::SystemMonitor::Get()->ProcessDevicesChanged(
- base::SystemMonitor::DEVTYPE_AUDIO);
- }
- }),
- true /* monitor_default_input */, true /* monitor_addition_removal */,
- true /* monitor_sources */);
}
void DeviceMonitorMac::NotifyDeviceChanged(
diff --git a/chromium/media/device_monitors/device_monitor_udev.cc b/chromium/media/device_monitors/device_monitor_udev.cc
index d0e8bb8fb57..3998e671f27 100644
--- a/chromium/media/device_monitors/device_monitor_udev.cc
+++ b/chromium/media/device_monitors/device_monitor_udev.cc
@@ -50,7 +50,7 @@ void DeviceMonitorLinux::Initialize() {
DCHECK(io_task_runner_->BelongsToCurrentThread());
// We want to be notified of IO message loop destruction to delete |udev_|.
- base::MessageLoop::current()->AddDestructionObserver(this);
+ base::MessageLoopCurrent::Get()->AddDestructionObserver(this);
std::vector<device::UdevLinux::UdevMonitorFilter> filters;
for (const SubsystemMap& entry : kSubsystemMap) {
diff --git a/chromium/media/device_monitors/device_monitor_udev.h b/chromium/media/device_monitors/device_monitor_udev.h
index fe326eee8e4..330b678a43a 100644
--- a/chromium/media/device_monitors/device_monitor_udev.h
+++ b/chromium/media/device_monitors/device_monitor_udev.h
@@ -12,7 +12,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_loop_current.h"
#include "base/single_thread_task_runner.h"
#include "media/base/media_export.h"
@@ -27,7 +27,7 @@ class UdevLinux;
namespace media {
class MEDIA_EXPORT DeviceMonitorLinux
- : public base::MessageLoop::DestructionObserver {
+ : public base::MessageLoopCurrent::DestructionObserver {
public:
explicit DeviceMonitorLinux(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
diff --git a/chromium/media/ffmpeg/ffmpeg_common.h b/chromium/media/ffmpeg/ffmpeg_common.h
index ec33068fb84..f641d6bcf92 100644
--- a/chromium/media/ffmpeg/ffmpeg_common.h
+++ b/chromium/media/ffmpeg/ffmpeg_common.h
@@ -28,9 +28,6 @@ extern "C" {
MSVC_PUSH_DISABLE_WARNING(4244);
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
-#if !BUILDFLAG(USE_SYSTEM_FFMPEG)
-#include <libavformat/internal.h>
-#endif // !BUILDFLAG(USE_SYSTEM_FFMPEG)
#include <libavformat/avio.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
@@ -42,6 +39,8 @@ MSVC_POP_WARNING();
namespace media {
+constexpr int64_t kNoFFmpegTimestamp = static_cast<int64_t>(AV_NOPTS_VALUE);
+
class AudioDecoderConfig;
class EncryptionScheme;
class VideoDecoderConfig;
diff --git a/chromium/media/filters/BUILD.gn b/chromium/media/filters/BUILD.gn
index 9166066e7d1..30eb8ec2ff9 100644
--- a/chromium/media/filters/BUILD.gn
+++ b/chromium/media/filters/BUILD.gn
@@ -103,7 +103,7 @@ source_set("filters") {
"//media/formats",
"//media/video",
"//skia",
- "//third_party/libaom:av1_features",
+ "//third_party/libaom:av1_buildflags",
"//third_party/libyuv",
]
@@ -126,6 +126,8 @@ source_set("filters") {
sources += [
"audio_file_reader.cc",
"audio_file_reader.h",
+ "audio_video_metadata_extractor.cc",
+ "audio_video_metadata_extractor.h",
"blocking_url_protocol.cc",
"blocking_url_protocol.h",
"ffmpeg_audio_decoder.cc",
@@ -137,15 +139,9 @@ source_set("filters") {
"ffmpeg_glue.h",
"in_memory_url_protocol.cc",
"in_memory_url_protocol.h",
+ "media_file_checker.cc",
+ "media_file_checker.h",
]
- if (!is_android) {
- sources += [
- "audio_video_metadata_extractor.cc",
- "audio_video_metadata_extractor.h",
- "media_file_checker.cc",
- "media_file_checker.h",
- ]
- }
}
if (media_use_libvpx) {
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.cc b/chromium/media/filters/android/media_codec_audio_decoder.cc
index 861652aa387..9429b426ceb 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.cc
+++ b/chromium/media/filters/android/media_codec_audio_decoder.cc
@@ -82,6 +82,8 @@ void MediaCodecAudioDecoder::Initialize(
sample_format_ = kSampleFormatAc3;
else if (config.codec() == kCodecEAC3)
sample_format_ = kSampleFormatEac3;
+ else if (config.codec() == kCodecMpegHAudio)
+ sample_format_ = kSampleFormatMpegHAudio;
if (state_ == STATE_ERROR) {
DVLOG(1) << "Decoder is in error state.";
@@ -176,7 +178,7 @@ void MediaCodecAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DCHECK(codec_loop_);
- DVLOG(2) << __func__ << " " << buffer->AsHumanReadableString();
+ DVLOG(3) << __func__ << " " << buffer->AsHumanReadableString();
DCHECK_EQ(state_, STATE_READY) << " unexpected state " << AsString(state_);
@@ -190,7 +192,7 @@ void MediaCodecAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
}
void MediaCodecAudioDecoder::Reset(const base::Closure& closure) {
- DVLOG(1) << __func__;
+ DVLOG(2) << __func__;
ClearInputQueue(DecodeStatus::ABORTED);
@@ -285,7 +287,7 @@ bool MediaCodecAudioDecoder::IsAnyInputPending() const {
}
MediaCodecLoop::InputData MediaCodecAudioDecoder::ProvideInputData() {
- DVLOG(2) << __func__;
+ DVLOG(3) << __func__;
const DecoderBuffer* decoder_buffer = input_queue_.front().first.get();
@@ -296,7 +298,9 @@ MediaCodecLoop::InputData MediaCodecAudioDecoder::ProvideInputData() {
input_data.memory = static_cast<const uint8_t*>(decoder_buffer->data());
input_data.length = decoder_buffer->data_size();
const DecryptConfig* decrypt_config = decoder_buffer->decrypt_config();
- if (decrypt_config && decrypt_config->is_encrypted()) {
+ if (decrypt_config) {
+ // TODO(crbug.com/813845): Use encryption scheme settings from
+ // DecryptConfig.
input_data.key_id = decrypt_config->key_id();
input_data.iv = decrypt_config->iv();
input_data.subsamples = decrypt_config->subsamples();
@@ -334,7 +338,7 @@ void MediaCodecAudioDecoder::ClearInputQueue(DecodeStatus decode_status) {
}
void MediaCodecAudioDecoder::SetState(State new_state) {
- DVLOG(1) << __func__ << ": " << AsString(state_) << "->"
+ DVLOG(3) << __func__ << ": " << AsString(state_) << "->"
<< AsString(new_state);
state_ = new_state;
}
@@ -372,7 +376,7 @@ bool MediaCodecAudioDecoder::OnDecodedEos(
bool MediaCodecAudioDecoder::OnDecodedFrame(
const MediaCodecLoop::OutputBuffer& out) {
- DVLOG(2) << __func__ << " pts:" << out.pts;
+ DVLOG(3) << __func__ << " pts:" << out.pts;
DCHECK_NE(out.size, 0U);
DCHECK_NE(out.index, MediaCodecLoop::kInvalidBufferIndex);
diff --git a/chromium/media/filters/aom_video_decoder.cc b/chromium/media/filters/aom_video_decoder.cc
index 5cb89e9ec00..41250ae6dad 100644
--- a/chromium/media/filters/aom_video_decoder.cc
+++ b/chromium/media/filters/aom_video_decoder.cc
@@ -14,6 +14,7 @@
#include "media/base/decoder_buffer.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
+#include "media/base/video_util.h"
#include "third_party/libyuv/include/libyuv/convert.h"
// Include libaom header files.
@@ -358,9 +359,11 @@ scoped_refptr<VideoFrame> AomVideoDecoder::CopyImageToVideoFrame(
}
// Since we're making a copy, only copy the visible area.
- const gfx::Size size(img->d_w, img->d_h);
- auto frame = frame_pool_.CreateFrame(pixel_format, size, gfx::Rect(size),
- config_.natural_size(), kNoTimestamp);
+ const gfx::Rect visible_rect(img->d_w, img->d_h);
+ auto frame = frame_pool_.CreateFrame(
+ pixel_format, visible_rect.size(), visible_rect,
+ GetNaturalSize(visible_rect, config_.GetPixelAspectRatio()),
+ kNoTimestamp);
if (!frame)
return nullptr;
@@ -380,8 +383,8 @@ scoped_refptr<VideoFrame> AomVideoDecoder::CopyImageToVideoFrame(
frame->visible_data(VideoFrame::kUPlane),
frame->stride(VideoFrame::kUPlane),
frame->visible_data(VideoFrame::kVPlane),
- frame->stride(VideoFrame::kVPlane), size.width(),
- size.height());
+ frame->stride(VideoFrame::kVPlane), visible_rect.width(),
+ visible_rect.height());
}
return frame;
diff --git a/chromium/media/filters/aom_video_decoder_unittest.cc b/chromium/media/filters/aom_video_decoder_unittest.cc
index e57498326fe..7ab2b07ad95 100644
--- a/chromium/media/filters/aom_video_decoder_unittest.cc
+++ b/chromium/media/filters/aom_video_decoder_unittest.cc
@@ -6,6 +6,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "build/build_config.h"
@@ -45,7 +46,7 @@ class AomVideoDecoderTest : public testing::Test {
decoder_->Initialize(
config, false, nullptr, NewExpectedBoolCB(success),
base::Bind(&AomVideoDecoderTest::FrameReady, base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/audio_decoder_selector_unittest.cc b/chromium/media/filters/audio_decoder_selector_unittest.cc
index 5ac7d464332..07af187f509 100644
--- a/chromium/media/filters/audio_decoder_selector_unittest.cc
+++ b/chromium/media/filters/audio_decoder_selector_unittest.cc
@@ -11,6 +11,7 @@
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "build/build_config.h"
+#include "media/base/channel_layout.h"
#include "media/base/gmock_callback_support.h"
#include "media/base/media_util.h"
#include "media/base/mock_filters.h"
@@ -59,14 +60,14 @@ class AudioDecoderSelectorTest : public ::testing::Test {
};
AudioDecoderSelectorTest()
- : traits_(&media_log_),
+ : traits_(&media_log_, CHANNEL_LAYOUT_STEREO),
demuxer_stream_(
new StrictMock<MockDemuxerStream>(DemuxerStream::AUDIO)) {
// |cdm_context_| and |decryptor_| are conditionally created in
// InitializeDecoderSelector().
}
- ~AudioDecoderSelectorTest() { base::RunLoop().RunUntilIdle(); }
+ ~AudioDecoderSelectorTest() override { base::RunLoop().RunUntilIdle(); }
MOCK_METHOD2(OnDecoderSelected,
void(AudioDecoder*, DecryptingDemuxerStream*));
diff --git a/chromium/media/filters/audio_decoder_unittest.cc b/chromium/media/filters/audio_decoder_unittest.cc
index df321cad833..9b373fd78ed 100644
--- a/chromium/media/filters/audio_decoder_unittest.cc
+++ b/chromium/media/filters/audio_decoder_unittest.cc
@@ -8,6 +8,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/containers/circular_deque.h"
#include "base/format_macros.h"
#include "base/macros.h"
@@ -246,7 +247,7 @@ class AudioDecoderTest
decoder_->Initialize(
config, nullptr, NewExpectedBoolCB(success),
base::Bind(&AudioDecoderTest::OnDecoderOutput, base::Unretained(this)),
- AudioDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/audio_file_reader_unittest.cc b/chromium/media/filters/audio_file_reader_unittest.cc
index 6b3ce992ba7..6df10b2a8fd 100644
--- a/chromium/media/filters/audio_file_reader_unittest.cc
+++ b/chromium/media/filters/audio_file_reader_unittest.cc
@@ -201,7 +201,6 @@ TEST_F(AudioFileReaderTest, WaveF32LE) {
12719);
}
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST_F(AudioFileReaderTest, MP3) {
RunTest("sfx.mp3",
"1.30,2.72,4.56,5.08,3.74,2.03,",
@@ -225,6 +224,7 @@ TEST_F(AudioFileReaderTest, CorruptMP3) {
44928);
}
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST_F(AudioFileReaderTest, AAC) {
RunTest("sfx.m4a", "1.81,1.66,2.32,3.27,4.46,3.36,", 1, 44100,
base::TimeDelta::FromMicroseconds(371660), 16391, 13312);
diff --git a/chromium/media/filters/audio_renderer_algorithm_unittest.cc b/chromium/media/filters/audio_renderer_algorithm_unittest.cc
index c7c50156609..e496653df46 100644
--- a/chromium/media/filters/audio_renderer_algorithm_unittest.cc
+++ b/chromium/media/filters/audio_renderer_algorithm_unittest.cc
@@ -115,7 +115,7 @@ class AudioRendererAlgorithmTest : public testing::Test {
format = media::AudioParameters::AUDIO_BITSTREAM_EAC3;
AudioParameters params(format, channel_layout, samples_per_second,
- bytes_per_sample_ * 8, frames_per_buffer);
+ frames_per_buffer);
bool is_encrypted = false;
algorithm_.Initialize(params, is_encrypted);
algorithm_.SetChannelMask(std::move(channel_mask));
@@ -259,12 +259,11 @@ class AudioRendererAlgorithmTest : public testing::Test {
void WsolaTest(double playback_rate) {
const int kSampleRateHz = 48000;
const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
- const int kBytesPerSample = 2;
const int kNumFrames = kSampleRateHz / 100; // 10 milliseconds.
channels_ = ChannelLayoutToChannelCount(kChannelLayout);
AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR, kChannelLayout,
- kSampleRateHz, kBytesPerSample * 8, kNumFrames);
+ kSampleRateHz, kNumFrames);
bool is_encrypted = false;
algorithm_.Initialize(params, is_encrypted);
diff --git a/chromium/media/filters/audio_timestamp_validator.cc b/chromium/media/filters/audio_timestamp_validator.cc
index 666252540f5..cd5d42aee7c 100644
--- a/chromium/media/filters/audio_timestamp_validator.cc
+++ b/chromium/media/filters/audio_timestamp_validator.cc
@@ -23,6 +23,9 @@ const int kLimitTriesForStableTiming = 5;
// CheckForTimestampGap().
const int kStableTimeGapThrsholdMsec = 1;
+// Maximum number of timestamp gap warnings sent to MediaLog.
+const int kMaxTimestampGapWarnings = 10;
+
AudioTimestampValidator::AudioTimestampValidator(
const AudioDecoderConfig& decoder_config,
MediaLog* media_log)
@@ -101,7 +104,7 @@ void AudioTimestampValidator::CheckForTimestampGap(
// Let developers know if their files timestamps are way off from
if (num_unstable_audio_tries_ > limit_unstable_audio_tries_) {
- MEDIA_LOG(ERROR, media_log_)
+ MEDIA_LOG(WARNING, media_log_)
<< "Failed to reconcile encoded audio times with decoded output.";
}
}
@@ -111,7 +114,8 @@ void AudioTimestampValidator::CheckForTimestampGap(
}
if (std::abs(ts_delta.InMilliseconds()) > drift_warning_threshold_msec_) {
- MEDIA_LOG(ERROR, media_log_)
+ LIMITED_MEDIA_LOG(WARNING, media_log_, num_timestamp_gap_warnings_,
+ kMaxTimestampGapWarnings)
<< " Large timestamp gap detected; may cause AV sync to drift."
<< " time:" << buffer.timestamp().InMicroseconds() << "us"
<< " expected:" << expected_ts.InMicroseconds() << "us"
@@ -141,4 +145,4 @@ void AudioTimestampValidator::RecordOutputDuration(
audio_output_ts_helper_->AddFrames(audio_buffer->frame_count());
}
-} // namespace media \ No newline at end of file
+} // namespace media
diff --git a/chromium/media/filters/audio_timestamp_validator.h b/chromium/media/filters/audio_timestamp_validator.h
index bfdfcfd852e..665501c320e 100644
--- a/chromium/media/filters/audio_timestamp_validator.h
+++ b/chromium/media/filters/audio_timestamp_validator.h
@@ -59,6 +59,9 @@ class MEDIA_EXPORT AudioTimestampValidator {
// logs if things get worse. See CheckTimestampForGap().
uint32_t drift_warning_threshold_msec_;
+ // Tracks the number of MEDIA_LOG warnings when a large timestamp gap is detected.
+ int num_timestamp_gap_warnings_ = 0;
+
DISALLOW_COPY_AND_ASSIGN(AudioTimestampValidator);
};
diff --git a/chromium/media/filters/audio_timestamp_validator_unittest.cc b/chromium/media/filters/audio_timestamp_validator_unittest.cc
index af1833cd904..b2b84afa3f8 100644
--- a/chromium/media/filters/audio_timestamp_validator_unittest.cc
+++ b/chromium/media/filters/audio_timestamp_validator_unittest.cc
@@ -204,6 +204,9 @@ TEST_P(AudioTimestampValidatorTest, RepeatedWarnForSlowAccumulatingDrift) {
"with decoded output."))
.Times(0);
+ int num_timestamp_gap_warnings = 0;
+ const int kMaxTimestampGapWarnings = 10; // Must be the same as in .cc
+
for (int i = 0; i < 100; ++i) {
// Wait for delayed output to begin plus an additional two iterations to
// start using drift offset. The the two iterations without offset will
@@ -218,9 +221,12 @@ TEST_P(AudioTimestampValidatorTest, RepeatedWarnForSlowAccumulatingDrift) {
encoded_buffer->set_timestamp((i * kBufferDuration) + offset);
// Expect gap warnings to start when drift hits 50 milliseconds. Warnings
- // should continue as the gap widens.
+ // should continue as the gap widens until the log limit is hit.
+
if (offset > base::TimeDelta::FromMilliseconds(50)) {
- EXPECT_MEDIA_LOG(HasSubstr("timestamp gap detected"));
+ EXPECT_LIMITED_MEDIA_LOG(HasSubstr("timestamp gap detected"),
+ num_timestamp_gap_warnings,
+ kMaxTimestampGapWarnings);
}
validator.CheckForTimestampGap(*encoded_buffer);
diff --git a/chromium/media/filters/audio_video_metadata_extractor_unittest.cc b/chromium/media/filters/audio_video_metadata_extractor_unittest.cc
index 022efccc36d..7e5793a8df1 100644
--- a/chromium/media/filters/audio_video_metadata_extractor_unittest.cc
+++ b/chromium/media/filters/audio_video_metadata_extractor_unittest.cc
@@ -185,6 +185,7 @@ TEST(AudioVideoMetadataExtractorTest, AndroidRotatedMP4Video) {
EXPECT_EQ(0u, extractor->attached_images_bytes().size());
}
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST(AudioVideoMetadataExtractorTest, AudioMP3) {
std::unique_ptr<AudioVideoMetadataExtractor> extractor =
@@ -259,6 +260,5 @@ TEST(AudioVideoMetadataExtractorTest, AudioFLACInMp4) {
EXPECT_EQ(0u, extractor->attached_images_bytes().size());
}
-#endif
} // namespace media
diff --git a/chromium/media/filters/blocking_url_protocol_unittest.cc b/chromium/media/filters/blocking_url_protocol_unittest.cc
index dda40006510..9509e7b9506 100644
--- a/chromium/media/filters/blocking_url_protocol_unittest.cc
+++ b/chromium/media/filters/blocking_url_protocol_unittest.cc
@@ -27,9 +27,7 @@ class BlockingUrlProtocolTest : public testing::Test {
CHECK(data_source_.Initialize(GetTestDataFilePath("bear-320x240.webm")));
}
- virtual ~BlockingUrlProtocolTest() {
- data_source_.Stop();
- }
+ ~BlockingUrlProtocolTest() override { data_source_.Stop(); }
MOCK_METHOD0(OnDataSourceError, void());
diff --git a/chromium/media/filters/chunk_demuxer.cc b/chromium/media/filters/chunk_demuxer.cc
index 8bb2151f16d..50fc1bc8ba5 100644
--- a/chromium/media/filters/chunk_demuxer.cc
+++ b/chromium/media/filters/chunk_demuxer.cc
@@ -235,12 +235,6 @@ bool ChunkDemuxerStream::UpdateAudioConfig(const AudioDecoderConfig& config,
if (!SBSTREAM_IS_SET) {
DCHECK_EQ(state_, UNINITIALIZED);
- // FLAC in MSE here is only supported if in ISOBMFF, which has feature flag.
- // Though the MP4StreamParser shouldn't produce FLAC decoder configs if the
- // feature is disabled, double-check feature support here in debug builds.
- DCHECK(config.codec() != kCodecFLAC ||
- base::FeatureList::IsEnabled(kMseFlacInIsobmff));
-
// Enable partial append window support for most audio codecs (notably: not
// opus).
partial_append_window_trimming_enabled_ =
@@ -351,14 +345,6 @@ void ChunkDemuxerStream::SetEnabled(bool enabled, base::TimeDelta timestamp) {
base::ResetAndReturn(&read_cb_).Run(kOk,
StreamParserBuffer::CreateEOSBuffer());
}
- if (!stream_status_change_cb_.is_null())
- stream_status_change_cb_.Run(this, is_enabled_, timestamp);
-}
-
-void ChunkDemuxerStream::SetStreamStatusChangeCB(
- const StreamStatusChangeCB& cb) {
- DCHECK(!cb.is_null());
- stream_status_change_cb_ = BindToCurrentLoop(cb);
}
TextTrackConfig ChunkDemuxerStream::text_track_config() {
@@ -567,15 +553,6 @@ std::vector<DemuxerStream*> ChunkDemuxer::GetAllStreams() {
return result;
}
-void ChunkDemuxer::SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) {
- base::AutoLock auto_lock(lock_);
- DCHECK(!cb.is_null());
- for (const auto& stream : audio_streams_)
- stream->SetStreamStatusChangeCB(cb);
- for (const auto& stream : video_streams_)
- stream->SetStreamStatusChangeCB(cb);
-}
-
TimeDelta ChunkDemuxer::GetStartTime() const {
return TimeDelta();
}
@@ -782,68 +759,59 @@ base::TimeDelta ChunkDemuxer::GetHighestPresentationTimestamp(
return itr->second->GetHighestPresentationTimestamp();
}
-void ChunkDemuxer::OnEnabledAudioTracksChanged(
+void ChunkDemuxer::FindAndEnableProperTracks(
const std::vector<MediaTrack::Id>& track_ids,
- base::TimeDelta curr_time) {
+ base::TimeDelta curr_time,
+ DemuxerStream::Type track_type,
+ TrackChangeCB change_completed_cb) {
base::AutoLock auto_lock(lock_);
+
std::set<ChunkDemuxerStream*> enabled_streams;
for (const auto& id : track_ids) {
auto it = track_id_to_demux_stream_map_.find(id);
if (it == track_id_to_demux_stream_map_.end())
continue;
ChunkDemuxerStream* stream = it->second;
- DCHECK_EQ(DemuxerStream::AUDIO, stream->type());
+ DCHECK(stream);
+ DCHECK_EQ(track_type, stream->type());
// TODO(servolk): Remove after multiple enabled audio tracks are supported
// by the media::RendererImpl.
if (!enabled_streams.empty()) {
MEDIA_LOG(INFO, media_log_)
- << "Only one enabled audio track is supported, ignoring track " << id;
+ << "Only one enabled track is supported, ignoring track " << id;
continue;
}
enabled_streams.insert(stream);
+ stream->SetEnabled(true, curr_time);
}
- // First disable all streams that need to be disabled and then enable streams
- // that are enabled.
- for (const auto& stream : audio_streams_) {
- if (enabled_streams.find(stream.get()) == enabled_streams.end()) {
+ bool is_audio = track_type == DemuxerStream::AUDIO;
+ for (const auto& stream : is_audio ? audio_streams_ : video_streams_) {
+ if (stream && enabled_streams.find(stream.get()) == enabled_streams.end()) {
DVLOG(1) << __func__ << ": disabling stream " << stream.get();
stream->SetEnabled(false, curr_time);
}
}
- for (auto* stream : enabled_streams) {
- DVLOG(1) << __func__ << ": enabling stream " << stream;
- stream->SetEnabled(true, curr_time);
- }
+
+ std::vector<DemuxerStream*> streams(enabled_streams.begin(),
+ enabled_streams.end());
+ std::move(change_completed_cb).Run(track_type, streams);
}
-void ChunkDemuxer::OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> track_id,
- base::TimeDelta curr_time) {
- base::AutoLock auto_lock(lock_);
- ChunkDemuxerStream* selected_stream = nullptr;
- if (track_id) {
- auto it = track_id_to_demux_stream_map_.find(*track_id);
- if (it != track_id_to_demux_stream_map_.end()) {
- selected_stream = it->second;
- DCHECK(selected_stream);
- DCHECK_EQ(DemuxerStream::VIDEO, selected_stream->type());
- }
- }
+void ChunkDemuxer::OnEnabledAudioTracksChanged(
+ const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) {
+ FindAndEnableProperTracks(track_ids, curr_time, DemuxerStream::AUDIO,
+ std::move(change_completed_cb));
+}
- // First disable all streams that need to be disabled and then enable the
- // stream that needs to be enabled (if any).
- for (const auto& stream : video_streams_) {
- if (stream.get() != selected_stream) {
- DVLOG(1) << __func__ << ": disabling stream " << stream.get();
- DCHECK_EQ(DemuxerStream::VIDEO, stream->type());
- stream->SetEnabled(false, curr_time);
- }
- }
- if (selected_stream) {
- DVLOG(1) << __func__ << ": enabling stream " << selected_stream;
- selected_stream->SetEnabled(true, curr_time);
- }
+void ChunkDemuxer::OnSelectedVideoTrackChanged(
+ const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) {
+ FindAndEnableProperTracks(track_ids, curr_time, DemuxerStream::VIDEO,
+ std::move(change_completed_cb));
}
void ChunkDemuxer::OnMemoryPressure(
diff --git a/chromium/media/filters/chunk_demuxer.h b/chromium/media/filters/chunk_demuxer.h
index d3fa05c2e64..9253d2d50a5 100644
--- a/chromium/media/filters/chunk_demuxer.h
+++ b/chromium/media/filters/chunk_demuxer.h
@@ -123,8 +123,6 @@ class MEDIA_EXPORT ChunkDemuxerStream : public DemuxerStream {
bool IsEnabled() const;
void SetEnabled(bool enabled, base::TimeDelta timestamp);
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb);
-
// Returns the text track configuration. It is an error to call this method
// if type() != TEXT.
TextTrackConfig text_track_config();
@@ -171,7 +169,6 @@ class MEDIA_EXPORT ChunkDemuxerStream : public DemuxerStream {
ReadCB read_cb_;
bool partial_append_window_trimming_enabled_;
bool is_enabled_;
- StreamStatusChangeCB stream_status_change_cb_;
DISALLOW_IMPLICIT_CONSTRUCTORS(ChunkDemuxerStream);
};
@@ -210,7 +207,6 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
void Seek(base::TimeDelta time, const PipelineStatusCB& cb) override;
base::Time GetTimelineOffset() const override;
std::vector<DemuxerStream*> GetAllStreams() override;
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) override;
base::TimeDelta GetStartTime() const override;
int64_t GetMemoryUsage() const override;
void AbortPendingReads() override;
@@ -255,11 +251,12 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
base::TimeDelta GetHighestPresentationTimestamp(const std::string& id) const;
void OnEnabledAudioTracksChanged(const std::vector<MediaTrack::Id>& track_ids,
- base::TimeDelta curr_time) override;
- // |track_id| either contains the selected video track id or is null,
- // indicating that all video tracks are deselected/disabled.
- void OnSelectedVideoTrackChanged(base::Optional<MediaTrack::Id> track_id,
- base::TimeDelta curr_time) override;
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) override;
+
+ void OnSelectedVideoTrackChanged(const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) override;
// Appends media data to the source buffer associated with |id|, applying
// and possibly updating |*timestamp_offset| during coded frame processing.
@@ -357,6 +354,12 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
SHUTDOWN,
};
+ // Helper for video and audio track changing.
+ void FindAndEnableProperTracks(const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ DemuxerStream::Type track_type,
+ TrackChangeCB change_completed_cb);
+
void ChangeState_Locked(State new_state);
// Reports an error and puts the demuxer in a state where it won't accept more
diff --git a/chromium/media/filters/chunk_demuxer_unittest.cc b/chromium/media/filters/chunk_demuxer_unittest.cc
index 2dd165fb9eb..53adadf198c 100644
--- a/chromium/media/filters/chunk_demuxer_unittest.cc
+++ b/chromium/media/filters/chunk_demuxer_unittest.cc
@@ -1420,6 +1420,9 @@ TEST_P(ChunkDemuxerTest, Init) {
EXPECT_FALSE(video_stream);
}
+ for (auto* stream : demuxer_->GetAllStreams())
+ EXPECT_TRUE(stream->SupportsConfigChanges());
+
ShutdownDemuxer();
demuxer_.reset();
}
@@ -4770,30 +4773,49 @@ TEST_P(ChunkDemuxerTest,
CheckExpectedBuffers(video_stream, "71K 81");
}
-void OnStreamStatusChanged(base::WaitableEvent* event,
- DemuxerStream* stream,
- bool enabled,
- base::TimeDelta) {
- event->Signal();
+namespace {
+void QuitLoop(base::Closure quit_closure,
+ DemuxerStream::Type type,
+ const std::vector<DemuxerStream*>& streams) {
+ quit_closure.Run();
}
-void CheckStreamStatusNotifications(MediaResource* media_resource,
- ChunkDemuxerStream* stream) {
+void DisableAndEnableDemuxerTracks(
+ ChunkDemuxer* demuxer,
+ base::test::ScopedTaskEnvironment* scoped_task_environment) {
base::WaitableEvent event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
base::WaitableEvent::InitialState::NOT_SIGNALED);
+ std::vector<MediaTrack::Id> audio_tracks;
+ std::vector<MediaTrack::Id> video_tracks;
- ASSERT_TRUE(stream->IsEnabled());
- media_resource->SetStreamStatusChangeCB(
- base::Bind(&OnStreamStatusChanged, base::Unretained(&event)));
+ base::RunLoop disable_video;
+ demuxer->OnSelectedVideoTrackChanged(
+ video_tracks, base::TimeDelta(),
+ base::BindOnce(QuitLoop, base::Passed(disable_video.QuitClosure())));
+ disable_video.Run();
- stream->SetEnabled(false, base::TimeDelta());
- base::RunLoop().RunUntilIdle();
- ASSERT_TRUE(event.IsSignaled());
+ base::RunLoop disable_audio;
+ demuxer->OnEnabledAudioTracksChanged(
+ audio_tracks, base::TimeDelta(),
+ base::BindOnce(QuitLoop, base::Passed(disable_audio.QuitClosure())));
+ disable_audio.Run();
- event.Reset();
- stream->SetEnabled(true, base::TimeDelta());
- base::RunLoop().RunUntilIdle();
- ASSERT_TRUE(event.IsSignaled());
+ base::RunLoop enable_video;
+ video_tracks.push_back(MediaTrack::Id("1"));
+ demuxer->OnSelectedVideoTrackChanged(
+ video_tracks, base::TimeDelta(),
+ base::BindOnce(QuitLoop, base::Passed(enable_video.QuitClosure())));
+ enable_video.Run();
+
+ base::RunLoop enable_audio;
+ audio_tracks.push_back(MediaTrack::Id("2"));
+ demuxer->OnEnabledAudioTracksChanged(
+ audio_tracks, base::TimeDelta(),
+ base::BindOnce(QuitLoop, base::Passed(enable_audio.QuitClosure())));
+ enable_audio.Run();
+
+ scoped_task_environment->RunUntilIdle();
+}
}
TEST_P(ChunkDemuxerTest, StreamStatusNotifications) {
@@ -4806,17 +4828,16 @@ TEST_P(ChunkDemuxerTest, StreamStatusNotifications) {
EXPECT_NE(nullptr, video_stream);
// Verify stream status changes without pending read.
- CheckStreamStatusNotifications(demuxer_.get(), audio_stream);
- CheckStreamStatusNotifications(demuxer_.get(), video_stream);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
// Verify stream status changes with pending read.
bool read_done = false;
audio_stream->Read(base::Bind(&OnReadDone_EOSExpected, &read_done));
- CheckStreamStatusNotifications(demuxer_.get(), audio_stream);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
EXPECT_TRUE(read_done);
read_done = false;
video_stream->Read(base::Bind(&OnReadDone_EOSExpected, &read_done));
- CheckStreamStatusNotifications(demuxer_.get(), video_stream);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
EXPECT_TRUE(read_done);
}
diff --git a/chromium/media/filters/decoder_selector.cc b/chromium/media/filters/decoder_selector.cc
index a7542ee8471..df0cf6fd5c3 100644
--- a/chromium/media/filters/decoder_selector.cc
+++ b/chromium/media/filters/decoder_selector.cc
@@ -90,7 +90,7 @@ void DecoderSelector<StreamType>::SelectDecoder(
waiting_for_decryption_key_cb_ = waiting_for_decryption_key_cb;
decoders_ = create_decoders_cb_.Run();
- config_ = StreamTraits::GetDecoderConfig(input_stream_);
+ config_ = traits_->GetDecoderConfig(input_stream_);
InitializeDecoder();
}
@@ -125,6 +125,7 @@ void DecoderSelector<StreamType>::InitializeDecoder() {
return;
}
+ DVLOG(2) << __func__ << ": initializing " << decoder_->GetDisplayName();
traits_->InitializeDecoder(
decoder_.get(), config_,
input_stream_->liveness() == DemuxerStream::LIVENESS_LIVE, cdm_context_,
@@ -135,7 +136,8 @@ void DecoderSelector<StreamType>::InitializeDecoder() {
template <DemuxerStream::Type StreamType>
void DecoderSelector<StreamType>::DecoderInitDone(bool success) {
- DVLOG(2) << __func__ << ": success=" << success;
+ DVLOG(2) << __func__ << ": " << decoder_->GetDisplayName()
+ << " success=" << success;
DCHECK(task_runner_->BelongsToCurrentThread());
if (!success) {
@@ -197,7 +199,7 @@ void DecoderSelector<StreamType>::DecryptingDemuxerStreamInitDone(
// try to see whether any decoder can decrypt-and-decode the encrypted stream
// directly. So in both cases, we'll initialize the decoders.
input_stream_ = decrypted_stream_.get();
- config_ = StreamTraits::GetDecoderConfig(input_stream_);
+ config_ = traits_->GetDecoderConfig(input_stream_);
DCHECK(!config_.is_encrypted());
// If we're here we tried all the decoders w/ is_encrypted=true, try again
diff --git a/chromium/media/filters/decoder_stream.cc b/chromium/media/filters/decoder_stream.cc
index 329ec765875..c4ecd423fbc 100644
--- a/chromium/media/filters/decoder_stream.cc
+++ b/chromium/media/filters/decoder_stream.cc
@@ -42,10 +42,11 @@ const char* GetTraceString<DemuxerStream::AUDIO>() {
template <DemuxerStream::Type StreamType>
DecoderStream<StreamType>::DecoderStream(
+ std::unique_ptr<DecoderStreamTraits<StreamType>> traits,
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
CreateDecodersCB create_decoders_cb,
MediaLog* media_log)
- : traits_(media_log),
+ : traits_(std::move(traits)),
task_runner_(task_runner),
create_decoders_cb_(std::move(create_decoders_cb)),
media_log_(media_log),
@@ -111,7 +112,7 @@ void DecoderStream<StreamType>::Initialize(
statistics_cb_ = statistics_cb;
waiting_for_decryption_key_cb_ = waiting_for_decryption_key_cb;
- traits_.OnStreamReset(stream_);
+ traits_->OnStreamReset(stream_);
state_ = STATE_INITIALIZING;
SelectDecoder();
@@ -171,7 +172,7 @@ void DecoderStream<StreamType>::Reset(const base::Closure& closure) {
}
ClearOutputs();
- traits_.OnStreamReset(stream_);
+ traits_->OnStreamReset(stream_);
// It's possible to have received a DECODE_ERROR and entered STATE_ERROR right
// before a Reset() is executed. If we are still waiting for a demuxer read,
@@ -275,7 +276,7 @@ void DecoderStream<StreamType>::SelectDecoder() {
task_runner_, create_decoders_cb_, media_log_);
decoder_selector_->SelectDecoder(
- &traits_, stream_, cdm_context, blacklisted_decoder,
+ traits_.get(), stream_, cdm_context, blacklisted_decoder,
base::BindRepeating(&DecoderStream<StreamType>::OnDecoderSelected,
weak_factory_.GetWeakPtr()),
base::BindRepeating(&DecoderStream<StreamType>::OnDecodeOutputReady,
@@ -335,7 +336,7 @@ void DecoderStream<StreamType>::OnDecoderSelected(
}
// Send logs and statistics updates including the decoder name.
- traits_.ReportStatistics(statistics_cb_, 0);
+ traits_->ReportStatistics(statistics_cb_, 0);
media_log_->SetBooleanProperty(GetStreamTypeString() + "_dds",
!!decrypting_demuxer_stream_);
media_log_->SetStringProperty(GetStreamTypeString() + "_decoder",
@@ -344,7 +345,7 @@ void DecoderStream<StreamType>::OnDecoderSelected(
MEDIA_LOG(INFO, media_log_)
<< "Selected " << decoder_->GetDisplayName() << " for "
<< GetStreamTypeString() << " decoding, config: "
- << StreamTraits::GetDecoderConfig(stream_).AsHumanReadableString();
+ << traits_->GetDecoderConfig(stream_).AsHumanReadableString();
if (state_ == STATE_REINITIALIZING_DECODER) {
CompleteDecoderReinitialization(true);
@@ -399,7 +400,7 @@ void DecoderStream<StreamType>::DecodeInternal(
DCHECK(!reset_cb_);
DCHECK(buffer);
- traits_.OnDecode(*buffer);
+ traits_->OnDecode(*buffer);
int buffer_size = buffer->end_of_stream() ? 0 : buffer->data_size();
@@ -492,7 +493,7 @@ void DecoderStream<StreamType>::OnDecodeDone(int buffer_size,
case DecodeStatus::OK:
// Any successful decode counts!
if (buffer_size > 0)
- traits_.ReportStatistics(statistics_cb_, buffer_size);
+ traits_->ReportStatistics(statistics_cb_, buffer_size);
if (state_ == STATE_NORMAL) {
if (end_of_stream) {
@@ -540,7 +541,7 @@ void DecoderStream<StreamType>::OnDecodeOutputReady(
// If the frame should be dropped, exit early and decode another frame.
decoder_produced_a_frame_ = true;
- if (traits_.OnDecodeDone(output) == PostDecodeAction::DROP)
+ if (traits_->OnDecodeDone(output) == PostDecodeAction::DROP)
return;
if (prepare_cb_ && output->timestamp() + AverageDuration() >=
@@ -682,8 +683,8 @@ void DecoderStream<StreamType>::OnBufferReady(
// lost frames if we were to fallback then).
pending_buffers_.clear();
- const DecoderConfig& config = StreamTraits::GetDecoderConfig(stream_);
- traits_.OnConfigChanged(config);
+ const DecoderConfig& config = traits_->GetDecoderConfig(stream_);
+ traits_->OnConfigChanged(config);
MEDIA_LOG(INFO, media_log_)
<< GetStreamTypeString()
@@ -737,8 +738,8 @@ void DecoderStream<StreamType>::ReinitializeDecoder() {
state_ = STATE_REINITIALIZING_DECODER;
// Decoders should not need a new CDM during reinitialization.
- traits_.InitializeDecoder(
- decoder_.get(), StreamTraits::GetDecoderConfig(stream_),
+ traits_->InitializeDecoder(
+ decoder_.get(), traits_->GetDecoderConfig(stream_),
stream_->liveness() == DemuxerStream::LIVENESS_LIVE, cdm_context_,
base::BindRepeating(&DecoderStream<StreamType>::OnDecoderReinitialized,
weak_factory_.GetWeakPtr()),
@@ -749,7 +750,7 @@ void DecoderStream<StreamType>::ReinitializeDecoder() {
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::OnDecoderReinitialized(bool success) {
- FUNCTION_DVLOG(2);
+ FUNCTION_DVLOG(2) << ": success = " << success;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_REINITIALIZING_DECODER);
@@ -858,7 +859,7 @@ void DecoderStream<StreamType>::ClearOutputs() {
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::MaybePrepareAnotherOutput() {
- FUNCTION_DVLOG(2);
+ FUNCTION_DVLOG(3);
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(state_ == STATE_NORMAL || state_ == STATE_FLUSHING_DECODER ||
state_ == STATE_END_OF_STREAM ||
diff --git a/chromium/media/filters/decoder_stream.h b/chromium/media/filters/decoder_stream.h
index f2095ecc808..95bd78f81eb 100644
--- a/chromium/media/filters/decoder_stream.h
+++ b/chromium/media/filters/decoder_stream.h
@@ -62,7 +62,8 @@ class MEDIA_EXPORT DecoderStream {
using ReadCB =
base::RepeatingCallback<void(Status, const scoped_refptr<Output>&)>;
- DecoderStream(const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ DecoderStream(std::unique_ptr<DecoderStreamTraits<StreamType>> traits,
+ const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
CreateDecodersCB create_decoders_cb,
MediaLog* media_log);
virtual ~DecoderStream();
@@ -213,7 +214,7 @@ class MEDIA_EXPORT DecoderStream {
void MaybePrepareAnotherOutput();
void OnPreparedOutputReady(const scoped_refptr<Output>& frame);
- DecoderStreamTraits<StreamType> traits_;
+ std::unique_ptr<DecoderStreamTraits<StreamType>> traits_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
CreateDecodersCB create_decoders_cb_;
diff --git a/chromium/media/filters/decoder_stream_traits.cc b/chromium/media/filters/decoder_stream_traits.cc
index 794a639e5ea..707919df4fb 100644
--- a/chromium/media/filters/decoder_stream_traits.cc
+++ b/chromium/media/filters/decoder_stream_traits.cc
@@ -31,21 +31,24 @@ bool DecoderStreamTraits<DemuxerStream::AUDIO>::NeedsBitstreamConversion(
// static
scoped_refptr<DecoderStreamTraits<DemuxerStream::AUDIO>::OutputType>
- DecoderStreamTraits<DemuxerStream::AUDIO>::CreateEOSOutput() {
+DecoderStreamTraits<DemuxerStream::AUDIO>::CreateEOSOutput() {
return OutputType::CreateEOSBuffer();
}
-// static
+DecoderStreamTraits<DemuxerStream::AUDIO>::DecoderStreamTraits(
+ MediaLog* media_log,
+ ChannelLayout initial_hw_layout)
+ : media_log_(media_log), initial_hw_layout_(initial_hw_layout) {}
+
DecoderStreamTraits<DemuxerStream::AUDIO>::DecoderConfigType
DecoderStreamTraits<DemuxerStream::AUDIO>::GetDecoderConfig(
DemuxerStream* stream) {
- return stream->audio_decoder_config();
+ auto config = stream->audio_decoder_config();
+ // Demuxer is not aware of hw layout, so we set it here.
+ config.set_target_output_channel_layout(initial_hw_layout_);
+ return config;
}
-DecoderStreamTraits<DemuxerStream::AUDIO>::DecoderStreamTraits(
- MediaLog* media_log)
- : media_log_(media_log) {}
-
void DecoderStreamTraits<DemuxerStream::AUDIO>::ReportStatistics(
const StatisticsCB& statistics_cb,
int bytes_decoded) {
@@ -114,18 +117,17 @@ DecoderStreamTraits<DemuxerStream::VIDEO>::CreateEOSOutput() {
return OutputType::CreateEOSFrame();
}
-// static
+DecoderStreamTraits<DemuxerStream::VIDEO>::DecoderStreamTraits(
+ MediaLog* media_log)
+ // Randomly selected number of samples to keep.
+ : keyframe_distance_average_(16) {}
+
DecoderStreamTraits<DemuxerStream::VIDEO>::DecoderConfigType
DecoderStreamTraits<DemuxerStream::VIDEO>::GetDecoderConfig(
DemuxerStream* stream) {
return stream->video_decoder_config();
}
-DecoderStreamTraits<DemuxerStream::VIDEO>::DecoderStreamTraits(
- MediaLog* media_log)
- // Randomly selected number of samples to keep.
- : keyframe_distance_average_(16) {}
-
void DecoderStreamTraits<DemuxerStream::VIDEO>::ReportStatistics(
const StatisticsCB& statistics_cb,
int bytes_decoded) {
@@ -154,6 +156,7 @@ void DecoderStreamTraits<DemuxerStream::VIDEO>::InitializeDecoder(
waiting_for_decryption_key_cb) {
DCHECK(config.IsValidConfig());
stats_.video_decoder_name = decoder->GetDisplayName();
+ DVLOG(2) << stats_.video_decoder_name;
decoder->Initialize(config, low_delay, cdm_context, init_cb, output_cb,
waiting_for_decryption_key_cb);
}
diff --git a/chromium/media/filters/decoder_stream_traits.h b/chromium/media/filters/decoder_stream_traits.h
index 8e10156eb33..ec3a2d42ab0 100644
--- a/chromium/media/filters/decoder_stream_traits.h
+++ b/chromium/media/filters/decoder_stream_traits.h
@@ -9,6 +9,7 @@
#include "base/time/time.h"
#include "media/base/audio_decoder.h"
#include "media/base/cdm_context.h"
+#include "media/base/channel_layout.h"
#include "media/base/demuxer_stream.h"
#include "media/base/moving_average.h"
#include "media/base/pipeline_status.h"
@@ -43,9 +44,8 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::AUDIO> {
static std::string ToString();
static bool NeedsBitstreamConversion(DecoderType* decoder);
static scoped_refptr<OutputType> CreateEOSOutput();
- static DecoderConfigType GetDecoderConfig(DemuxerStream* stream);
- explicit DecoderStreamTraits(MediaLog* media_log);
+ DecoderStreamTraits(MediaLog* media_log, ChannelLayout initial_hw_layout);
void ReportStatistics(const StatisticsCB& statistics_cb, int bytes_decoded);
void InitializeDecoder(
@@ -56,6 +56,7 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::AUDIO> {
const InitCB& init_cb,
const OutputCB& output_cb,
const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb);
+ DecoderConfigType GetDecoderConfig(DemuxerStream* stream);
void OnDecode(const DecoderBuffer& buffer);
PostDecodeAction OnDecodeDone(const scoped_refptr<OutputType>& buffer);
void OnStreamReset(DemuxerStream* stream);
@@ -67,6 +68,9 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::AUDIO> {
// drift.
std::unique_ptr<AudioTimestampValidator> audio_ts_validator_;
MediaLog* media_log_;
+ // HW layout at the time pipeline was started. Will not reflect possible
+ // device changes.
+ ChannelLayout initial_hw_layout_;
PipelineStatistics stats_;
};
@@ -83,10 +87,10 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::VIDEO> {
static std::string ToString();
static bool NeedsBitstreamConversion(DecoderType* decoder);
static scoped_refptr<OutputType> CreateEOSOutput();
- static DecoderConfigType GetDecoderConfig(DemuxerStream* stream);
explicit DecoderStreamTraits(MediaLog* media_log);
+ DecoderConfigType GetDecoderConfig(DemuxerStream* stream);
void ReportStatistics(const StatisticsCB& statistics_cb, int bytes_decoded);
void InitializeDecoder(
DecoderType* decoder,
@@ -97,7 +101,6 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::VIDEO> {
const OutputCB& output_cb,
const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb);
void OnDecode(const DecoderBuffer& buffer);
-
PostDecodeAction OnDecodeDone(const scoped_refptr<OutputType>& buffer);
void OnStreamReset(DemuxerStream* stream);
void OnConfigChanged(const DecoderConfigType& config) {}
diff --git a/chromium/media/filters/decrypting_audio_decoder_unittest.cc b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
index 42148531570..f68e048aed9 100644
--- a/chromium/media/filters/decrypting_audio_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
@@ -44,11 +44,11 @@ const int kDecodingDelay = 3;
static scoped_refptr<DecoderBuffer> CreateFakeEncryptedBuffer() {
const int buffer_size = 16; // Need a non-empty buffer;
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(buffer_size));
- buffer->set_decrypt_config(std::unique_ptr<DecryptConfig>(new DecryptConfig(
+ buffer->set_decrypt_config(DecryptConfig::CreateCencConfig(
std::string(reinterpret_cast<const char*>(kFakeKeyId),
arraysize(kFakeKeyId)),
std::string(reinterpret_cast<const char*>(kFakeIv), arraysize(kFakeIv)),
- std::vector<SubsampleEntry>())));
+ std::vector<SubsampleEntry>()));
return buffer;
}
@@ -65,9 +65,7 @@ class DecryptingAudioDecoderTest : public testing::Test {
decoded_frame_(NULL),
decoded_frame_list_() {}
- virtual ~DecryptingAudioDecoderTest() {
- Destroy();
- }
+ ~DecryptingAudioDecoderTest() override { Destroy(); }
void InitializeAndExpectResult(const AudioDecoderConfig& config,
bool success) {
diff --git a/chromium/media/filters/decrypting_demuxer_stream.cc b/chromium/media/filters/decrypting_demuxer_stream.cc
index 79c301ed56b..06009f7a947 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream.cc
@@ -219,29 +219,13 @@ void DecryptingDemuxerStream::DecryptBuffer(
return;
}
- // TODO(xhwang): Unify clear buffer handling in clear and encrypted stream.
- // See http://crbug.com/675003
if (!buffer->decrypt_config()) {
- DVLOG(2) << "DoDecryptBuffer() - clear buffer in clear stream.";
+ DVLOG(2) << "DoDecryptBuffer() - clear buffer.";
state_ = kIdle;
base::ResetAndReturn(&read_cb_).Run(kOk, std::move(buffer));
return;
}
- if (!buffer->decrypt_config()->is_encrypted()) {
- DVLOG(2) << "DoDecryptBuffer() - clear buffer in encrypted stream.";
- scoped_refptr<DecoderBuffer> decrypted = DecoderBuffer::CopyFrom(
- buffer->data(), buffer->data_size());
- decrypted->set_timestamp(buffer->timestamp());
- decrypted->set_duration(buffer->duration());
- if (buffer->is_key_frame())
- decrypted->set_is_key_frame(true);
-
- state_ = kIdle;
- base::ResetAndReturn(&read_cb_).Run(kOk, std::move(decrypted));
- return;
- }
-
pending_buffer_to_decrypt_ = std::move(buffer);
state_ = kPendingDecrypt;
DecryptPendingBuffer();
diff --git a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
index 3c333ff81cc..2650a45a168 100644
--- a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
@@ -43,10 +43,12 @@ static scoped_refptr<DecoderBuffer> CreateFakeEncryptedStreamBuffer(
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(kFakeBufferSize));
std::string iv = is_clear ? std::string() :
std::string(reinterpret_cast<const char*>(kFakeIv), arraysize(kFakeIv));
- buffer->set_decrypt_config(std::unique_ptr<DecryptConfig>(
- new DecryptConfig(std::string(reinterpret_cast<const char*>(kFakeKeyId),
- arraysize(kFakeKeyId)),
- iv, std::vector<SubsampleEntry>())));
+ if (!is_clear) {
+ buffer->set_decrypt_config(DecryptConfig::CreateCencConfig(
+ std::string(reinterpret_cast<const char*>(kFakeKeyId),
+ arraysize(kFakeKeyId)),
+ iv, {}));
+ }
return buffer;
}
@@ -80,7 +82,7 @@ class DecryptingDemuxerStreamTest : public testing::Test {
encrypted_buffer_(CreateFakeEncryptedStreamBuffer(false)),
decrypted_buffer_(new DecoderBuffer(kFakeBufferSize)) {}
- virtual ~DecryptingDemuxerStreamTest() {
+ ~DecryptingDemuxerStreamTest() override {
if (is_initialized_)
EXPECT_CALL(*decryptor_, CancelDecrypt(_));
demuxer_stream_.reset();
@@ -162,10 +164,11 @@ class DecryptingDemuxerStreamTest : public testing::Test {
base::RunLoop().RunUntilIdle();
}
- void EnterClearReadingState(bool is_stream_encrytped) {
- EXPECT_TRUE(clear_encrypted_stream_buffer_->decrypt_config());
+ void EnterClearReadingState(bool is_stream_encrypted) {
+ // Unencrypted streams never have a DecryptConfig.
+ EXPECT_FALSE(clear_encrypted_stream_buffer_->decrypt_config());
EXPECT_CALL(*input_audio_stream_, Read(_))
- .WillOnce(ReturnBuffer(is_stream_encrytped
+ .WillOnce(ReturnBuffer(is_stream_encrypted
? clear_encrypted_stream_buffer_
: clear_buffer_));
@@ -323,9 +326,6 @@ TEST_F(DecryptingDemuxerStreamTest, Read_Normal) {
}
// Test normal read case where the buffer is clear.
-// TODO(xhwang): Unify clear buffer handling in clear and encrypted stream.
-// See http://crbug.com/675003
-
TEST_F(DecryptingDemuxerStreamTest, Read_ClearBufferInEncryptedStream) {
Initialize();
EnterClearReadingState(true);
diff --git a/chromium/media/filters/decrypting_video_decoder_unittest.cc b/chromium/media/filters/decrypting_video_decoder_unittest.cc
index b96b71f4290..095dc3acb54 100644
--- a/chromium/media/filters/decrypting_video_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_video_decoder_unittest.cc
@@ -37,11 +37,11 @@ const int kDecodingDelay = 3;
static scoped_refptr<DecoderBuffer> CreateFakeEncryptedBuffer() {
const int buffer_size = 16; // Need a non-empty buffer;
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(buffer_size));
- buffer->set_decrypt_config(std::unique_ptr<DecryptConfig>(new DecryptConfig(
+ buffer->set_decrypt_config(DecryptConfig::CreateCencConfig(
std::string(reinterpret_cast<const char*>(kFakeKeyId),
arraysize(kFakeKeyId)),
std::string(reinterpret_cast<const char*>(kFakeIv), arraysize(kFakeIv)),
- std::vector<SubsampleEntry>())));
+ {}));
return buffer;
}
@@ -59,9 +59,7 @@ class DecryptingVideoDecoderTest : public testing::Test {
VideoFrame::CreateBlackFrame(TestVideoConfig::NormalCodedSize())),
null_video_frame_(scoped_refptr<VideoFrame>()) {}
- virtual ~DecryptingVideoDecoderTest() {
- Destroy();
- }
+ ~DecryptingVideoDecoderTest() override { Destroy(); }
enum CdmType { CDM_WITHOUT_DECRYPTOR, CDM_WITH_DECRYPTOR };
diff --git a/chromium/media/filters/fake_video_decoder_unittest.cc b/chromium/media/filters/fake_video_decoder_unittest.cc
index c46657dad60..01d2cdee245 100644
--- a/chromium/media/filters/fake_video_decoder_unittest.cc
+++ b/chromium/media/filters/fake_video_decoder_unittest.cc
@@ -2,7 +2,9 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include "media/filters/fake_video_decoder.h"
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/macros.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
@@ -10,7 +12,6 @@
#include "media/base/mock_filters.h"
#include "media/base/test_helpers.h"
#include "media/base/video_frame.h"
-#include "media/filters/fake_video_decoder.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
@@ -54,7 +55,7 @@ class FakeVideoDecoderTest
decoder_->Initialize(
config, false, nullptr, NewExpectedBoolCB(success),
base::Bind(&FakeVideoDecoderTest::FrameReady, base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
current_config_ = config;
}
diff --git a/chromium/media/filters/ffmpeg_audio_decoder.cc b/chromium/media/filters/ffmpeg_audio_decoder.cc
index f6d9296fce3..11991c20741 100644
--- a/chromium/media/filters/ffmpeg_audio_decoder.cc
+++ b/chromium/media/filters/ffmpeg_audio_decoder.cc
@@ -302,17 +302,29 @@ bool FFmpegAudioDecoder::ConfigureDecoder(const AudioDecoderConfig& config) {
if (!config.should_discard_decoder_delay())
codec_context_->flags2 |= AV_CODEC_FLAG2_SKIP_MANUAL;
- if (config.codec() == kCodecOpus)
+ AVDictionary* codec_options = NULL;
+ if (config.codec() == kCodecOpus) {
codec_context_->request_sample_fmt = AV_SAMPLE_FMT_FLT;
+ // Disable phase inversion to avoid artifacts in mono downmix. See
+ // http://crbug.com/806219
+ if (config.target_output_channel_layout() == CHANNEL_LAYOUT_MONO) {
+ int result = av_dict_set(&codec_options, "apply_phase_inv", "0", 0);
+ DCHECK_GE(result, 0);
+ }
+ }
+
AVCodec* codec = avcodec_find_decoder(codec_context_->codec_id);
- if (!codec || avcodec_open2(codec_context_.get(), codec, NULL) < 0) {
+ if (!codec ||
+ avcodec_open2(codec_context_.get(), codec, &codec_options) < 0) {
DLOG(ERROR) << "Could not initialize audio decoder: "
<< codec_context_->codec_id;
ReleaseFFmpegResources();
state_ = kUninitialized;
return false;
}
+ // Verify avcodec_open2() used all given options.
+ DCHECK_EQ(0, av_dict_count(codec_options));
// Success!
av_sample_format_ = codec_context_->sample_fmt;
diff --git a/chromium/media/filters/ffmpeg_demuxer.cc b/chromium/media/filters/ffmpeg_demuxer.cc
index 3dde789738d..7402ce16ab5 100644
--- a/chromium/media/filters/ffmpeg_demuxer.cc
+++ b/chromium/media/filters/ffmpeg_demuxer.cc
@@ -85,29 +85,26 @@ static base::TimeDelta FramesToTimeDelta(int frames, double sample_rate) {
frames * base::Time::kMicrosecondsPerSecond / sample_rate);
}
-static base::TimeDelta ExtractStartTime(AVStream* stream,
- base::TimeDelta start_time_estimate) {
- DCHECK(start_time_estimate != kNoTimestamp);
- if (stream->start_time == static_cast<int64_t>(AV_NOPTS_VALUE)) {
- return start_time_estimate == kInfiniteDuration ? base::TimeDelta()
- : start_time_estimate;
+static base::TimeDelta ExtractStartTime(AVStream* stream) {
+ // The default start time is zero.
+ base::TimeDelta start_time;
+
+ // First try to use the |start_time| value as is.
+ if (stream->start_time != kNoFFmpegTimestamp)
+ start_time = ConvertFromTimeBase(stream->time_base, stream->start_time);
+
+ // Next try to use the first DTS value, for codecs where we know PTS == DTS
+ // (excludes all H26x codecs). The start time must be returned in PTS.
+ if (stream->first_dts != kNoFFmpegTimestamp &&
+ stream->codecpar->codec_id != AV_CODEC_ID_HEVC &&
+ stream->codecpar->codec_id != AV_CODEC_ID_H264 &&
+ stream->codecpar->codec_id != AV_CODEC_ID_MPEG4) {
+ const base::TimeDelta first_pts =
+ ConvertFromTimeBase(stream->time_base, stream->first_dts);
+ if (first_pts < start_time)
+ start_time = first_pts;
}
- // First try the lower of the estimate and the |start_time| value.
- base::TimeDelta start_time =
- std::min(ConvertFromTimeBase(stream->time_base, stream->start_time),
- start_time_estimate);
-
- // Next see if the first buffered pts value is usable.
- if (stream->pts_buffer[0] != static_cast<int64_t>(AV_NOPTS_VALUE)) {
- const base::TimeDelta buffered_pts =
- ConvertFromTimeBase(stream->time_base, stream->pts_buffer[0]);
- if (buffered_pts < start_time)
- start_time = buffered_pts;
- }
-
- // NOTE: Do not use AVStream->first_dts since |start_time| should be a
- // presentation timestamp.
return start_time;
}
@@ -130,8 +127,7 @@ static void UmaHistogramAspectRatio(const char* name, const T& size) {
name,
// Intentionally use integer division to truncate the result.
size.height() ? (size.width() * 100) / size.height() : kInfiniteRatio,
- base::CustomHistogram::ArrayToCustomRanges(
- kCommonAspectRatios100, arraysize(kCommonAspectRatios100)));
+ base::CustomHistogram::ArrayToCustomEnumRanges(kCommonAspectRatios100));
}
// Record detected track counts by type corresponding to a src= playback.
@@ -514,7 +510,7 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
buffer->set_duration(kNoTimestamp);
}
- // Note: If pts is AV_NOPTS_VALUE, stream_timestamp will be kNoTimestamp.
+ // Note: If pts is kNoFFmpegTimestamp, stream_timestamp will be kNoTimestamp.
const base::TimeDelta stream_timestamp =
ConvertStreamTimestamp(stream_->time_base, packet->pts);
@@ -557,8 +553,8 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
// code paths below; otherwise they should be treated as a parse error.
if ((!fixup_chained_ogg_ || last_packet_timestamp_ == kNoTimestamp) &&
buffer->timestamp() < base::TimeDelta()) {
- MEDIA_LOG(DEBUG, media_log_)
- << "FFmpegDemuxer: unfixable negative timestamp";
+ MEDIA_LOG(ERROR, media_log_)
+ << "FFmpegDemuxer: unfixable negative timestamp.";
demuxer_->NotifyDemuxerError(DEMUXER_ERROR_COULD_NOT_PARSE);
return;
}
@@ -716,7 +712,8 @@ void FFmpegDemuxerStream::EnableBitstreamConverter() {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
InitBitstreamConverter();
#else
- NOTREACHED() << "Proprietary codecs not enabled.";
+ DLOG(ERROR) << "Proprietary codecs not enabled and stream requires bitstream "
+ "conversion. Playback will likely fail.";
#endif
}
@@ -798,14 +795,6 @@ void FFmpegDemuxerStream::SetEnabled(bool enabled, base::TimeDelta timestamp) {
DVLOG(1) << "Read from disabled stream, returning EOS";
base::ResetAndReturn(&read_cb_).Run(kOk, DecoderBuffer::CreateEOSBuffer());
}
- if (!stream_status_change_cb_.is_null())
- stream_status_change_cb_.Run(this, is_enabled_, timestamp);
-}
-
-void FFmpegDemuxerStream::SetStreamStatusChangeCB(
- const StreamStatusChangeCB& cb) {
- DCHECK(!cb.is_null());
- stream_status_change_cb_ = cb;
}
void FFmpegDemuxerStream::SetLiveness(Liveness liveness) {
@@ -871,7 +860,7 @@ std::string FFmpegDemuxerStream::GetMetadata(const char* key) const {
base::TimeDelta FFmpegDemuxerStream::ConvertStreamTimestamp(
const AVRational& time_base,
int64_t timestamp) {
- if (timestamp == static_cast<int64_t>(AV_NOPTS_VALUE))
+ if (timestamp == kNoFFmpegTimestamp)
return kNoTimestamp;
return ConvertFromTimeBase(time_base, timestamp);
@@ -1103,13 +1092,6 @@ std::vector<DemuxerStream*> FFmpegDemuxer::GetAllStreams() {
return result;
}
-void FFmpegDemuxer::SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) {
- for (const auto& stream : streams_) {
- if (stream)
- stream->SetStreamStatusChangeCB(cb);
- }
-}
-
FFmpegDemuxerStream* FFmpegDemuxer::GetFirstEnabledFFmpegStream(
DemuxerStream::Type type) const {
for (const auto& stream : streams_) {
@@ -1271,42 +1253,6 @@ void FFmpegDemuxer::OnFindStreamInfoDone(const PipelineStatusCB& status_cb,
AVFormatContext* format_context = glue_->format_context();
streams_.resize(format_context->nb_streams);
- // Estimate the start time for each stream by looking through the packets
- // buffered during avformat_find_stream_info(). These values will be
- // considered later when determining the actual stream start time.
- //
- // These packets haven't been completely processed yet, so only look through
- // these values if the AVFormatContext has a valid start time.
- //
- // If no estimate is found, the stream entry will be kInfiniteDuration.
- std::vector<base::TimeDelta> start_time_estimates(format_context->nb_streams,
- kInfiniteDuration);
-#if !BUILDFLAG(USE_SYSTEM_FFMPEG)
- const AVFormatInternal* internal = format_context->internal;
- if (internal && internal->packet_buffer &&
- format_context->start_time != static_cast<int64_t>(AV_NOPTS_VALUE)) {
- struct AVPacketList* packet_buffer = internal->packet_buffer;
- while (packet_buffer != internal->packet_buffer_end) {
- DCHECK_LT(static_cast<size_t>(packet_buffer->pkt.stream_index),
- start_time_estimates.size());
- const AVStream* stream =
- format_context->streams[packet_buffer->pkt.stream_index];
- if (packet_buffer->pkt.pts != static_cast<int64_t>(AV_NOPTS_VALUE)) {
- const base::TimeDelta packet_pts =
- ConvertFromTimeBase(stream->time_base, packet_buffer->pkt.pts);
- // We ignore kNoTimestamp here since -int64_t::min() is possible; see
- // https://crbug.com/700501. Technically this is a valid value, but in
- // practice shouldn't occur, so just ignore it when estimating.
- if (packet_pts != kNoTimestamp && packet_pts != kInfiniteDuration &&
- packet_pts < start_time_estimates[stream->index]) {
- start_time_estimates[stream->index] = packet_pts;
- }
- }
- packet_buffer = packet_buffer->next;
- }
- }
-#endif // !BUILDFLAG(USE_SYSTEM_FFMPEG)
-
std::unique_ptr<MediaTracks> media_tracks(new MediaTracks());
DCHECK(track_id_to_demux_stream_map_.empty());
@@ -1455,8 +1401,7 @@ void FFmpegDemuxer::OnFindStreamInfoDone(const PipelineStatusCB& status_cb,
max_duration = std::max(max_duration, streams_[i]->duration());
- base::TimeDelta start_time =
- ExtractStartTime(stream, start_time_estimates[i]);
+ base::TimeDelta start_time = ExtractStartTime(stream);
// Note: This value is used for seeking, so we must take the true value and
// not the one possibly clamped to zero below.
@@ -1494,7 +1439,7 @@ void FFmpegDemuxer::OnFindStreamInfoDone(const PipelineStatusCB& status_cb,
if (text_enabled_)
AddTextStreams();
- if (format_context->duration != static_cast<int64_t>(AV_NOPTS_VALUE)) {
+ if (format_context->duration != kNoFFmpegTimestamp) {
// If there is a duration value in the container use that to find the
// maximum between it and the duration from A/V streams.
const AVRational av_time_base = {1, AV_TIME_BASE};
@@ -1737,9 +1682,11 @@ void FFmpegDemuxer::OnSeekFrameDone(int result) {
base::ResetAndReturn(&pending_seek_cb_).Run(PIPELINE_OK);
}
-void FFmpegDemuxer::OnEnabledAudioTracksChanged(
+void FFmpegDemuxer::FindAndEnableProperTracks(
const std::vector<MediaTrack::Id>& track_ids,
- base::TimeDelta curr_time) {
+ base::TimeDelta curr_time,
+ DemuxerStream::Type track_type,
+ TrackChangeCB change_completed_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
std::set<FFmpegDemuxerStream*> enabled_streams;
@@ -1748,7 +1695,7 @@ void FFmpegDemuxer::OnEnabledAudioTracksChanged(
if (it == track_id_to_demux_stream_map_.end())
continue;
FFmpegDemuxerStream* stream = it->second;
- DCHECK_EQ(DemuxerStream::AUDIO, stream->type());
+ DCHECK_EQ(track_type, stream->type());
// TODO(servolk): Remove after multiple enabled audio tracks are supported
// by the media::RendererImpl.
if (!enabled_streams.empty()) {
@@ -1757,52 +1704,38 @@ void FFmpegDemuxer::OnEnabledAudioTracksChanged(
continue;
}
enabled_streams.insert(stream);
+ stream->SetEnabled(true, curr_time);
}
// First disable all streams that need to be disabled and then enable streams
// that are enabled.
for (const auto& stream : streams_) {
- if (stream && stream->type() == DemuxerStream::AUDIO &&
+ if (stream && stream->type() == track_type &&
enabled_streams.find(stream.get()) == enabled_streams.end()) {
DVLOG(1) << __func__ << ": disabling stream " << stream.get();
stream->SetEnabled(false, curr_time);
}
}
- for (auto* stream : enabled_streams) {
- DCHECK(stream);
- DVLOG(1) << __func__ << ": enabling stream " << stream;
- stream->SetEnabled(true, curr_time);
- }
-}
-void FFmpegDemuxer::OnSelectedVideoTrackChanged(
- base::Optional<MediaTrack::Id> track_id,
- base::TimeDelta curr_time) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ std::vector<DemuxerStream*> streams(enabled_streams.begin(),
+ enabled_streams.end());
+ std::move(change_completed_cb).Run(track_type, streams);
+}
- FFmpegDemuxerStream* selected_stream = nullptr;
- if (track_id) {
- auto it = track_id_to_demux_stream_map_.find(*track_id);
- if (it != track_id_to_demux_stream_map_.end()) {
- selected_stream = it->second;
- DCHECK(selected_stream);
- DCHECK_EQ(DemuxerStream::VIDEO, selected_stream->type());
- }
- }
+void FFmpegDemuxer::OnEnabledAudioTracksChanged(
+ const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) {
+ FindAndEnableProperTracks(track_ids, curr_time, DemuxerStream::AUDIO,
+ std::move(change_completed_cb));
+}
- // First disable all streams that need to be disabled and then enable the
- // stream that needs to be enabled (if any).
- for (const auto& stream : streams_) {
- if (stream && stream->type() == DemuxerStream::VIDEO &&
- stream.get() != selected_stream) {
- DVLOG(1) << __func__ << ": disabling stream " << stream.get();
- stream->SetEnabled(false, curr_time);
- }
- }
- if (selected_stream) {
- DVLOG(1) << __func__ << ": enabling stream " << selected_stream;
- selected_stream->SetEnabled(true, curr_time);
- }
+void FFmpegDemuxer::OnSelectedVideoTrackChanged(
+ const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) {
+ FindAndEnableProperTracks(track_ids, curr_time, DemuxerStream::VIDEO,
+ std::move(change_completed_cb));
}
void FFmpegDemuxer::ReadFrameIfNeeded() {
diff --git a/chromium/media/filters/ffmpeg_demuxer.h b/chromium/media/filters/ffmpeg_demuxer.h
index a847f6a263d..425958d0244 100644
--- a/chromium/media/filters/ffmpeg_demuxer.h
+++ b/chromium/media/filters/ffmpeg_demuxer.h
@@ -119,8 +119,6 @@ class MEDIA_EXPORT FFmpegDemuxerStream : public DemuxerStream {
bool IsEnabled() const;
void SetEnabled(bool enabled, base::TimeDelta timestamp);
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb);
-
void SetLiveness(Liveness liveness);
// Returns the range of buffered data in this stream.
@@ -190,7 +188,6 @@ class MEDIA_EXPORT FFmpegDemuxerStream : public DemuxerStream {
DecoderBufferQueue buffer_queue_;
ReadCB read_cb_;
- StreamStatusChangeCB stream_status_change_cb_;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
std::unique_ptr<FFmpegBitstreamConverter> bitstream_converter_;
@@ -226,7 +223,6 @@ class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
void Seek(base::TimeDelta time, const PipelineStatusCB& cb) override;
base::Time GetTimelineOffset() const override;
std::vector<DemuxerStream*> GetAllStreams() override;
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) override;
base::TimeDelta GetStartTime() const override;
int64_t GetMemoryUsage() const override;
@@ -244,11 +240,12 @@ class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
void NotifyDemuxerError(PipelineStatus error);
void OnEnabledAudioTracksChanged(const std::vector<MediaTrack::Id>& track_ids,
- base::TimeDelta curr_time) override;
- // |track_id| either contains the selected video track id or is null,
- // indicating that all video tracks are deselected/disabled.
- void OnSelectedVideoTrackChanged(base::Optional<MediaTrack::Id> track_id,
- base::TimeDelta curr_time) override;
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) override;
+
+ void OnSelectedVideoTrackChanged(const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) override;
// The lowest demuxed timestamp. If negative, DemuxerStreams must use this to
// adjust packet timestamps such that external clients see a zero-based
@@ -268,6 +265,12 @@ class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
// To allow tests access to privates.
friend class FFmpegDemuxerTest;
+ // Helper for video and audio track changing.
+ void FindAndEnableProperTracks(const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ DemuxerStream::Type track_type,
+ TrackChangeCB change_completed_cb);
+
// FFmpeg callbacks during initialization.
void OnOpenContextDone(const PipelineStatusCB& status_cb, bool result);
void OnFindStreamInfoDone(const PipelineStatusCB& status_cb, int result);
diff --git a/chromium/media/filters/ffmpeg_demuxer_unittest.cc b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
index 3f82b8687f8..b09816a3ba3 100644
--- a/chromium/media/filters/ffmpeg_demuxer_unittest.cc
+++ b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
@@ -13,7 +13,6 @@
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/path_service.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
@@ -77,42 +76,6 @@ MATCHER_P(SkippingUnsupportedStream, stream_type, "") {
std::string(stream_type) + " track");
}
-namespace {
-void OnStreamStatusChanged(base::WaitableEvent* event,
- DemuxerStream* stream,
- bool enabled,
- base::TimeDelta) {
- event->Signal();
-}
-
-void CheckStreamStatusNotifications(
- MediaResource* media_resource,
- FFmpegDemuxerStream* stream,
- base::test::ScopedTaskEnvironment* scoped_task_environment) {
- base::WaitableEvent event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
-
- ASSERT_TRUE(stream->IsEnabled());
- media_resource->SetStreamStatusChangeCB(
- base::Bind(&OnStreamStatusChanged, base::Unretained(&event)));
-
- stream->SetEnabled(false, base::TimeDelta());
- scoped_task_environment->RunUntilIdle();
- ASSERT_TRUE(event.IsSignaled());
-
- event.Reset();
- stream->SetEnabled(true, base::TimeDelta());
- scoped_task_environment->RunUntilIdle();
- ASSERT_TRUE(event.IsSignaled());
-}
-
-void OnReadDone_ExpectEos(DemuxerStream::Status status,
- scoped_refptr<DecoderBuffer> buffer) {
- EXPECT_EQ(status, DemuxerStream::kOk);
- EXPECT_TRUE(buffer->end_of_stream());
-}
-}
-
const uint8_t kEncryptedMediaInitData[] = {
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
0x38, 0x39, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35,
@@ -122,7 +85,7 @@ static void EosOnReadDone(bool* got_eos_buffer,
DemuxerStream::Status status,
scoped_refptr<DecoderBuffer> buffer) {
base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure());
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
EXPECT_EQ(status, DemuxerStream::kOk);
if (buffer->end_of_stream()) {
@@ -142,7 +105,7 @@ class FFmpegDemuxerTest : public testing::Test {
protected:
FFmpegDemuxerTest() = default;
- virtual ~FFmpegDemuxerTest() { Shutdown(); }
+ ~FFmpegDemuxerTest() override { Shutdown(); }
void Shutdown() {
if (demuxer_)
@@ -246,7 +209,7 @@ class FFmpegDemuxerTest : public testing::Test {
}
OnReadDoneCalled(read_expectation.size, read_expectation.timestamp_us);
base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure());
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
}
DemuxerStream::ReadCB NewReadCB(
@@ -356,7 +319,7 @@ class FFmpegDemuxerTest : public testing::Test {
CHECK(!data_source_);
base::FilePath file_path;
- EXPECT_TRUE(PathService::Get(base::DIR_SOURCE_ROOT, &file_path));
+ EXPECT_TRUE(base::PathService::Get(base::DIR_SOURCE_ROOT, &file_path));
file_path = file_path.Append(FILE_PATH_LITERAL("media"))
.Append(FILE_PATH_LITERAL("test"))
@@ -517,6 +480,15 @@ TEST_F(FFmpegDemuxerTest, Initialize_Encrypted) {
InitializeDemuxer();
}
+TEST_F(FFmpegDemuxerTest, Initialize_NoConfigChangeSupport) {
+ // Will create one audio, one video, and one text stream.
+ CreateDemuxer("bear-vp8-webvtt.webm");
+ InitializeDemuxer();
+
+ for (auto* stream : demuxer_->GetAllStreams())
+ EXPECT_FALSE(stream->SupportsConfigChanges());
+}
+
TEST_F(FFmpegDemuxerTest, AbortPendingReads) {
// We test that on a successful audio packet read.
CreateDemuxer("bear-320x240.webm");
@@ -724,12 +696,9 @@ TEST_F(FFmpegDemuxerTest, Read_InvalidNegativeTimestamp) {
ReadUntilEndOfStream(GetStream(DemuxerStream::AUDIO));
}
-// TODO(dalecurtis): Test is disabled since FFmpeg does not currently guarantee
-// the order of demuxed packets in OGG containers. Re-enable and fix key frame
-// expectations once we decide to either workaround it or attempt a fix
-// upstream. See http://crbug.com/387996.
-TEST_F(FFmpegDemuxerTest,
- DISABLED_Read_AudioNegativeStartTimeAndOggDiscard_Bear) {
+// Android has no Theora support, so these tests don't work.
+#if !defined(OS_ANDROID)
+TEST_F(FFmpegDemuxerTest, Read_AudioNegativeStartTimeAndOggDiscard_Bear) {
// Many ogg files have negative starting timestamps, so ensure demuxing and
// seeking work correctly with a negative start time.
CreateDemuxer("bear.ogv");
@@ -739,8 +708,12 @@ TEST_F(FFmpegDemuxerTest,
DemuxerStream* video = GetStream(DemuxerStream::VIDEO);
DemuxerStream* audio = GetStream(DemuxerStream::AUDIO);
- // Run the test twice with a seek in between.
- for (int i = 0; i < 2; ++i) {
+ // Run the test once (should be twice..., see note) with a seek in between.
+ //
+ // TODO(dalecurtis): We only run the test once since FFmpeg does not currently
+ // guarantee the order of demuxed packets in OGG containers. See
+ // http://crbug.com/387996.
+ for (int i = 0; i < 1; ++i) {
audio->Read(
NewReadCBWithCheckedDiscard(FROM_HERE, 40, 0, kInfiniteDuration, true));
base::RunLoop().Run();
@@ -759,10 +732,10 @@ TEST_F(FFmpegDemuxerTest,
video->Read(NewReadCB(FROM_HERE, 5751, 0, true));
base::RunLoop().Run();
- video->Read(NewReadCB(FROM_HERE, 846, 33367, true));
+ video->Read(NewReadCB(FROM_HERE, 846, 33367, false));
base::RunLoop().Run();
- video->Read(NewReadCB(FROM_HERE, 1255, 66733, true));
+ video->Read(NewReadCB(FROM_HERE, 1255, 66733, false));
base::RunLoop().Run();
// Seek back to the beginning and repeat the test.
@@ -775,9 +748,6 @@ TEST_F(FFmpegDemuxerTest,
// Same test above, but using sync2.ogv which has video stream muxed before the
// audio stream, so seeking based only on start time will fail since ffmpeg is
// essentially just seeking based on file position.
-//
-// Android has no Theora support, so this test doesn't work.
-#if !defined(OS_ANDROID)
TEST_F(FFmpegDemuxerTest, Read_AudioNegativeStartTimeAndOggDiscard_Sync) {
// Many ogg files have negative starting timestamps, so ensure demuxing and
// seeking work correctly with a negative start time.
@@ -1257,7 +1227,6 @@ TEST_F(FFmpegDemuxerTest, SeekWithCuesBeforeFirstCluster) {
base::RunLoop().Run();
}
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
// Ensure ID3v1 tag reading is disabled. id3_test.mp3 has an ID3v1 tag with the
// field "title" set to "sample for id3 test".
TEST_F(FFmpegDemuxerTest, NoID3TagData) {
@@ -1265,9 +1234,7 @@ TEST_F(FFmpegDemuxerTest, NoID3TagData) {
InitializeDemuxer();
EXPECT_FALSE(av_dict_get(format_context()->metadata, "title", NULL, 0));
}
-#endif
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
// Ensure MP3 files with large image/video based ID3 tags demux okay. FFmpeg
// will hand us a video stream to the data which will likely be in a format we
// don't accept as video; e.g. PNG.
@@ -1287,7 +1254,6 @@ TEST_F(FFmpegDemuxerTest, Mp3WithVideoStreamID3TagData) {
EXPECT_FALSE(GetStream(DemuxerStream::VIDEO));
EXPECT_TRUE(GetStream(DemuxerStream::AUDIO));
}
-#endif
// Ensure a video with an unsupported audio track still results in the video
// stream being demuxed. Because we disable the speex parser for ogg, the audio
@@ -1330,6 +1296,7 @@ TEST_F(FFmpegDemuxerTest, MP4_ZeroStszEntry) {
InitializeDemuxer();
ReadUntilEndOfStream(GetStream(DemuxerStream::AUDIO));
}
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
class Mp3SeekFFmpegDemuxerTest
: public FFmpegDemuxerTest,
@@ -1371,6 +1338,7 @@ INSTANTIATE_TEST_CASE_P(, Mp3SeekFFmpegDemuxerTest,
"bear-audio-10s-VBR-has-TOC.mp3",
"bear-audio-10s-VBR-no-TOC.mp3"));
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
static void ValidateAnnexB(DemuxerStream* stream,
DemuxerStream::Status status,
scoped_refptr<DecoderBuffer> buffer) {
@@ -1378,7 +1346,7 @@ static void ValidateAnnexB(DemuxerStream* stream,
if (buffer->end_of_stream()) {
base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure());
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
return;
}
@@ -1395,7 +1363,7 @@ static void ValidateAnnexB(DemuxerStream* stream,
if (!is_valid) {
LOG(ERROR) << "Buffer contains invalid Annex B data.";
base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure());
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
return;
}
@@ -1737,7 +1705,6 @@ TEST_F(FFmpegDemuxerTest, Read_Flac) {
44100, kSampleFormatS32);
}
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST_F(FFmpegDemuxerTest, Read_Flac_Mp4) {
CreateDemuxer("bear-flac.mp4");
InitializeDemuxer();
@@ -1759,7 +1726,6 @@ TEST_F(FFmpegDemuxerTest, Read_Flac_192kHz_Mp4) {
VerifyFlacStream(GetStream(DemuxerStream::AUDIO), 32, CHANNEL_LAYOUT_STEREO,
192000, kSampleFormatS32);
}
-#endif // USE_PROPRIETARY_CODECS
// Verify that FFmpeg demuxer falls back to choosing disabled streams for
// seeking if there's no suitable enabled stream found.
@@ -1797,6 +1763,57 @@ TEST_F(FFmpegDemuxerTest, Seek_FallbackToDisabledAudioStream) {
EXPECT_EQ(astream, preferred_seeking_stream(base::TimeDelta()));
}
+namespace {
+void QuitLoop(base::Closure quit_closure,
+ DemuxerStream::Type type,
+ const std::vector<DemuxerStream*>& streams) {
+ quit_closure.Run();
+}
+
+void DisableAndEnableDemuxerTracks(
+ FFmpegDemuxer* demuxer,
+ base::test::ScopedTaskEnvironment* scoped_task_environment) {
+ base::WaitableEvent event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
+ base::WaitableEvent::InitialState::NOT_SIGNALED);
+ std::vector<MediaTrack::Id> audio_tracks;
+ std::vector<MediaTrack::Id> video_tracks;
+
+ base::RunLoop disable_video;
+ demuxer->OnSelectedVideoTrackChanged(
+ video_tracks, base::TimeDelta(),
+ base::BindOnce(QuitLoop, base::Passed(disable_video.QuitClosure())));
+ disable_video.Run();
+
+ base::RunLoop disable_audio;
+ demuxer->OnEnabledAudioTracksChanged(
+ audio_tracks, base::TimeDelta(),
+ base::BindOnce(QuitLoop, base::Passed(disable_audio.QuitClosure())));
+ disable_audio.Run();
+
+ base::RunLoop enable_video;
+ video_tracks.push_back(MediaTrack::Id("1"));
+ demuxer->OnSelectedVideoTrackChanged(
+ video_tracks, base::TimeDelta(),
+ base::BindOnce(QuitLoop, base::Passed(enable_video.QuitClosure())));
+ enable_video.Run();
+
+ base::RunLoop enable_audio;
+ audio_tracks.push_back(MediaTrack::Id("2"));
+ demuxer->OnEnabledAudioTracksChanged(
+ audio_tracks, base::TimeDelta(),
+ base::BindOnce(QuitLoop, base::Passed(enable_audio.QuitClosure())));
+ enable_audio.Run();
+
+ scoped_task_environment->RunUntilIdle();
+}
+
+void OnReadDoneExpectEos(DemuxerStream::Status status,
+ const scoped_refptr<DecoderBuffer> buffer) {
+ EXPECT_EQ(status, DemuxerStream::kOk);
+ EXPECT_TRUE(buffer->end_of_stream());
+}
+} // namespace
+
TEST_F(FFmpegDemuxerTest, StreamStatusNotifications) {
CreateDemuxer("bear-320x240.webm");
InitializeDemuxer();
@@ -1808,23 +1825,19 @@ TEST_F(FFmpegDemuxerTest, StreamStatusNotifications) {
EXPECT_NE(nullptr, video_stream);
// Verify stream status notifications delivery without pending read first.
- CheckStreamStatusNotifications(demuxer_.get(), audio_stream,
- &scoped_task_environment_);
- CheckStreamStatusNotifications(demuxer_.get(), video_stream,
- &scoped_task_environment_);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
// Verify that stream notifications are delivered properly when stream status
// changes with a pending read. Call FlushBuffers before reading, to ensure
// there is no buffers ready to be returned by the Read right away, thus
// ensuring that status changes occur while an async read is pending.
+
audio_stream->FlushBuffers();
- audio_stream->Read(base::Bind(&media::OnReadDone_ExpectEos));
- CheckStreamStatusNotifications(demuxer_.get(), audio_stream,
- &scoped_task_environment_);
video_stream->FlushBuffers();
- video_stream->Read(base::Bind(&media::OnReadDone_ExpectEos));
- CheckStreamStatusNotifications(demuxer_.get(), video_stream,
- &scoped_task_environment_);
+ audio_stream->Read(base::Bind(&OnReadDoneExpectEos));
+ video_stream->Read(base::Bind(&OnReadDoneExpectEos));
+
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
}
TEST_F(FFmpegDemuxerTest, MultitrackMemoryUsage) {
diff --git a/chromium/media/filters/ffmpeg_glue_unittest.cc b/chromium/media/filters/ffmpeg_glue_unittest.cc
index 2289d837248..e51019cd661 100644
--- a/chromium/media/filters/ffmpeg_glue_unittest.cc
+++ b/chromium/media/filters/ffmpeg_glue_unittest.cc
@@ -299,17 +299,17 @@ TEST_F(FFmpegGlueContainerTest, WAV) {
ExpectContainer(container_names::CONTAINER_WAV);
}
+TEST_F(FFmpegGlueContainerTest, MP3) {
+ InitializeAndOpen("sfx.mp3");
+ ExpectContainer(container_names::CONTAINER_MP3);
+}
+
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST_F(FFmpegGlueContainerTest, MOV) {
InitializeAndOpen("sfx.m4a");
ExpectContainer(container_names::CONTAINER_MOV);
}
-TEST_F(FFmpegGlueContainerTest, MP3) {
- InitializeAndOpen("sfx.mp3");
- ExpectContainer(container_names::CONTAINER_MP3);
-}
-
TEST_F(FFmpegGlueContainerTest, AAC) {
InitializeAndOpen("sfx.adts");
ExpectContainer(container_names::CONTAINER_AAC);
diff --git a/chromium/media/filters/ffmpeg_video_decoder.cc b/chromium/media/filters/ffmpeg_video_decoder.cc
index 60c9badd031..b29c68dc7cc 100644
--- a/chromium/media/filters/ffmpeg_video_decoder.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder.cc
@@ -149,7 +149,8 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
codec_context->sample_aspect_ratio.num,
codec_context->sample_aspect_ratio.den);
} else {
- natural_size = config_.natural_size();
+ natural_size =
+ GetNaturalSize(gfx::Rect(size), config_.GetPixelAspectRatio());
}
// FFmpeg has specific requirements on the allocation size of the frame. The
diff --git a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
index 5644e955623..1646931504b 100644
--- a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -9,6 +9,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
@@ -69,9 +70,7 @@ class FFmpegVideoDecoderTest : public testing::Test {
corrupt_i_frame_buffer_ = ReadTestDataFile("vp8-corrupt-I-frame");
}
- virtual ~FFmpegVideoDecoderTest() {
- Destroy();
- }
+ ~FFmpegVideoDecoderTest() override { Destroy(); }
void Initialize() {
InitializeWithConfig(TestVideoConfig::Normal());
@@ -82,7 +81,7 @@ class FFmpegVideoDecoderTest : public testing::Test {
decoder_->Initialize(
config, false, nullptr, NewExpectedBoolCB(success),
base::Bind(&FFmpegVideoDecoderTest::FrameReady, base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/file_data_source_unittest.cc b/chromium/media/filters/file_data_source_unittest.cc
index ee107a1b1df..15398893299 100644
--- a/chromium/media/filters/file_data_source_unittest.cc
+++ b/chromium/media/filters/file_data_source_unittest.cc
@@ -38,7 +38,7 @@ class ReadCBHandler {
// chars so just return the string from the base::FilePath.
base::FilePath TestFileURL() {
base::FilePath data_dir;
- EXPECT_TRUE(PathService::Get(base::DIR_SOURCE_ROOT, &data_dir));
+ EXPECT_TRUE(base::PathService::Get(base::DIR_SOURCE_ROOT, &data_dir));
data_dir = data_dir.Append(FILE_PATH_LITERAL("media"))
.Append(FILE_PATH_LITERAL("test"))
.Append(FILE_PATH_LITERAL("data"))
diff --git a/chromium/media/filters/gpu_video_decoder.cc b/chromium/media/filters/gpu_video_decoder.cc
index dbd05e84155..b16ed0a110e 100644
--- a/chromium/media/filters/gpu_video_decoder.cc
+++ b/chromium/media/filters/gpu_video_decoder.cc
@@ -29,6 +29,7 @@
#include "media/base/pipeline_status.h"
#include "media/base/surface_manager.h"
#include "media/base/video_decoder_config.h"
+#include "media/base/video_util.h"
#include "media/media_buildflags.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "third_party/skia/include/core/SkBitmap.h"
@@ -94,7 +95,7 @@ GpuVideoDecoder::GpuVideoDecoder(
DCHECK(factories_);
}
-void GpuVideoDecoder::Reset(const base::Closure& closure) {
+void GpuVideoDecoder::Reset(const base::Closure& closure) {
DVLOG(3) << "Reset()";
DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
@@ -120,9 +121,9 @@ static bool IsCodedSizeSupported(const gfx::Size& coded_size,
const gfx::Size& min_resolution,
const gfx::Size& max_resolution) {
return (coded_size.width() <= max_resolution.width() &&
- coded_size.height() <= max_resolution.height() &&
- coded_size.width() >= min_resolution.width() &&
- coded_size.height() >= min_resolution.height());
+ coded_size.height() <= max_resolution.height() &&
+ coded_size.width() >= min_resolution.width() &&
+ coded_size.height() >= min_resolution.height());
}
// Report |success| to UMA and run |cb| with it. This is super-specific to the
@@ -134,8 +135,8 @@ static void ReportGpuVideoDecoderInitializeStatusToUMAAndRunCB(
bool success) {
// TODO(xhwang): Report |success| directly.
PipelineStatus status = success ? PIPELINE_OK : DECODER_ERROR_NOT_SUPPORTED;
- UMA_HISTOGRAM_ENUMERATION(
- "Media.GpuVideoDecoderInitializeStatus", status, PIPELINE_STATUS_MAX + 1);
+ UMA_HISTOGRAM_ENUMERATION("Media.GpuVideoDecoderInitializeStatus", status,
+ PIPELINE_STATUS_MAX + 1);
if (!success) {
media_log->RecordRapporWithSecurityOrigin(
@@ -168,6 +169,14 @@ void GpuVideoDecoder::Initialize(
DVLOG(1) << (previously_initialized ? "Reinitializing" : "Initializing")
<< " GVD with config: " << config.AsHumanReadableString();
+ auto encryption_mode = config.encryption_scheme().mode();
+ if (encryption_mode != EncryptionScheme::CIPHER_MODE_UNENCRYPTED &&
+ encryption_mode != EncryptionScheme::CIPHER_MODE_AES_CTR) {
+ DVLOG(1) << "VDAs only support clear or cenc encrypted streams.";
+ bound_init_cb.Run(false);
+ return;
+ }
+
// Disallow codec changes between configuration changes.
if (previously_initialized && config_.codec() != config.codec()) {
DVLOG(1) << "Codec changed, cannot reinitialize.";
@@ -261,15 +270,13 @@ void GpuVideoDecoder::Initialize(
return;
}
- if (config.is_encrypted()) {
- DCHECK(cdm_context);
+ if (cdm_context)
cdm_id_ = cdm_context->GetCdmId();
- // No need to store |cdm_context| since it's not needed in reinitialization.
- if (cdm_id_ == CdmContext::kInvalidCdmId) {
- DVLOG(1) << "CDM ID not available.";
- bound_init_cb.Run(false);
- return;
- }
+
+ if (config.is_encrypted() && cdm_id_ == CdmContext::kInvalidCdmId) {
+ DVLOG(1) << "CDM ID not available.";
+ bound_init_cb.Run(false);
+ return;
}
init_cb_ = bound_init_cb;
@@ -442,8 +449,11 @@ void GpuVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
shared_memory->handle(), size, 0,
buffer->timestamp());
- if (buffer->decrypt_config())
- bitstream_buffer.SetDecryptConfig(*buffer->decrypt_config());
+ if (buffer->decrypt_config()) {
+ bitstream_buffer.SetDecryptionSettings(
+ buffer->decrypt_config()->key_id(), buffer->decrypt_config()->iv(),
+ buffer->decrypt_config()->subsamples());
+ }
// Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
@@ -462,10 +472,9 @@ void GpuVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
void GpuVideoDecoder::RecordBufferData(const BitstreamBuffer& bitstream_buffer,
const DecoderBuffer& buffer) {
- input_buffer_data_.push_front(BufferData(bitstream_buffer.id(),
- buffer.timestamp(),
- config_.visible_rect(),
- config_.natural_size()));
+ input_buffer_data_.push_front(
+ BufferData(bitstream_buffer.id(), buffer.timestamp(),
+ config_.visible_rect(), config_.natural_size()));
// Why this value? Because why not. avformat.h:MAX_REORDER_DELAY is 16, but
// that's too small for some pathological B-frame test videos. The cost of
// using too-high a value is low (192 bits per extra slot).
@@ -480,9 +489,8 @@ void GpuVideoDecoder::GetBufferData(int32_t id,
base::TimeDelta* timestamp,
gfx::Rect* visible_rect,
gfx::Size* natural_size) {
- for (std::list<BufferData>::const_iterator it =
- input_buffer_data_.begin(); it != input_buffer_data_.end();
- ++it) {
+ for (std::list<BufferData>::const_iterator it = input_buffer_data_.begin();
+ it != input_buffer_data_.end(); ++it) {
if (it->bitstream_buffer_id != id)
continue;
*timestamp = it->timestamp;
@@ -516,8 +524,8 @@ void GpuVideoDecoder::ProvidePictureBuffers(uint32_t count,
uint32_t textures_per_buffer,
const gfx::Size& size,
uint32_t texture_target) {
- DVLOG(3) << "ProvidePictureBuffers(" << count << ", "
- << size.width() << "x" << size.height() << ")";
+ DVLOG(3) << "ProvidePictureBuffers(" << count << ", " << size.width() << "x"
+ << size.height() << ")";
DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
std::vector<uint32_t> texture_ids;
@@ -555,8 +563,10 @@ void GpuVideoDecoder::ProvidePictureBuffers(uint32_t count,
picture_buffers.push_back(PictureBuffer(next_picture_buffer_id_++, size,
ids, mailboxes, texture_target,
format));
- bool inserted = assigned_picture_buffers_.insert(std::make_pair(
- picture_buffers.back().id(), picture_buffers.back())).second;
+ bool inserted = assigned_picture_buffers_
+ .insert(std::make_pair(picture_buffers.back().id(),
+ picture_buffers.back()))
+ .second;
DCHECK(inserted);
}
@@ -648,7 +658,10 @@ void GpuVideoDecoder::PictureReady(const media::Picture& picture) {
BindToCurrentLoop(base::Bind(
&GpuVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(),
factories_, picture.picture_buffer_id(), pb.client_texture_ids())),
- pb.size(), visible_rect, natural_size, timestamp));
+ pb.size(), visible_rect,
+ GetNaturalSize(visible_rect,
+ GetPixelAspectRatio(visible_rect, natural_size)),
+ timestamp));
if (!frame) {
DLOG(ERROR) << "Create frame failed for: " << picture.picture_buffer_id();
NotifyError(VideoDecodeAccelerator::PLATFORM_FAILURE);
@@ -657,8 +670,8 @@ void GpuVideoDecoder::PictureReady(const media::Picture& picture) {
frame->set_color_space(picture.color_space());
if (picture.allow_overlay())
frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true);
- if (picture.surface_texture())
- frame->metadata()->SetBoolean(VideoFrameMetadata::SURFACE_TEXTURE, true);
+ if (picture.texture_owner())
+ frame->metadata()->SetBoolean(VideoFrameMetadata::TEXTURE_OWNER, true);
if (picture.wants_promotion_hint()) {
frame->metadata()->SetBoolean(VideoFrameMetadata::WANTS_PROMOTION_HINT,
true);
@@ -679,8 +692,7 @@ void GpuVideoDecoder::PictureReady(const media::Picture& picture) {
DeliverFrame(frame);
}
-void GpuVideoDecoder::DeliverFrame(
- const scoped_refptr<VideoFrame>& frame) {
+void GpuVideoDecoder::DeliverFrame(const scoped_refptr<VideoFrame>& frame) {
DCheckGpuVideoAcceleratorFactoriesTaskRunnerIsCurrent();
// During a pending vda->Reset(), we don't accumulate frames. Drop it on the
diff --git a/chromium/media/filters/h264_bitstream_buffer.cc b/chromium/media/filters/h264_bitstream_buffer.cc
index cb81b086ffd..a37def808df 100644
--- a/chromium/media/filters/h264_bitstream_buffer.cc
+++ b/chromium/media/filters/h264_bitstream_buffer.cc
@@ -137,12 +137,12 @@ void H264BitstreamBuffer::FinishNALU() {
FlushReg();
}
-size_t H264BitstreamBuffer::BytesInBuffer() {
+size_t H264BitstreamBuffer::BytesInBuffer() const {
DCHECK_FINISHED();
return pos_;
}
-uint8_t* H264BitstreamBuffer::data() {
+const uint8_t* H264BitstreamBuffer::data() const {
DCHECK(data_);
DCHECK_FINISHED();
diff --git a/chromium/media/filters/h264_bitstream_buffer.h b/chromium/media/filters/h264_bitstream_buffer.h
index 7c16ef07547..4c5d1fb3ec6 100644
--- a/chromium/media/filters/h264_bitstream_buffer.h
+++ b/chromium/media/filters/h264_bitstream_buffer.h
@@ -14,6 +14,7 @@
#include <stdint.h>
#include "base/gtest_prod_util.h"
+#include "base/memory/ref_counted.h"
#include "base/numerics/safe_conversions.h"
#include "media/base/media_export.h"
#include "media/base/video_frame.h"
@@ -24,10 +25,13 @@ namespace media {
// Holds one or more NALUs as a raw bitstream buffer in H.264 Annex-B format.
// Note that this class currently does NOT insert emulation prevention
// three-byte sequences (spec 7.3.1).
-class MEDIA_EXPORT H264BitstreamBuffer {
+// Refcounted as these buffers may be used as arguments to multiple codec jobs
+// (e.g. a buffer containing an H.264 SPS NALU may be used as an argument to all
+// jobs that use parameters contained in that SPS).
+class MEDIA_EXPORT H264BitstreamBuffer
+ : public base::RefCountedThreadSafe<H264BitstreamBuffer> {
public:
H264BitstreamBuffer();
- ~H264BitstreamBuffer();
// Discard all data and reset the buffer for reuse.
void Reset();
@@ -66,14 +70,17 @@ class MEDIA_EXPORT H264BitstreamBuffer {
// Return number of full bytes in the stream. Note that FinishNALU() has to
// be called to flush cached bits, or the return value will not include them.
- size_t BytesInBuffer();
+ size_t BytesInBuffer() const;
// Return a pointer to the stream. FinishNALU() must be called before
// accessing the stream, otherwise some bits may still be cached and not
// in the buffer.
- uint8_t* data();
+ const uint8_t* data() const;
private:
+ friend class base::RefCountedThreadSafe<H264BitstreamBuffer>;
+ ~H264BitstreamBuffer();
+
FRIEND_TEST_ALL_PREFIXES(H264BitstreamBufferAppendBitsTest,
AppendAndVerifyBits);
@@ -116,6 +123,8 @@ class MEDIA_EXPORT H264BitstreamBuffer {
// Buffer for stream data.
uint8_t* data_;
+
+ DISALLOW_COPY_AND_ASSIGN(H264BitstreamBuffer);
};
} // namespace media
diff --git a/chromium/media/filters/h264_bitstream_buffer_unittest.cc b/chromium/media/filters/h264_bitstream_buffer_unittest.cc
index 9c17efe1093..876a878e311 100644
--- a/chromium/media/filters/h264_bitstream_buffer_unittest.cc
+++ b/chromium/media/filters/h264_bitstream_buffer_unittest.cc
@@ -19,18 +19,18 @@ class H264BitstreamBufferAppendBitsTest
// TODO(posciak): More tests!
TEST_P(H264BitstreamBufferAppendBitsTest, AppendAndVerifyBits) {
- H264BitstreamBuffer b;
+ auto b = base::MakeRefCounted<H264BitstreamBuffer>();
uint64_t num_bits = GetParam();
// TODO(posciak): Tests for >64 bits.
ASSERT_LE(num_bits, 64u);
uint64_t num_bytes = (num_bits + 7) / 8;
- b.AppendBits(num_bits, kTestPattern);
- b.FlushReg();
+ b->AppendBits(num_bits, kTestPattern);
+ b->FlushReg();
- EXPECT_EQ(b.BytesInBuffer(), num_bytes);
+ EXPECT_EQ(b->BytesInBuffer(), num_bytes);
- uint8_t* ptr = b.data();
+ const uint8_t* ptr = b->data();
uint64_t got = 0;
uint64_t expected = kTestPattern;
diff --git a/chromium/media/filters/jpeg_parser_unittest.cc b/chromium/media/filters/jpeg_parser_unittest.cc
index 6fbea1a26dd..ac3a2dddbce 100644
--- a/chromium/media/filters/jpeg_parser_unittest.cc
+++ b/chromium/media/filters/jpeg_parser_unittest.cc
@@ -15,7 +15,7 @@ namespace media {
TEST(JpegParserTest, Parsing) {
base::FilePath data_dir;
- ASSERT_TRUE(PathService::Get(base::DIR_SOURCE_ROOT, &data_dir));
+ ASSERT_TRUE(base::PathService::Get(base::DIR_SOURCE_ROOT, &data_dir));
// This sample frame is captured from Chromebook Pixel
base::FilePath file_path = data_dir.AppendASCII("media")
@@ -83,7 +83,7 @@ TEST(JpegParserTest, Parsing) {
TEST(JpegParserTest, CodedSizeNotEqualVisibleSize) {
base::FilePath data_dir;
- ASSERT_TRUE(PathService::Get(base::DIR_SOURCE_ROOT, &data_dir));
+ ASSERT_TRUE(base::PathService::Get(base::DIR_SOURCE_ROOT, &data_dir));
base::FilePath file_path = data_dir.AppendASCII("media")
.AppendASCII("test")
diff --git a/chromium/media/filters/media_file_checker_unittest.cc b/chromium/media/filters/media_file_checker_unittest.cc
index 27d479605a5..9ee40bf98cf 100644
--- a/chromium/media/filters/media_file_checker_unittest.cc
+++ b/chromium/media/filters/media_file_checker_unittest.cc
@@ -38,10 +38,8 @@ TEST(MediaFileCheckerTest, Audio) {
RunMediaFileChecker("sfx.ogg", true);
}
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
TEST(MediaFileCheckerTest, MP3) {
RunMediaFileChecker("sfx.mp3", true);
}
-#endif
} // namespace media
diff --git a/chromium/media/filters/offloading_video_decoder_unittest.cc b/chromium/media/filters/offloading_video_decoder_unittest.cc
index 1dcdf72d0ac..2dc14e44992 100644
--- a/chromium/media/filters/offloading_video_decoder_unittest.cc
+++ b/chromium/media/filters/offloading_video_decoder_unittest.cc
@@ -5,6 +5,7 @@
#include "media/filters/offloading_video_decoder.h"
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/run_loop.h"
#include "base/test/scoped_task_environment.h"
#include "media/base/decoder_buffer.h"
@@ -105,8 +106,7 @@ class OffloadingVideoDecoderTest : public testing::Test {
.WillOnce(DoAll(VerifyOn(task_env_.GetMainThreadTaskRunner()),
RunCallback<3>(true), SaveArg<4>(&output_cb)));
offloading_decoder_->Initialize(config, false, nullptr, ExpectInitCB(true),
- ExpectOutputCB(),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ ExpectOutputCB(), base::NullCallback());
task_env_.RunUntilIdle();
// Verify decode works and is called on the right thread.
@@ -142,8 +142,7 @@ class OffloadingVideoDecoderTest : public testing::Test {
.WillOnce(VerifyOn(task_env_.GetMainThreadTaskRunner()));
}
offloading_decoder_->Initialize(config, false, nullptr, ExpectInitCB(true),
- ExpectOutputCB(),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ ExpectOutputCB(), base::NullCallback());
EXPECT_CALL(*decoder_, Initialize(_, false, nullptr, _, _, _))
.WillOnce(DoAll(VerifyNotOn(task_env_.GetMainThreadTaskRunner()),
RunCallback<3>(true), SaveArg<4>(&output_cb)));
@@ -225,7 +224,7 @@ TEST_F(OffloadingVideoDecoderTest, OffloadingAfterNoOffloading) {
TestVideoConfig::Normal(kCodecVP9), false, nullptr, ExpectInitCB(true),
base::Bind(&OffloadingVideoDecoderTest::OutputDone,
base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
EXPECT_CALL(*decoder_, Detach())
.WillOnce(VerifyNotOn(task_env_.GetMainThreadTaskRunner()));
EXPECT_CALL(*decoder_, Initialize(_, false, nullptr, _, _, _))
@@ -254,7 +253,7 @@ TEST_F(OffloadingVideoDecoderTest, ParallelizedOffloading) {
offload_config, false, nullptr, ExpectInitCB(true),
base::BindRepeating(&OffloadingVideoDecoderTest::OutputDone,
base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
EXPECT_CALL(*decoder_, Initialize(_, false, nullptr, _, _, _))
.WillOnce(DoAll(VerifyNotOn(task_env_.GetMainThreadTaskRunner()),
RunCallback<3>(true), SaveArg<4>(&output_cb)));
@@ -301,7 +300,7 @@ TEST_F(OffloadingVideoDecoderTest, ParallelizedOffloadingResetAbortsDecodes) {
offload_config, false, nullptr, ExpectInitCB(true),
base::BindRepeating(&OffloadingVideoDecoderTest::OutputDone,
base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
EXPECT_CALL(*decoder_, Initialize(_, false, nullptr, _, _, _))
.WillOnce(DoAll(VerifyNotOn(task_env_.GetMainThreadTaskRunner()),
RunCallback<3>(true), SaveArg<4>(&output_cb)));
diff --git a/chromium/media/filters/pipeline_controller.cc b/chromium/media/filters/pipeline_controller.cc
index b4cb1526866..706b58138af 100644
--- a/chromium/media/filters/pipeline_controller.cc
+++ b/chromium/media/filters/pipeline_controller.cc
@@ -215,7 +215,9 @@ void PipelineController::Dispatch() {
}
// If we have pending operations, and a seek is ongoing, abort it.
- if ((pending_seek_ || pending_suspend_) && waiting_for_seek_) {
+ if ((pending_seek_ || pending_suspend_ || pending_audio_track_change_ ||
+ pending_video_track_change_) &&
+ waiting_for_seek_) {
// If there is no pending seek, return the current seek to pending status.
if (!pending_seek_) {
pending_seek_time_ = seek_time_;
@@ -229,6 +231,34 @@ void PipelineController::Dispatch() {
return;
}
+ // We can only switch tracks if we are not in a transitioning state already.
+ if ((pending_audio_track_change_ || pending_video_track_change_) &&
+ (state_ == State::PLAYING || state_ == State::SUSPENDED)) {
+ State old_state = state_;
+ state_ = State::SWITCHING_TRACKS;
+
+ // Attempt to do a track change _before_ attempting a seek operation,
+ // otherwise the seek will apply to the old tracks instead of the new
+ // one(s). Also attempt audio before video.
+ if (pending_audio_track_change_) {
+ pending_audio_track_change_ = false;
+ pipeline_->OnEnabledAudioTracksChanged(
+ pending_audio_track_change_ids_,
+ base::BindOnce(&PipelineController::OnTrackChangeComplete,
+ weak_factory_.GetWeakPtr(), old_state));
+ return;
+ }
+
+ if (pending_video_track_change_) {
+ pending_video_track_change_ = false;
+ pipeline_->OnSelectedVideoTrackChanged(
+ pending_video_track_change_id_,
+ base::BindOnce(&PipelineController::OnTrackChangeComplete,
+ weak_factory_.GetWeakPtr(), old_state));
+ return;
+ }
+ }
+
// Ordinary seeking.
if (pending_seek_ && state_ == State::PLAYING) {
seek_time_ = pending_seek_time_;
@@ -275,6 +305,8 @@ void PipelineController::Stop() {
pending_seek_ = false;
pending_suspend_ = false;
pending_resume_ = false;
+ pending_audio_track_change_ = false;
+ pending_video_track_change_ = false;
state_ = State::STOPPED;
pipeline_->Stop();
@@ -326,13 +358,37 @@ void PipelineController::SetCdm(CdmContext* cdm_context,
}
void PipelineController::OnEnabledAudioTracksChanged(
- const std::vector<MediaTrack::Id>& enabledTrackIds) {
- pipeline_->OnEnabledAudioTracksChanged(enabledTrackIds);
+ const std::vector<MediaTrack::Id>& enabled_track_ids) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ pending_audio_track_change_ = true;
+ pending_audio_track_change_ids_ = enabled_track_ids;
+
+ Dispatch();
}
void PipelineController::OnSelectedVideoTrackChanged(
base::Optional<MediaTrack::Id> selected_track_id) {
- pipeline_->OnSelectedVideoTrackChanged(selected_track_id);
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ pending_video_track_change_ = true;
+ pending_video_track_change_id_ = selected_track_id;
+
+ Dispatch();
+}
+
+void PipelineController::FireOnTrackChangeCompleteForTesting(State set_to) {
+ OnTrackChangeComplete(set_to);
+}
+
+void PipelineController::OnTrackChangeComplete(State previous_state) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (state_ == State::SWITCHING_TRACKS)
+ state_ = previous_state;
+
+ // Other track changed or seek/suspend/resume, etc may be waiting.
+ Dispatch();
}
} // namespace media
diff --git a/chromium/media/filters/pipeline_controller.h b/chromium/media/filters/pipeline_controller.h
index ea2adb8931c..bf80dbb66a9 100644
--- a/chromium/media/filters/pipeline_controller.h
+++ b/chromium/media/filters/pipeline_controller.h
@@ -35,6 +35,7 @@ class MEDIA_EXPORT PipelineController {
PLAYING,
PLAYING_OR_SUSPENDED,
SEEKING,
+ SWITCHING_TRACKS,
SUSPENDING,
SUSPENDED,
RESUMING,
@@ -130,10 +131,14 @@ class MEDIA_EXPORT PipelineController {
PipelineStatistics GetStatistics() const;
void SetCdm(CdmContext* cdm_context, const CdmAttachedCB& cdm_attached_cb);
void OnEnabledAudioTracksChanged(
- const std::vector<MediaTrack::Id>& enabledTrackIds);
+ const std::vector<MediaTrack::Id>& enabled_track_ids);
void OnSelectedVideoTrackChanged(
base::Optional<MediaTrack::Id> selected_track_id);
+ // Used to fire the OnTrackChangeComplete function which is captured in a
+ // OnceCallback, and doesn't play nicely with gmock.
+ void FireOnTrackChangeCompleteForTesting(State set_to);
+
private:
// Attempts to make progress from the current state to the target state.
void Dispatch();
@@ -141,6 +146,8 @@ class MEDIA_EXPORT PipelineController {
// PipelineStaus callback that also carries the target state.
void OnPipelineStatus(State state, PipelineStatus pipeline_status);
+ void OnTrackChangeComplete(State previous_state);
+
// The Pipeline we are managing state for.
std::unique_ptr<Pipeline> pipeline_;
@@ -189,12 +196,22 @@ class MEDIA_EXPORT PipelineController {
// The target time of the active seek; valid while SEEKING or RESUMING.
base::TimeDelta seek_time_;
- // Target state which we will work to achieve. |pending_seek_time_| is only
- // valid when |pending_seek_| is true.
+ // Target state which we will work to achieve.
bool pending_seek_ = false;
- base::TimeDelta pending_seek_time_;
bool pending_suspend_ = false;
bool pending_resume_ = false;
+ bool pending_audio_track_change_ = false;
+ bool pending_video_track_change_ = false;
+
+ // |pending_seek_time_| is only valid when |pending_seek_| is true.
+ // |pending_track_change_type_| is only valid when |pending_track_change_|.
+ // |pending_audio_track_change_ids_| is only valid when
+ // |pending_audio_track_change_|.
+ // |pending_video_track_change_id_| is only valid when
+ // |pending_video_track_change_|.
+ base::TimeDelta pending_seek_time_;
+ std::vector<MediaTrack::Id> pending_audio_track_change_ids_;
+ base::Optional<MediaTrack::Id> pending_video_track_change_id_;
// Set to true during Start(). Indicates that |seeked_cb_| must be fired once
// we've completed startup.
diff --git a/chromium/media/filters/pipeline_controller_unittest.cc b/chromium/media/filters/pipeline_controller_unittest.cc
index bf72dc1da88..36aabf5c3ef 100644
--- a/chromium/media/filters/pipeline_controller_unittest.cc
+++ b/chromium/media/filters/pipeline_controller_unittest.cc
@@ -14,6 +14,7 @@
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/time/time.h"
+#include "media/base/gmock_callback_support.h"
#include "media/base/mock_filters.h"
#include "media/base/pipeline.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -148,8 +149,8 @@ class PipelineControllerTest : public ::testing::Test, public Pipeline::Client {
const AddTextTrackDoneCB& done_cb) override {}
void OnWaitingForDecryptionKey() override {}
void OnVideoNaturalSizeChange(const gfx::Size& size) override {}
- void OnAudioConfigChange(const AudioDecoderConfig& config) {}
- void OnVideoConfigChange(const VideoDecoderConfig& config) {}
+ void OnAudioConfigChange(const AudioDecoderConfig& config) override {}
+ void OnVideoConfigChange(const VideoDecoderConfig& config) override {}
void OnVideoOpacityChange(bool opaque) override {}
void OnVideoAverageKeyframeDistanceUpdate() override {}
void OnAudioDecoderChange(const std::string& name) override {}
@@ -392,4 +393,72 @@ TEST_F(PipelineControllerTest, SeekToSeekTimeNotElided) {
Complete(seek_cb_1);
}
+TEST_F(PipelineControllerTest, VideoTrackChangeWhileSuspending) {
+ Complete(StartPipeline());
+ EXPECT_CALL(*pipeline_, Suspend(_));
+ EXPECT_CALL(*pipeline_, OnSelectedVideoTrackChanged(_, _)).Times(0);
+ pipeline_controller_.Suspend();
+ pipeline_controller_.OnSelectedVideoTrackChanged({});
+}
+
+TEST_F(PipelineControllerTest, AudioTrackChangeWhileSuspending) {
+ Complete(StartPipeline());
+ EXPECT_CALL(*pipeline_, Suspend(_));
+ EXPECT_CALL(*pipeline_, OnEnabledAudioTracksChanged(_, _)).Times(0);
+ pipeline_controller_.Suspend();
+ pipeline_controller_.OnEnabledAudioTracksChanged({});
+}
+
+TEST_F(PipelineControllerTest, AudioTrackChangeDuringVideoTrackChange) {
+ Complete(StartPipeline());
+
+ EXPECT_CALL(*pipeline_, OnSelectedVideoTrackChanged(_, _));
+ pipeline_controller_.OnSelectedVideoTrackChanged({});
+ pipeline_controller_.OnEnabledAudioTracksChanged({});
+ EXPECT_CALL(*pipeline_, OnEnabledAudioTracksChanged(_, _));
+
+ pipeline_controller_.FireOnTrackChangeCompleteForTesting(
+ PipelineController::State::PLAYING);
+
+ pipeline_controller_.FireOnTrackChangeCompleteForTesting(
+ PipelineController::State::PLAYING);
+}
+
+TEST_F(PipelineControllerTest, SuspendDuringVideoTrackChange) {
+ Complete(StartPipeline());
+ EXPECT_CALL(*pipeline_, OnSelectedVideoTrackChanged(_, _));
+ was_resumed_ = false;
+ pipeline_controller_.OnSelectedVideoTrackChanged({});
+ pipeline_controller_.Suspend();
+
+ base::RunLoop loop;
+ EXPECT_CALL(*pipeline_, Suspend(_))
+ .WillOnce(RunOnceClosure(loop.QuitClosure()));
+
+ pipeline_controller_.FireOnTrackChangeCompleteForTesting(
+ PipelineController::State::PLAYING);
+
+ loop.Run();
+ EXPECT_FALSE(was_resumed_);
+}
+
+TEST_F(PipelineControllerTest, SuspendDuringAudioTrackChange) {
+ Complete(StartPipeline());
+ EXPECT_CALL(*pipeline_, OnEnabledAudioTracksChanged(_, _));
+ was_resumed_ = false;
+
+ pipeline_controller_.OnEnabledAudioTracksChanged({});
+ pipeline_controller_.Suspend();
+
+ base::RunLoop loop;
+ EXPECT_CALL(*pipeline_, Suspend(_))
+ .WillOnce(RunOnceClosure(loop.QuitClosure()));
+
+ pipeline_controller_.FireOnTrackChangeCompleteForTesting(
+ PipelineController::State::PLAYING);
+
+ loop.Run();
+ EXPECT_FALSE(was_resumed_);
+}
+
} // namespace media
diff --git a/chromium/media/filters/stream_parser_factory.cc b/chromium/media/filters/stream_parser_factory.cc
index 687ac62080f..8b9ff97eab9 100644
--- a/chromium/media/filters/stream_parser_factory.cc
+++ b/chromium/media/filters/stream_parser_factory.cc
@@ -21,7 +21,7 @@
#include "media/formats/mpeg/mpeg1_audio_stream_parser.h"
#include "media/formats/webm/webm_stream_parser.h"
#include "media/media_buildflags.h"
-#include "third_party/libaom/av1_features.h"
+#include "third_party/libaom/av1_buildflags.h"
#if defined(OS_ANDROID)
#include "media/base/android/media_codec_util.h"
@@ -59,7 +59,9 @@ struct CodecInfo {
HISTOGRAM_DOLBYVISION,
HISTOGRAM_FLAC,
HISTOGRAM_AV1,
- HISTOGRAM_MAX = HISTOGRAM_AV1 // Must be equal to largest logged entry.
+ HISTOGRAM_MPEG_H_AUDIO,
+ HISTOGRAM_MAX =
+ HISTOGRAM_MPEG_H_AUDIO // Must be equal to largest logged entry.
};
const char* pattern;
@@ -206,6 +208,11 @@ static const CodecInfo kEAC3CodecInfo3 = {"mp4a.A6", CodecInfo::AUDIO, nullptr,
CodecInfo::HISTOGRAM_EAC3};
#endif // BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+static const CodecInfo kMpegHAudioCodecInfo = {
+ "mhm1.*", CodecInfo::AUDIO, nullptr, CodecInfo::HISTOGRAM_MPEG_H_AUDIO};
+#endif
+
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
static const CodecInfo kMP3CodecInfo = {nullptr, CodecInfo::AUDIO, nullptr,
@@ -217,15 +224,9 @@ static StreamParser* BuildMP3Parser(const std::vector<std::string>& codecs,
return new MPEG1AudioStreamParser();
}
-bool CheckIfMseFlacInIsobmffEnabled(const std::string& codec_id,
- MediaLog* media_log) {
- return base::FeatureList::IsEnabled(kMseFlacInIsobmff);
-}
-
static const CodecInfo kMPEG4VP09CodecInfo = {
"vp09.*", CodecInfo::VIDEO, nullptr, CodecInfo::HISTOGRAM_VP9};
-static const CodecInfo kMPEG4FLACCodecInfo = {"flac", CodecInfo::AUDIO,
- &CheckIfMseFlacInIsobmffEnabled,
+static const CodecInfo kMPEG4FLACCodecInfo = {"flac", CodecInfo::AUDIO, nullptr,
CodecInfo::HISTOGRAM_FLAC};
static const CodecInfo* const kVideoMP4Codecs[] = {&kMPEG4FLACCodecInfo,
@@ -247,6 +248,9 @@ static const CodecInfo* const kVideoMP4Codecs[] = {&kMPEG4FLACCodecInfo,
#endif
&kMPEG4AACCodecInfo,
&kMPEG2AACLCCodecInfo,
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ &kMpegHAudioCodecInfo,
+#endif
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_AV1_DECODER)
&kAV1CodecInfo,
@@ -257,7 +261,9 @@ static const CodecInfo* const kAudioMP4Codecs[] = {&kMPEG4FLACCodecInfo,
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
&kMPEG4AACCodecInfo,
&kMPEG2AACLCCodecInfo,
-
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ &kMpegHAudioCodecInfo,
+#endif
#if BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
&kAC3CodecInfo1,
&kAC3CodecInfo2,
diff --git a/chromium/media/filters/video_decoder_selector_unittest.cc b/chromium/media/filters/video_decoder_selector_unittest.cc
index 0bf1757ccdd..9b9c6ee555e 100644
--- a/chromium/media/filters/video_decoder_selector_unittest.cc
+++ b/chromium/media/filters/video_decoder_selector_unittest.cc
@@ -64,7 +64,7 @@ class VideoDecoderSelectorTest : public ::testing::Test {
// InitializeDecoderSelector().
}
- ~VideoDecoderSelectorTest() { base::RunLoop().RunUntilIdle(); }
+ ~VideoDecoderSelectorTest() override { base::RunLoop().RunUntilIdle(); }
MOCK_METHOD2(OnDecoderSelected,
void(VideoDecoder*, DecryptingDemuxerStream*));
@@ -477,7 +477,7 @@ TEST_F(VideoDecoderSelectorTest,
UseEncryptedStream();
InitializeDecoderSelector(kDecryptAndDecode, 2);
- // DecryptingAudioDecoder is blacklisted so we'll fallback to use
+ // DecryptingVideoDecoder is blacklisted so we'll fallback to use
// DecryptingDemuxerStream to do decrypt-only.
EXPECT_CALL(*this, OnDecoderOneInitialized(EncryptedConfig(), _, _, _, _, _))
.WillOnce(RunCallback<3>(false));
diff --git a/chromium/media/filters/video_frame_stream_unittest.cc b/chromium/media/filters/video_frame_stream_unittest.cc
index beea5f61662..6c3f7f3006f 100644
--- a/chromium/media/filters/video_frame_stream_unittest.cc
+++ b/chromium/media/filters/video_frame_stream_unittest.cc
@@ -83,6 +83,7 @@ class VideoFrameStreamTest
num_decoded_bytes_unreported_(0),
has_no_key_(false) {
video_frame_stream_.reset(new VideoFrameStream(
+ std::make_unique<VideoFrameStream::StreamTraits>(&media_log_),
message_loop_.task_runner(),
base::Bind(&VideoFrameStreamTest::CreateVideoDecodersForTest,
base::Unretained(this)),
diff --git a/chromium/media/filters/vp8_parser.cc b/chromium/media/filters/vp8_parser.cc
index ea355bbaee0..cfdcce2ccca 100644
--- a/chromium/media/filters/vp8_parser.cc
+++ b/chromium/media/filters/vp8_parser.cc
@@ -92,7 +92,7 @@ bool Vp8Parser::ParseFrameTag(Vp8FrameHeader* fhdr) {
return false;
uint32_t frame_tag = (stream_[2] << 16) | (stream_[1] << 8) | stream_[0];
- fhdr->key_frame =
+ fhdr->frame_type =
static_cast<Vp8FrameHeader::FrameType>(GetBitsAt(frame_tag, 0, 1));
fhdr->version = GetBitsAt(frame_tag, 1, 2);
fhdr->is_experimental = !!GetBitsAt(frame_tag, 3, 1);
diff --git a/chromium/media/filters/vp8_parser.h b/chromium/media/filters/vp8_parser.h
index df56f9be07d..c1a95cc8a98 100644
--- a/chromium/media/filters/vp8_parser.h
+++ b/chromium/media/filters/vp8_parser.h
@@ -88,24 +88,33 @@ struct Vp8EntropyHeader {
};
const size_t kMaxDCTPartitions = 8;
+const size_t kNumVp8ReferenceBuffers = 3;
+
+enum Vp8RefType : size_t {
+ VP8_FRAME_LAST = 0,
+ VP8_FRAME_GOLDEN = 1,
+ VP8_FRAME_ALTREF = 2,
+};
struct MEDIA_EXPORT Vp8FrameHeader {
Vp8FrameHeader();
enum FrameType { KEYFRAME = 0, INTERFRAME = 1 };
- bool IsKeyframe() const { return key_frame == KEYFRAME; }
+ bool IsKeyframe() const { return frame_type == KEYFRAME; }
enum GoldenRefreshMode {
+ NO_GOLDEN_REFRESH = 0,
COPY_LAST_TO_GOLDEN = 1,
COPY_ALT_TO_GOLDEN = 2,
};
enum AltRefreshMode {
+ NO_ALT_REFRESH = 0,
COPY_LAST_TO_ALT = 1,
COPY_GOLDEN_TO_ALT = 2,
};
- FrameType key_frame;
+ FrameType frame_type;
uint8_t version;
bool is_experimental;
bool show_frame;
diff --git a/chromium/media/filters/vpx_video_decoder.cc b/chromium/media/filters/vpx_video_decoder.cc
index 25cef9548e1..2cc20b95dad 100644
--- a/chromium/media/filters/vpx_video_decoder.cc
+++ b/chromium/media/filters/vpx_video_decoder.cc
@@ -158,6 +158,7 @@ void VpxVideoDecoder::Initialize(
void VpxVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
const DecodeCB& decode_cb) {
+ DVLOG(3) << __func__ << ": " << buffer->AsHumanReadableString();
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer);
DCHECK(!decode_cb.is_null());
diff --git a/chromium/media/filters/vpx_video_decoder_fuzzertest.cc b/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
index 3de0f22bd9d..462796c381e 100644
--- a/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
+++ b/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
@@ -9,6 +9,7 @@
#include "base/at_exit.h"
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/command_line.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
@@ -107,8 +108,7 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
decoder.Initialize(
config, true /* low_delay */, nullptr /* cdm_context */,
base::Bind(&OnInitDone, run_loop.QuitClosure(), &success),
- base::Bind(&OnOutputComplete),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::Bind(&OnOutputComplete), base::NullCallback());
run_loop.Run();
if (!success)
return 0;
diff --git a/chromium/media/filters/vpx_video_decoder_unittest.cc b/chromium/media/filters/vpx_video_decoder_unittest.cc
index ce3fd5554d8..273687d27a3 100644
--- a/chromium/media/filters/vpx_video_decoder_unittest.cc
+++ b/chromium/media/filters/vpx_video_decoder_unittest.cc
@@ -6,6 +6,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/run_loop.h"
#include "base/test/scoped_task_environment.h"
#include "build/build_config.h"
@@ -40,7 +41,7 @@ class VpxVideoDecoderTest : public testing::Test {
decoder_->Initialize(
config, false, nullptr, NewExpectedBoolCB(success),
base::Bind(&VpxVideoDecoderTest::FrameReady, base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/formats/BUILD.gn b/chromium/media/formats/BUILD.gn
index 0b58533cbf1..1ed89cb5a83 100644
--- a/chromium/media/formats/BUILD.gn
+++ b/chromium/media/formats/BUILD.gn
@@ -72,7 +72,7 @@ source_set("formats") {
deps = [
"//media/base",
"//skia",
- "//third_party/libaom:av1_features",
+ "//third_party/libaom:av1_buildflags",
]
configs += [
@@ -162,6 +162,8 @@ static_library("test_support") {
visibility = [ "//media:test_support" ]
sources = [
+ "common/stream_parser_test_base.cc",
+ "common/stream_parser_test_base.h",
"webm/cluster_builder.cc",
"webm/cluster_builder.h",
"webm/opus_packet_builder.cc",
@@ -178,22 +180,14 @@ static_library("test_support") {
deps = [
"//base/test:test_support",
"//media/base:test_support",
+ "//testing/gtest",
]
- if (proprietary_codecs) {
+ if (proprietary_codecs && enable_mse_mpeg2ts_stream_parser) {
sources += [
- "common/stream_parser_test_base.cc",
- "common/stream_parser_test_base.h",
+ "mp2t/es_parser_test_base.cc",
+ "mp2t/es_parser_test_base.h",
]
-
- deps += [ "//testing/gtest" ]
-
- if (enable_mse_mpeg2ts_stream_parser) {
- sources += [
- "mp2t/es_parser_test_base.cc",
- "mp2t/es_parser_test_base.h",
- ]
- }
}
}
@@ -202,6 +196,7 @@ source_set("unit_tests") {
sources = [
"ac3/ac3_util_unittest.cc",
"common/offset_byte_queue_unittest.cc",
+ "mpeg/mpeg1_audio_stream_parser_unittest.cc",
"webm/webm_cluster_parser_unittest.cc",
"webm/webm_content_encodings_client_unittest.cc",
"webm/webm_crypto_helpers_unittest.cc",
@@ -234,7 +229,6 @@ source_set("unit_tests") {
"mp4/sample_to_group_iterator_unittest.cc",
"mp4/track_run_iterator_unittest.cc",
"mpeg/adts_stream_parser_unittest.cc",
- "mpeg/mpeg1_audio_stream_parser_unittest.cc",
]
deps += [ "//crypto" ]
diff --git a/chromium/media/formats/ac3/ac3_util.cc b/chromium/media/formats/ac3/ac3_util.cc
index 8d0b7869ec0..2c28262da9e 100644
--- a/chromium/media/formats/ac3/ac3_util.cc
+++ b/chromium/media/formats/ac3/ac3_util.cc
@@ -86,7 +86,7 @@ const uint8_t* FindNextSyncWord(const uint8_t* const begin,
while (current < end - 1) {
if (current[0] == 0x0B && current[1] == 0x77) {
if (current != begin)
- DVLOG(2) << __FUNCTION__ << " skip " << current - begin << " bytes.";
+ DVLOG(2) << __func__ << " skip " << current - begin << " bytes.";
return current;
} else if (current[1] != 0x0B) {
@@ -124,7 +124,7 @@ int ParseAc3SyncFrameSize(Ac3Header& header) {
if (header.sample_rate_code() >= arraysize(kSampleRate) ||
header.ac3_frame_size_code() >=
arraysize(kSyncFrameSizeInWordsFor44kHz)) {
- DVLOG(2) << __FUNCTION__ << " Invalid frame header."
+ DVLOG(2) << __func__ << " Invalid frame header."
<< " fscod:" << header.sample_rate_code()
<< " frmsizecod:" << header.ac3_frame_size_code();
return -1;
@@ -171,15 +171,15 @@ int ParseTotalSampleCount(const uint8_t* data, size_t size, bool is_eac3) {
if (frame_size > 0 && sample_count > 0) {
current += frame_size;
if (current > end) {
- DVLOG(2) << __FUNCTION__ << " Incomplete frame, missing "
- << current - end << " bytes.";
+ DVLOG(2) << __func__ << " Incomplete frame, missing " << current - end
+ << " bytes.";
break;
}
total_sample_count += sample_count;
} else {
DVLOG(2)
- << __FUNCTION__
+ << __func__
<< " Invalid frame, skip 2 bytes to find next synchronization word.";
current += 2;
}
diff --git a/chromium/media/formats/mp2t/es_parser_adts.cc b/chromium/media/formats/mp2t/es_parser_adts.cc
index f27c91bfcde..7a2dbb5c469 100644
--- a/chromium/media/formats/mp2t/es_parser_adts.cc
+++ b/chromium/media/formats/mp2t/es_parser_adts.cc
@@ -9,6 +9,7 @@
#include <vector>
#include "base/logging.h"
+#include "base/optional.h"
#include "base/strings/string_number_conversions.h"
#include "media/base/audio_timestamp_helper.h"
#include "media/base/bit_reader.h"
@@ -215,10 +216,11 @@ bool EsParserAdts::ParseFromEsQueue() {
if (base_decrypt_config) {
std::vector<SubsampleEntry> subsamples;
CalculateSubsamplesForAdtsFrame(adts_frame, &subsamples);
- std::unique_ptr<DecryptConfig> decrypt_config(
- new DecryptConfig(base_decrypt_config->key_id(),
- base_decrypt_config->iv(), subsamples));
- stream_parser_buffer->set_decrypt_config(std::move(decrypt_config));
+ stream_parser_buffer->set_decrypt_config(
+ std::make_unique<DecryptConfig>(
+ base_decrypt_config->encryption_mode(),
+ base_decrypt_config->key_id(), base_decrypt_config->iv(),
+ subsamples, base_decrypt_config->encryption_pattern()));
}
}
#endif
diff --git a/chromium/media/formats/mp2t/es_parser_h264.cc b/chromium/media/formats/mp2t/es_parser_h264.cc
index 3b2099f20a0..887b89879ef 100644
--- a/chromium/media/formats/mp2t/es_parser_h264.cc
+++ b/chromium/media/formats/mp2t/es_parser_h264.cc
@@ -466,9 +466,31 @@ bool EsParserH264::EmitFrame(int64_t access_unit_pos,
stream_parser_buffer->set_timestamp(current_timing_desc.pts);
#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
if (use_hls_sample_aes_ && base_decrypt_config) {
- std::unique_ptr<DecryptConfig> decrypt_config(new DecryptConfig(
- base_decrypt_config->key_id(), base_decrypt_config->iv(), subsamples));
- stream_parser_buffer->set_decrypt_config(std::move(decrypt_config));
+ switch (base_decrypt_config->encryption_mode()) {
+ case EncryptionMode::kUnencrypted:
+ // As |base_decrypt_config| is specified, the stream is encrypted,
+ // so this shouldn't happen.
+ NOTREACHED();
+ break;
+ case EncryptionMode::kCenc:
+ stream_parser_buffer->set_decrypt_config(
+ DecryptConfig::CreateCencConfig(base_decrypt_config->key_id(),
+ base_decrypt_config->iv(),
+ subsamples));
+ break;
+ case EncryptionMode::kCbcs:
+ // Note that for SampleAES the (encrypt,skip) pattern is constant.
+ // If not specified in |base_decrypt_config|, use default values.
+ stream_parser_buffer->set_decrypt_config(
+ DecryptConfig::CreateCbcsConfig(
+ base_decrypt_config->key_id(), base_decrypt_config->iv(),
+ subsamples,
+ base_decrypt_config->HasPattern()
+ ? base_decrypt_config->encryption_pattern()
+ : EncryptionPattern(kSampleAESEncryptBlocks,
+ kSampleAESSkipBlocks)));
+ break;
+ }
}
#endif
return es_adapter_.OnNewBuffer(stream_parser_buffer);
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser.cc b/chromium/media/formats/mp2t/mp2t_stream_parser.cc
index cd4a8bccab6..60ccf910e2d 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser.cc
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser.cc
@@ -9,6 +9,7 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
+#include "base/optional.h"
#include "media/base/media_tracks.h"
#include "media/base/stream_parser_buffer.h"
#include "media/base/text_track_config.h"
@@ -843,8 +844,9 @@ void Mp2tStreamParser::UnregisterCat() {
}
void Mp2tStreamParser::RegisterCencPids(int ca_pid, int pssh_pid) {
- std::unique_ptr<TsSectionCetsEcm> ecm_parser(new TsSectionCetsEcm(base::Bind(
- &Mp2tStreamParser::RegisterDecryptConfig, base::Unretained(this))));
+ std::unique_ptr<TsSectionCetsEcm> ecm_parser(
+ new TsSectionCetsEcm(base::BindRepeating(
+ &Mp2tStreamParser::RegisterNewKeyIdAndIv, base::Unretained(this))));
std::unique_ptr<PidState> ecm_pid_state(
new PidState(ca_pid, PidState::kPidCetsEcm, std::move(ecm_parser)));
ecm_pid_state->Enable();
@@ -883,12 +885,34 @@ void Mp2tStreamParser::RegisterEncryptionScheme(
// Reset the DecryptConfig, so that unless and until a CENC-ECM (containing
// key id and IV) is seen, media data will be considered unencrypted. This is
// similar to the way clear leaders can occur in MP4 containers.
- decrypt_config_.reset(nullptr);
+ decrypt_config_.reset();
}
-void Mp2tStreamParser::RegisterDecryptConfig(const DecryptConfig& config) {
- decrypt_config_.reset(
- new DecryptConfig(config.key_id(), config.iv(), config.subsamples()));
+void Mp2tStreamParser::RegisterNewKeyIdAndIv(const std::string& key_id,
+ const std::string& iv) {
+ if (!iv.empty()) {
+ switch (initial_scheme_.mode()) {
+ case EncryptionScheme::CIPHER_MODE_UNENCRYPTED:
+ decrypt_config_.reset();
+ break;
+ case EncryptionScheme::CIPHER_MODE_AES_CTR:
+ decrypt_config_ = DecryptConfig::CreateCencConfig(key_id, iv, {});
+ break;
+ case EncryptionScheme::CIPHER_MODE_AES_CBC:
+ // MP2 Transport Streams don't always specify the encryption pattern up
+ // front. Instead it is determined later by the stream type. So if the
+ // pattern is unknown, leave it out.
+ EncryptionPattern pattern = initial_scheme_.pattern();
+ if (pattern.IsInEffect()) {
+ decrypt_config_ =
+ DecryptConfig::CreateCbcsConfig(key_id, iv, {}, pattern);
+ } else {
+ decrypt_config_ =
+ DecryptConfig::CreateCbcsConfig(key_id, iv, {}, base::nullopt);
+ }
+ break;
+ }
+ }
}
void Mp2tStreamParser::RegisterPsshBoxes(
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser.h b/chromium/media/formats/mp2t/mp2t_stream_parser.h
index 835fafe7400..e46327b723c 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser.h
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser.h
@@ -125,8 +125,8 @@ class MEDIA_EXPORT Mp2tStreamParser : public StreamParser {
// be the case during an unencrypted portion of a live stream.
void RegisterEncryptionScheme(const EncryptionScheme& scheme);
- // Register the DecryptConfig (parsed from CENC-ECM).
- void RegisterDecryptConfig(const DecryptConfig& config);
+ // Register the new KeyID and IV (parsed from CENC-ECM).
+ void RegisterNewKeyIdAndIv(const std::string& key_id, const std::string& iv);
// Register the PSSH (parsed from CENC-PSSH).
void RegisterPsshBoxes(const std::vector<uint8_t>& init_data);
@@ -173,6 +173,9 @@ class MEDIA_EXPORT Mp2tStreamParser : public StreamParser {
#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
EncryptionScheme initial_scheme_;
+
+ // TODO(jrummell): Rather than store the key_id and iv in a DecryptConfig,
+ // provide a better way to access the last values seen in an ECM packet.
std::unique_ptr<DecryptConfig> decrypt_config_;
#endif
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc b/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
index 137a74246a2..a0ae6df897e 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
@@ -127,8 +127,7 @@ std::string DecryptBuffer(const StreamParserBuffer& buffer,
EXPECT_TRUE(scheme.is_encrypted());
EXPECT_TRUE(scheme.mode() == EncryptionScheme::CIPHER_MODE_AES_CBC);
bool has_pattern = scheme.pattern().IsInEffect();
- EXPECT_TRUE(!has_pattern ||
- scheme.pattern().Matches(EncryptionPattern(1, 9)));
+ EXPECT_TRUE(!has_pattern || scheme.pattern() == EncryptionPattern(1, 9));
std::string key;
EXPECT_TRUE(
diff --git a/chromium/media/formats/mp2t/ts_section_cets_ecm.cc b/chromium/media/formats/mp2t/ts_section_cets_ecm.cc
index 1679c030718..a50dec10152 100644
--- a/chromium/media/formats/mp2t/ts_section_cets_ecm.cc
+++ b/chromium/media/formats/mp2t/ts_section_cets_ecm.cc
@@ -6,15 +6,14 @@
#include "base/logging.h"
#include "media/base/bit_reader.h"
-#include "media/base/decrypt_config.h"
#include "media/formats/mp2t/mp2t_common.h"
namespace media {
namespace mp2t {
TsSectionCetsEcm::TsSectionCetsEcm(
- const RegisterDecryptConfigCb& register_decrypt_config_cb)
- : register_decrypt_config_cb_(register_decrypt_config_cb) {}
+ const RegisterNewKeyIdAndIvCB& register_new_key_id_and_iv_cb)
+ : register_new_key_id_and_iv_cb_(register_new_key_id_and_iv_cb) {}
TsSectionCetsEcm::~TsSectionCetsEcm() {}
@@ -33,7 +32,6 @@ bool TsSectionCetsEcm::Parse(bool payload_unit_start_indicator,
bool key_id_flag;
int au_byte_offset_size;
std::string iv;
- std::vector<SubsampleEntry> subsamples_empty;
// TODO(dougsteed). Currently we allow only a subset of the possible values.
// When we flesh out this implementation to cover all of ISO/IEC 23001-9 we
// will need to generalize this.
@@ -67,8 +65,7 @@ bool TsSectionCetsEcm::Parse(bool payload_unit_start_indicator,
// The CETS-ECM is supposed to use adaptation field stuffing to fill the TS
// packet, so there should be no data left to read.
RCHECK(bit_reader.bits_available() == 0);
- DecryptConfig decrypt_config(key_id, iv, subsamples_empty);
- register_decrypt_config_cb_.Run(decrypt_config);
+ register_new_key_id_and_iv_cb_.Run(key_id, iv);
return true;
}
diff --git a/chromium/media/formats/mp2t/ts_section_cets_ecm.h b/chromium/media/formats/mp2t/ts_section_cets_ecm.h
index c07be4a0d58..6d17b8bff37 100644
--- a/chromium/media/formats/mp2t/ts_section_cets_ecm.h
+++ b/chromium/media/formats/mp2t/ts_section_cets_ecm.h
@@ -7,24 +7,28 @@
#include <stdint.h>
+#include <string>
+
#include "base/callback.h"
#include "base/macros.h"
-#include "media/base/byte_queue.h"
#include "media/formats/mp2t/ts_section.h"
namespace media {
-class DecryptConfig;
-
namespace mp2t {
class TsSectionCetsEcm : public TsSection {
public:
- // RegisterDecryptConfigCb::Run(const DecryptConfig& decrypt_config);
- using RegisterDecryptConfigCb =
- base::Callback<void(const DecryptConfig& decrypt_config)>;
+ // RegisterNewKeyIdAndIvCB() may be called multiple times. From
+ // ISO/IEC 23001-9:2016, section 7.2: "Key/IV information for every
+ // encrypted PID should be carried in a separate ECM PID." So there may be
+ // ECMs for each audio and video stream (and more if key rotation is used).
+ using RegisterNewKeyIdAndIvCB =
+ base::RepeatingCallback<void(const std::string& key_id,
+ const std::string& iv)>;
+
explicit TsSectionCetsEcm(
- const RegisterDecryptConfigCb& register_decrypt_config_cb);
+ const RegisterNewKeyIdAndIvCB& register_new_key_id_and_iv_cb);
~TsSectionCetsEcm() override;
// TsSection implementation.
@@ -35,7 +39,7 @@ class TsSectionCetsEcm : public TsSection {
void Reset() override;
private:
- RegisterDecryptConfigCb register_decrypt_config_cb_;
+ RegisterNewKeyIdAndIvCB register_new_key_id_and_iv_cb_;
DISALLOW_COPY_AND_ASSIGN(TsSectionCetsEcm);
};
diff --git a/chromium/media/formats/mp4/box_definitions.cc b/chromium/media/formats/mp4/box_definitions.cc
index 307419e733f..6def180d143 100644
--- a/chromium/media/formats/mp4/box_definitions.cc
+++ b/chromium/media/formats/mp4/box_definitions.cc
@@ -17,7 +17,7 @@
#include "media/formats/mp4/es_descriptor.h"
#include "media/formats/mp4/rcheck.h"
#include "media/media_buildflags.h"
-#include "third_party/libaom/av1_features.h"
+#include "third_party/libaom/av1_buildflags.h"
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#include "media/formats/mp4/avc.h"
@@ -341,16 +341,25 @@ bool ProtectionSchemeInfo::Parse(BoxReader* reader) {
}
bool ProtectionSchemeInfo::HasSupportedScheme() const {
- FourCC fourCC = type.type;
- if (fourCC == FOURCC_CENC)
+ FourCC four_cc = type.type;
+ if (four_cc == FOURCC_CENC)
return true;
#if BUILDFLAG(ENABLE_CBCS_ENCRYPTION_SCHEME)
- if (fourCC == FOURCC_CBCS)
+ if (four_cc == FOURCC_CBCS)
return true;
#endif
return false;
}
+bool ProtectionSchemeInfo::IsCbcsEncryptionScheme() const {
+#if BUILDFLAG(ENABLE_CBCS_ENCRYPTION_SCHEME)
+ FourCC four_cc = type.type;
+ return (four_cc == FOURCC_CBCS);
+#else
+ return false;
+#endif
+}
+
MovieHeader::MovieHeader()
: version(0),
creation_time(0),
@@ -386,7 +395,7 @@ bool MovieHeader::Parse(BoxReader* reader) {
RCHECK(reader->Read4s(&rate) &&
reader->Read2s(&volume) &&
reader->SkipBytes(10) && // reserved
- reader->SkipBytes(36) && // matrix
+ reader->ReadDisplayMatrix(display_matrix) &&
reader->SkipBytes(24) && // predefined zero
reader->Read4(&next_track_id));
return true;
@@ -427,7 +436,7 @@ bool TrackHeader::Parse(BoxReader* reader) {
reader->Read2s(&alternate_group) &&
reader->Read2s(&volume) &&
reader->SkipBytes(2) && // reserved
- reader->SkipBytes(36) && // matrix
+ reader->ReadDisplayMatrix(display_matrix) &&
reader->Read4(&width) &&
reader->Read4(&height));
@@ -1031,10 +1040,6 @@ bool AudioSampleEntry::Parse(BoxReader* reader) {
// Read the FLACSpecificBox, even if CENC is signalled.
if (format == FOURCC_FLAC ||
(format == FOURCC_ENCA && sinf.format.format == FOURCC_FLAC)) {
- RCHECK_MEDIA_LOGGED(base::FeatureList::IsEnabled(kMseFlacInIsobmff),
- reader->media_log(),
- "MSE support for FLAC in MP4 is not enabled.");
-
RCHECK_MEDIA_LOGGED(reader->ReadChild(&dfla), reader->media_log(),
"Failure parsing FLACSpecificBox (dfLa)");
diff --git a/chromium/media/formats/mp4/box_definitions.h b/chromium/media/formats/mp4/box_definitions.h
index c251c8b0e18..847c6b7e86b 100644
--- a/chromium/media/formats/mp4/box_definitions.h
+++ b/chromium/media/formats/mp4/box_definitions.h
@@ -155,6 +155,7 @@ struct MEDIA_EXPORT ProtectionSchemeInfo : Box {
SchemeInfo info;
bool HasSupportedScheme() const;
+ bool IsCbcsEncryptionScheme() const;
};
struct MEDIA_EXPORT MovieHeader : Box {
@@ -167,6 +168,12 @@ struct MEDIA_EXPORT MovieHeader : Box {
uint64_t duration;
int32_t rate;
int16_t volume;
+ // A 3x3 matrix of [ A B C ]
+ // [ D E F ]
+ // [ U V W ]
+ // Where A-F are 16.16 fixed point decimals
+ // And U, V, W are 2.30 fixed point decimals.
+ DisplayMatrix display_matrix;
uint32_t next_track_id;
};
@@ -180,6 +187,7 @@ struct MEDIA_EXPORT TrackHeader : Box {
int16_t layer;
int16_t alternate_group;
int16_t volume;
+ DisplayMatrix display_matrix; // See MovieHeader.display_matrix
uint32_t width;
uint32_t height;
};
diff --git a/chromium/media/formats/mp4/box_reader.cc b/chromium/media/formats/mp4/box_reader.cc
index bf7e2653c79..cbcd48cff26 100644
--- a/chromium/media/formats/mp4/box_reader.cc
+++ b/chromium/media/formats/mp4/box_reader.cc
@@ -204,6 +204,15 @@ bool BoxReader::ScanChildren() {
return true;
}
+bool BoxReader::ReadDisplayMatrix(DisplayMatrix matrix) {
+ for (int i = 0; i < kDisplayMatrixDimension; i++) {
+ if (!Read4s(&matrix[i])) {
+ return false;
+ }
+ }
+ return true;
+}
+
bool BoxReader::HasChild(Box* child) {
DCHECK(scanned_);
DCHECK(child);
diff --git a/chromium/media/formats/mp4/box_reader.h b/chromium/media/formats/mp4/box_reader.h
index 140818aaa4a..2da377323fd 100644
--- a/chromium/media/formats/mp4/box_reader.h
+++ b/chromium/media/formats/mp4/box_reader.h
@@ -24,6 +24,14 @@
namespace media {
namespace mp4 {
+enum DisplayMatrixSize {
+ kDisplayMatrixWidth = 3,
+ kDisplayMatrixHeight = 3,
+ kDisplayMatrixDimension = kDisplayMatrixHeight * kDisplayMatrixWidth
+};
+
+using DisplayMatrix = int32_t[kDisplayMatrixDimension];
+
class BoxReader;
struct MEDIA_EXPORT Box {
@@ -145,6 +153,10 @@ class MEDIA_EXPORT BoxReader : public BufferReader {
// read or on child absent.
bool MaybeReadChild(Box* child) WARN_UNUSED_RESULT;
+ // ISO-BMFF stream files use a 3x3 matrix consisting of 6 16.16 fixed point
+ // decimals and 3 2.30 fixed point decimals.
+ bool ReadDisplayMatrix(DisplayMatrix matrix);
+
// Read at least one child. False means error or no such child present.
template<typename T> bool ReadChildren(
std::vector<T>* children) WARN_UNUSED_RESULT;
diff --git a/chromium/media/formats/mp4/fourccs.h b/chromium/media/formats/mp4/fourccs.h
index bdf3d5faf5e..6d028ac788a 100644
--- a/chromium/media/formats/mp4/fourccs.h
+++ b/chromium/media/formats/mp4/fourccs.h
@@ -8,7 +8,7 @@
#include <string>
#include "media/media_buildflags.h"
-#include "third_party/libaom/av1_features.h"
+#include "third_party/libaom/av1_buildflags.h"
namespace media {
namespace mp4 {
@@ -67,6 +67,9 @@ enum FourCC {
FOURCC_META = 0x6d657461,
FOURCC_MFHD = 0x6d666864,
FOURCC_MFRA = 0x6d667261,
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ FOURCC_MHM1 = 0x6d686d31,
+#endif
FOURCC_MINF = 0x6d696e66,
FOURCC_MOOF = 0x6d6f6f66,
FOURCC_MOOV = 0x6d6f6f76,
diff --git a/chromium/media/formats/mp4/mp4_stream_parser.cc b/chromium/media/formats/mp4/mp4_stream_parser.cc
index f2027534916..ee14479e26a 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser.cc
@@ -13,6 +13,7 @@
#include "base/callback_helpers.h"
#include "base/logging.h"
+#include "base/numerics/math_constants.h"
#include "base/strings/string_number_conversions.h"
#include "base/time/time.h"
#include "build/build_config.h"
@@ -37,6 +38,7 @@ namespace media {
namespace mp4 {
namespace {
+
const int kMaxEmptySampleLogs = 20;
const int kMaxInvalidConversionLogs = 20;
@@ -89,7 +91,6 @@ MP4StreamParser::MP4StreamParser(const std::set<int>& audio_object_types,
has_flac_(has_flac),
num_empty_samples_skipped_(0),
num_invalid_conversions_(0) {
- DCHECK(!has_flac || base::FeatureList::IsEnabled(kMseFlacInIsobmff));
}
MP4StreamParser::~MP4StreamParser() = default;
@@ -238,13 +239,77 @@ ParseResult MP4StreamParser::ParseBox() {
return ParseResult::kOk;
}
+static inline double FixedToFloatingPoint(const int32_t& i) {
+ return static_cast<double>(i >> 16);
+}
+
+VideoRotation MP4StreamParser::CalculateRotation(const TrackHeader& track,
+ const MovieHeader& movie) {
+ static_assert(kDisplayMatrixDimension == 9, "Display matrix must be 3x3");
+ // 3x3 matrix: [ a b c ]
+ // [ d e f ]
+ // [ x y z ]
+ int32_t rotation_matrix[kDisplayMatrixDimension] = {0};
+
+ // Shift values for fixed point multiplications.
+ const int32_t shifts[kDisplayMatrixHeight] = {16, 16, 30};
+
+ // Matrix multiplication for
+ // track.display_matrix * movie.display_matrix
+ // with special consideration taken that entries a-f are 16.16 fixed point
+ // decimals and x-z are 2.30 fixed point decimals.
+ for (int i = 0; i < kDisplayMatrixWidth; i++) {
+ for (int j = 0; j < kDisplayMatrixHeight; j++) {
+ for (int e = 0; e < kDisplayMatrixHeight; e++) {
+ rotation_matrix[i * kDisplayMatrixHeight + j] +=
+ ((int64_t)track.display_matrix[i * kDisplayMatrixHeight + e] *
+ movie.display_matrix[e * kDisplayMatrixHeight + j]) >>
+ shifts[e];
+ }
+ }
+ }
+
+ // Rotation by angle Θ is represented in the matrix as:
+ // [ cos(Θ), -sin(Θ), ...]
+ // [ sin(Θ), cos(Θ), ...]
+ // [ ..., ..., 1 ]
+ // But we only need cos(Θ) for the angle and sin(Θ) for the quadrant.
+ double angle = acos(FixedToFloatingPoint(rotation_matrix[0]))
+ * 180 / base::kPiDouble;
+
+ if (angle < 0)
+ angle += 360;
+
+ if (angle >= 360)
+ angle -= 360;
+
+ // 16 bits of fixed point decimal is enough to give 6 decimals of precision
+ // to cos(Θ). A delta of ±0.000001 causes acos(cos(Θ)) to differ by a minimum
+ // of 0.0002, which is why we only need to check that the angle is only
+ // accurate to within four decimal places. This is preferred to checking for
+ a more precise accuracy, as the 'double' type is architecture dependent and
+ there may be variance in floating point errors.
+ if (abs(angle - 0) < 1e-4)
+ return VIDEO_ROTATION_0;
+
+ if (abs(angle - 180) < 1e-4)
+ return VIDEO_ROTATION_180;
+
+ if (abs(angle - 90) < 1e-4) {
+ bool quadrant = asin(FixedToFloatingPoint(rotation_matrix[3])) < 0;
+ return quadrant ? VIDEO_ROTATION_90 : VIDEO_ROTATION_270;
+ }
+
+ // TODO(tmathmeyer): Record this event and the faulty matrix somewhere.
+ return VIDEO_ROTATION_0;
+}
+
bool MP4StreamParser::ParseMoov(BoxReader* reader) {
moov_.reset(new Movie);
RCHECK(moov_->Parse(reader));
runs_.reset();
audio_track_ids_.clear();
video_track_ids_.clear();
- is_track_encrypted_.clear();
has_audio_ = false;
has_video_ = false;
@@ -292,12 +357,14 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
? entry.sinf.format.format
: entry.format;
+ if (audio_format != FOURCC_FLAC &&
#if BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
- if (audio_format != FOURCC_MP4A && audio_format != FOURCC_FLAC &&
- audio_format != FOURCC_AC3 && audio_format != FOURCC_EAC3) {
-#else
- if (audio_format != FOURCC_MP4A && audio_format != FOURCC_FLAC) {
+ audio_format != FOURCC_AC3 && audio_format != FOURCC_EAC3 &&
#endif
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ audio_format != FOURCC_MHM1 &&
+#endif
+ audio_format != FOURCC_MP4A) {
MEDIA_LOG(ERROR, media_log_) << "Unsupported audio format 0x"
<< std::hex << entry.format
<< " in stsd box.";
@@ -324,6 +391,13 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
sample_per_second = entry.samplerate;
extra_data = entry.dfla.stream_info;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+#if BUILDFLAG(ENABLE_MPEG_H_AUDIO_DEMUXING)
+ } else if (audio_format == FOURCC_MHM1) {
+ codec = kCodecMpegHAudio;
+ channel_layout = CHANNEL_LAYOUT_BITSTREAM;
+ sample_per_second = entry.samplerate;
+ extra_data = entry.dfla.stream_info;
+#endif
} else {
uint8_t audio_type = entry.esds.object_type;
#if BUILDFLAG(ENABLE_AC3_EAC3_AUDIO_DEMUXING)
@@ -394,7 +468,6 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
return false;
}
bool is_track_encrypted = entry.sinf.info.track_encryption.is_encrypted;
- is_track_encrypted_[audio_track_id] = is_track_encrypted;
EncryptionScheme scheme = Unencrypted();
if (is_track_encrypted) {
scheme = GetEncryptionScheme(entry.sinf);
@@ -463,7 +536,6 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
return false;
}
bool is_track_encrypted = entry.sinf.info.track_encryption.is_encrypted;
- is_track_encrypted_[video_track_id] = is_track_encrypted;
EncryptionScheme scheme = Unencrypted();
if (is_track_encrypted) {
scheme = GetEncryptionScheme(entry.sinf);
@@ -472,8 +544,8 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
}
video_config.Initialize(entry.video_codec, entry.video_codec_profile,
PIXEL_FORMAT_I420, COLOR_SPACE_HD_REC709,
- VIDEO_ROTATION_0, coded_size, visible_rect,
- natural_size,
+ CalculateRotation(track->header, moov_->header),
+ coded_size, visible_rect, natural_size,
// No decoder-specific buffer needed for AVC;
// SPS/PPS are embedded in the video stream
EmptyExtraData(), scheme);
@@ -753,15 +825,12 @@ ParseResult MP4StreamParser::EnqueueSample(BufferQueueMap* buffers) {
if (decrypt_config) {
if (!subsamples.empty()) {
// Create a new config with the updated subsamples.
- decrypt_config.reset(new DecryptConfig(decrypt_config->key_id(),
- decrypt_config->iv(), subsamples));
+ decrypt_config.reset(
+ new DecryptConfig(decrypt_config->encryption_mode(),
+ decrypt_config->key_id(), decrypt_config->iv(),
+ subsamples, decrypt_config->encryption_pattern()));
}
// else, use the existing config.
- } else if (is_track_encrypted_[runs_->track_id()]) {
- // The media pipeline requires a DecryptConfig with an empty |iv|.
- // TODO(ddorwin): Refactor so we do not need a fake key ID ("1");
- decrypt_config.reset(
- new DecryptConfig("1", "", std::vector<SubsampleEntry>()));
}
StreamParserBuffer::Type buffer_type = audio ? DemuxerStream::AUDIO :
diff --git a/chromium/media/formats/mp4/mp4_stream_parser.h b/chromium/media/formats/mp4/mp4_stream_parser.h
index 97bdfd66972..585a789e807 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser.h
+++ b/chromium/media/formats/mp4/mp4_stream_parser.h
@@ -28,6 +28,8 @@ namespace media {
namespace mp4 {
struct Movie;
+struct MovieHeader;
+struct TrackHeader;
class BoxReader;
class MEDIA_EXPORT MP4StreamParser : public StreamParser {
@@ -48,6 +50,10 @@ class MEDIA_EXPORT MP4StreamParser : public StreamParser {
void Flush() override;
bool Parse(const uint8_t* buf, int size) override;
+ // Calculates the rotation value from the track header display matrices.
+ VideoRotation CalculateRotation(const TrackHeader& track,
+ const MovieHeader& movie);
+
private:
enum State {
kWaitingForInit,
@@ -134,7 +140,6 @@ class MEDIA_EXPORT MP4StreamParser : public StreamParser {
const std::set<int> audio_object_types_;
const bool has_sbr_;
const bool has_flac_;
- std::map<uint32_t, bool> is_track_encrypted_;
// Tracks the number of MEDIA_LOGS for skipping empty trun samples.
int num_empty_samples_skipped_;
diff --git a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
index 588ebafa7d3..dac384d7c87 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
@@ -10,6 +10,7 @@
#include <algorithm>
#include <memory>
#include <string>
+#include <tuple>
#include "base/bind.h"
#include "base/bind_helpers.h"
@@ -33,6 +34,7 @@
#include "media/formats/mp4/fourccs.h"
#include "media/media_buildflags.h"
#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest-param-test.h"
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::InSequence;
@@ -470,10 +472,6 @@ TEST_F(MP4StreamParserTest, DemuxingEAC3) {
}
TEST_F(MP4StreamParserTest, Flac) {
- // The feature is disabled by default. Enable it.
- base::test::ScopedFeatureList features;
- features.InitAndEnableFeature(kMseFlacInIsobmff);
-
parser_.reset(new MP4StreamParser(std::set<int>(), false, true));
auto params = GetDefaultInitParametersExpectations();
@@ -485,10 +483,6 @@ TEST_F(MP4StreamParserTest, Flac) {
}
TEST_F(MP4StreamParserTest, Flac192kHz) {
- // The feature is disabled by default. Enable it.
- base::test::ScopedFeatureList features;
- features.InitAndEnableFeature(kMseFlacInIsobmff);
-
parser_.reset(new MP4StreamParser(std::set<int>(), false, true));
auto params = GetDefaultInitParametersExpectations();
@@ -586,5 +580,55 @@ TEST_F(MP4StreamParserTest, MultiTrackFile) {
EXPECT_EQ(audio_track2.language(), "und");
}
+// <cos(θ), sin(θ), θ expressed as a rotation Enum>
+using MatrixRotationTestCaseParam = std::tuple<double, double, VideoRotation>;
+
+class MP4StreamParserRotationMatrixEvaluatorTest
+ : public ::testing::TestWithParam<MatrixRotationTestCaseParam> {
+ public:
+ MP4StreamParserRotationMatrixEvaluatorTest() {
+ std::set<int> audio_object_types;
+ audio_object_types.insert(kISO_14496_3);
+ parser_.reset(new MP4StreamParser(audio_object_types, false, false));
+ }
+
+ protected:
+ std::unique_ptr<MP4StreamParser> parser_;
+};
+
+TEST_P(MP4StreamParserRotationMatrixEvaluatorTest, RotationCalculation) {
+ TrackHeader track_header;
+ MovieHeader movie_header;
+
+ // Identity matrix, with 16.16 and 2.30 fixed points.
+ uint32_t identity_matrix[9] = {1 << 16, 0, 0, 0, 1 << 16, 0, 0, 0, 1 << 30};
+
+ memcpy(movie_header.display_matrix, identity_matrix, sizeof(identity_matrix));
+ memcpy(track_header.display_matrix, identity_matrix, sizeof(identity_matrix));
+
+ MatrixRotationTestCaseParam data = GetParam();
+
+ // Insert fixed point decimal data into the rotation matrix.
+ track_header.display_matrix[0] = std::get<0>(data) * (1 << 16);
+ track_header.display_matrix[4] = std::get<0>(data) * (1 << 16);
+ track_header.display_matrix[1] = -(std::get<1>(data) * (1 << 16));
+ track_header.display_matrix[3] = std::get<1>(data) * (1 << 16);
+
+ EXPECT_EQ(parser_->CalculateRotation(track_header, movie_header),
+ std::get<2>(data));
+}
+
+MatrixRotationTestCaseParam rotation_test_cases[6] = {
+ {1, 0, VIDEO_ROTATION_0}, // cos(0) = 1, sin(0) = 0
+ {0, -1, VIDEO_ROTATION_90}, // cos(90) = 0, sin(90) =-1
+ {-1, 0, VIDEO_ROTATION_180}, // cos(180)=-1, sin(180)= 0
+ {0, 1, VIDEO_ROTATION_270}, // cos(270)= 0, sin(270)= 1
+ {1, 1, VIDEO_ROTATION_0}, // Error case
+ {5, 5, VIDEO_ROTATION_0}, // Error case
+};
+INSTANTIATE_TEST_CASE_P(CheckMath,
+ MP4StreamParserRotationMatrixEvaluatorTest,
+ testing::ValuesIn(rotation_test_cases));
+
} // namespace mp4
} // namespace media
diff --git a/chromium/media/formats/mp4/track_run_iterator.cc b/chromium/media/formats/mp4/track_run_iterator.cc
index 1e35058e6cd..cb797448675 100644
--- a/chromium/media/formats/mp4/track_run_iterator.cc
+++ b/chromium/media/formats/mp4/track_run_iterator.cc
@@ -13,6 +13,8 @@
#include "base/numerics/checked_math.h"
#include "base/numerics/safe_conversions.h"
#include "media/base/demuxer_memory_limit.h"
+#include "media/base/encryption_scheme.h"
+#include "media/base/media_util.h"
#include "media/base/timestamp_constants.h"
#include "media/formats/mp4/rcheck.h"
#include "media/formats/mp4/sample_to_group_iterator.h"
@@ -52,6 +54,8 @@ struct TrackRunInfo {
std::vector<uint8_t> aux_info_sizes; // Populated if default_size == 0.
int aux_info_total_size;
+ EncryptionScheme encryption_scheme;
+
std::vector<CencSampleEncryptionInfoEntry> fragment_sample_encryption_info;
TrackRunInfo();
@@ -342,20 +346,40 @@ bool TrackRunIterator::Init(const MovieFragment& moof) {
traf.sample_group_description.entries;
const TrackEncryption* track_encryption;
+ const ProtectionSchemeInfo* sinf;
tri.is_audio = (stsd.type == kAudio);
if (tri.is_audio) {
RCHECK(!stsd.audio_entries.empty());
if (desc_idx >= stsd.audio_entries.size())
desc_idx = 0;
tri.audio_description = &stsd.audio_entries[desc_idx];
+ sinf = &tri.audio_description->sinf;
track_encryption = &tri.audio_description->sinf.info.track_encryption;
} else {
RCHECK(!stsd.video_entries.empty());
if (desc_idx >= stsd.video_entries.size())
desc_idx = 0;
tri.video_description = &stsd.video_entries[desc_idx];
+ sinf = &tri.video_description->sinf;
track_encryption = &tri.video_description->sinf.info.track_encryption;
}
+
+ if (!sinf->HasSupportedScheme()) {
+ tri.encryption_scheme = Unencrypted();
+ } else {
+#if BUILDFLAG(ENABLE_CBCS_ENCRYPTION_SCHEME)
+ tri.encryption_scheme = EncryptionScheme(
+ sinf->IsCbcsEncryptionScheme()
+ ? EncryptionScheme::CIPHER_MODE_AES_CBC
+ : EncryptionScheme::CIPHER_MODE_AES_CTR,
+ EncryptionPattern(track_encryption->default_crypt_byte_block,
+ track_encryption->default_skip_byte_block));
+#else
+ DCHECK(!sinf->IsCbcsEncryptionScheme());
+ tri.encryption_scheme = AesCtrEncryptionScheme();
+#endif
+ }
+
// Initialize aux_info variables only if no sample encryption entries.
if (sample_encryption_entries_count == 0 &&
traf.auxiliary_offset.offsets.size() > j) {
@@ -670,16 +694,21 @@ bool TrackRunIterator::is_keyframe() const {
return sample_itr_->is_keyframe;
}
-const TrackEncryption& TrackRunIterator::track_encryption() const {
+const ProtectionSchemeInfo& TrackRunIterator::protection_scheme_info() const {
if (is_audio())
- return audio_description().sinf.info.track_encryption;
- return video_description().sinf.info.track_encryption;
+ return audio_description().sinf;
+ return video_description().sinf;
+}
+
+const TrackEncryption& TrackRunIterator::track_encryption() const {
+ return protection_scheme_info().info.track_encryption;
}
std::unique_ptr<DecryptConfig> TrackRunIterator::GetDecryptConfig() {
DCHECK(is_encrypted());
size_t sample_idx = sample_itr_ - run_itr_->samples.begin();
const std::vector<uint8_t>& kid = GetKeyId(sample_idx);
+ std::string key_id(kid.begin(), kid.end());
if (run_itr_->sample_encryption_entries.empty()) {
DCHECK_EQ(0, aux_info_size());
@@ -688,36 +717,60 @@ std::unique_ptr<DecryptConfig> TrackRunIterator::GetDecryptConfig() {
// with full sample encryption. That case will fall through to here.
SampleEncryptionEntry sample_encryption_entry;
if (ApplyConstantIv(sample_idx, &sample_encryption_entry)) {
- return std::unique_ptr<DecryptConfig>(new DecryptConfig(
- std::string(reinterpret_cast<const char*>(&kid[0]), kid.size()),
- std::string(reinterpret_cast<const char*>(
- sample_encryption_entry.initialization_vector),
- arraysize(sample_encryption_entry.initialization_vector)),
- sample_encryption_entry.subsamples));
+ std::string iv(reinterpret_cast<const char*>(
+ sample_encryption_entry.initialization_vector),
+ arraysize(sample_encryption_entry.initialization_vector));
+ switch (run_itr_->encryption_scheme.mode()) {
+ case EncryptionScheme::CIPHER_MODE_UNENCRYPTED:
+ return nullptr;
+ case EncryptionScheme::CIPHER_MODE_AES_CTR:
+ return DecryptConfig::CreateCencConfig(
+ key_id, iv, sample_encryption_entry.subsamples);
+ case EncryptionScheme::CIPHER_MODE_AES_CBC:
+ return DecryptConfig::CreateCbcsConfig(
+ key_id, iv, sample_encryption_entry.subsamples,
+ run_itr_->encryption_scheme.pattern());
+ }
}
#endif
MEDIA_LOG(ERROR, media_log_) << "Sample encryption info is not available.";
- return std::unique_ptr<DecryptConfig>();
+ return nullptr;
}
DCHECK_LT(sample_idx, run_itr_->sample_encryption_entries.size());
const SampleEncryptionEntry& sample_encryption_entry =
run_itr_->sample_encryption_entries[sample_idx];
+ std::string iv(reinterpret_cast<const char*>(
+ sample_encryption_entry.initialization_vector),
+ arraysize(sample_encryption_entry.initialization_vector));
size_t total_size = 0;
if (!sample_encryption_entry.subsamples.empty() &&
(!sample_encryption_entry.GetTotalSizeOfSubsamples(&total_size) ||
total_size != static_cast<size_t>(sample_size()))) {
MEDIA_LOG(ERROR, media_log_) << "Incorrect CENC subsample size.";
- return std::unique_ptr<DecryptConfig>();
+ return nullptr;
}
- return std::unique_ptr<DecryptConfig>(new DecryptConfig(
- std::string(reinterpret_cast<const char*>(&kid[0]), kid.size()),
- std::string(reinterpret_cast<const char*>(
- sample_encryption_entry.initialization_vector),
- arraysize(sample_encryption_entry.initialization_vector)),
- sample_encryption_entry.subsamples));
+#if BUILDFLAG(ENABLE_CBCS_ENCRYPTION_SCHEME)
+ if (protection_scheme_info().IsCbcsEncryptionScheme()) {
+ uint32_t index = GetGroupDescriptionIndex(sample_idx);
+ uint32_t encrypt_blocks =
+ (index == 0)
+ ? track_encryption().default_crypt_byte_block
+ : GetSampleEncryptionInfoEntry(*run_itr_, index)->crypt_byte_block;
+ uint32_t skip_blocks =
+ (index == 0)
+ ? track_encryption().default_skip_byte_block
+ : GetSampleEncryptionInfoEntry(*run_itr_, index)->skip_byte_block;
+ return DecryptConfig::CreateCbcsConfig(
+ key_id, iv, sample_encryption_entry.subsamples,
+ EncryptionPattern(encrypt_blocks, skip_blocks));
+ }
+#endif
+
+ return DecryptConfig::CreateCencConfig(key_id, iv,
+ sample_encryption_entry.subsamples);
}
uint32_t TrackRunIterator::GetGroupDescriptionIndex(
diff --git a/chromium/media/formats/mp4/track_run_iterator.h b/chromium/media/formats/mp4/track_run_iterator.h
index 59521c36c50..213acd1f0f4 100644
--- a/chromium/media/formats/mp4/track_run_iterator.h
+++ b/chromium/media/formats/mp4/track_run_iterator.h
@@ -95,6 +95,7 @@ class MEDIA_EXPORT TrackRunIterator {
private:
bool UpdateCts();
bool ResetRun();
+ const ProtectionSchemeInfo& protection_scheme_info() const;
const TrackEncryption& track_encryption() const;
uint32_t GetGroupDescriptionIndex(uint32_t sample_index) const;
diff --git a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
index 80be6da0546..8717ac299f6 100644
--- a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
+++ b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
@@ -8,7 +8,6 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
-#include "base/message_loop/message_loop.h"
#include "media/base/media_log.h"
#include "media/base/media_tracks.h"
#include "media/base/media_util.h"
diff --git a/chromium/media/formats/webm/webm_crypto_helpers.cc b/chromium/media/formats/webm/webm_crypto_helpers.cc
index 1f8447ab8bd..846e6b7650a 100644
--- a/chromium/media/formats/webm/webm_crypto_helpers.cc
+++ b/chromium/media/formats/webm/webm_crypto_helpers.cc
@@ -106,10 +106,6 @@ bool WebMCreateDecryptConfig(const uint8_t* data,
const uint8_t signal_byte = data[0];
int frame_offset = sizeof(signal_byte);
-
- // Setting the DecryptConfig object of the buffer while leaving the
- // initialization vector empty will tell the decryptor that the frame is
- // unencrypted.
std::string counter_block;
std::vector<SubsampleEntry> subsample_entries;
@@ -145,9 +141,14 @@ bool WebMCreateDecryptConfig(const uint8_t* data,
}
}
- decrypt_config->reset(new DecryptConfig(
- std::string(reinterpret_cast<const char*>(key_id), key_id_size),
- counter_block, subsample_entries));
+ if (counter_block.empty()) {
+ // If the frame is unencrypted the DecryptConfig object should be NULL.
+ decrypt_config->reset();
+ } else {
+ *decrypt_config = DecryptConfig::CreateCencConfig(
+ std::string(reinterpret_cast<const char*>(key_id), key_id_size),
+ counter_block, subsample_entries);
+ }
*data_offset = frame_offset;
return true;
diff --git a/chromium/media/formats/webm/webm_crypto_helpers.h b/chromium/media/formats/webm/webm_crypto_helpers.h
index 21e53f7409d..e1bf6c48115 100644
--- a/chromium/media/formats/webm/webm_crypto_helpers.h
+++ b/chromium/media/formats/webm/webm_crypto_helpers.h
@@ -14,11 +14,11 @@
namespace media {
-// Fills an initialized DecryptConfig, which can be sent to the Decryptor if
-// the stream has potentially encrypted frames. Also sets |data_offset| which
-// indicates where the encrypted data starts. Leaving the IV empty will tell
-// the decryptor that the frame is unencrypted. Returns true if |data| is valid,
-// false otherwise, in which case |decrypt_config| and |data_offset| will not be
+// Fills |decrypt_config|, which can be sent to the Decryptor if the stream
+// has potentially encrypted frames. Also sets |data_offset| which indicates
+// where the encrypted data starts. If the frame is unencrypted
+// |*decrypt_config| will be null. Returns true if |data| is valid, false
+// otherwise, in which case |decrypt_config| and |data_offset| will not be
// changed. Current encrypted WebM request for comments specification is here
// http://wiki.webmproject.org/encryption/webm-encryption-rfc
bool MEDIA_EXPORT
diff --git a/chromium/media/formats/webm/webm_crypto_helpers_unittest.cc b/chromium/media/formats/webm/webm_crypto_helpers_unittest.cc
index e6e184fa184..842cc0d7289 100644
--- a/chromium/media/formats/webm/webm_crypto_helpers_unittest.cc
+++ b/chromium/media/formats/webm/webm_crypto_helpers_unittest.cc
@@ -37,7 +37,7 @@ TEST(WebMCryptoHelpersTest, ClearData) {
sizeof(kKeyId), &decrypt_config,
&data_offset));
EXPECT_EQ(1, data_offset);
- EXPECT_FALSE(decrypt_config->is_encrypted());
+ EXPECT_FALSE(decrypt_config);
}
TEST(WebMCryptoHelpersTest, EncryptedButNotEnoughBytes) {
@@ -68,7 +68,7 @@ TEST(WebMCryptoHelpersTest, EncryptedNotPartitioned) {
ASSERT_TRUE(WebMCreateDecryptConfig(kData, sizeof(kData), kKeyId,
sizeof(kKeyId), &decrypt_config,
&data_offset));
- EXPECT_TRUE(decrypt_config->is_encrypted());
+ EXPECT_TRUE(decrypt_config);
EXPECT_EQ(std::string(kKeyId, kKeyId + sizeof(kKeyId)),
decrypt_config->key_id());
EXPECT_EQ(std::string(kExpectedIv, kExpectedIv + sizeof(kExpectedIv)),
@@ -191,7 +191,7 @@ TEST(WebMCryptoHelpersTest, EncryptedPartitionedEvenNumberOfPartitions) {
ASSERT_TRUE(WebMCreateDecryptConfig(kData, sizeof(kData), kKeyId,
sizeof(kKeyId), &decrypt_config,
&data_offset));
- EXPECT_TRUE(decrypt_config->is_encrypted());
+ EXPECT_TRUE(decrypt_config);
EXPECT_EQ(std::string(kKeyId, kKeyId + sizeof(kKeyId)),
decrypt_config->key_id());
EXPECT_EQ(std::string(kExpectedIv, kExpectedIv + sizeof(kExpectedIv)),
@@ -224,7 +224,7 @@ TEST(WebMCryptoHelpersTest, EncryptedPartitionedOddNumberOfPartitions) {
ASSERT_TRUE(WebMCreateDecryptConfig(kData, sizeof(kData), kKeyId,
sizeof(kKeyId), &decrypt_config,
&data_offset));
- EXPECT_TRUE(decrypt_config->is_encrypted());
+ EXPECT_TRUE(decrypt_config);
EXPECT_EQ(std::string(kKeyId, kKeyId + sizeof(kKeyId)),
decrypt_config->key_id());
EXPECT_EQ(std::string(kExpectedIv, kExpectedIv + sizeof(kExpectedIv)),
@@ -254,7 +254,7 @@ TEST(WebMCryptoHelpersTest, EncryptedPartitionedZeroNumberOfPartitions) {
ASSERT_TRUE(WebMCreateDecryptConfig(kData, sizeof(kData), kKeyId,
sizeof(kKeyId), &decrypt_config,
&data_offset));
- EXPECT_TRUE(decrypt_config->is_encrypted());
+ EXPECT_TRUE(decrypt_config);
EXPECT_EQ(std::string(kKeyId, kKeyId + sizeof(kKeyId)),
decrypt_config->key_id());
EXPECT_EQ(std::string(kExpectedIv, kExpectedIv + sizeof(kExpectedIv)),
diff --git a/chromium/media/formats/webm/webm_parser_unittest.cc b/chromium/media/formats/webm/webm_parser_unittest.cc
index b24fb485046..11f3380417b 100644
--- a/chromium/media/formats/webm/webm_parser_unittest.cc
+++ b/chromium/media/formats/webm/webm_parser_unittest.cc
@@ -27,7 +27,7 @@ enum { kBlockCount = 5 };
class MockWebMParserClient : public WebMParserClient {
public:
- virtual ~MockWebMParserClient() = default;
+ ~MockWebMParserClient() override = default;
// WebMParserClient methods.
MOCK_METHOD1(OnListStart, WebMParserClient*(int));
diff --git a/chromium/media/formats/webm/webm_stream_parser_unittest.cc b/chromium/media/formats/webm/webm_stream_parser_unittest.cc
index c009bb08f9f..cfc3d6fc66c 100644
--- a/chromium/media/formats/webm/webm_stream_parser_unittest.cc
+++ b/chromium/media/formats/webm/webm_stream_parser_unittest.cc
@@ -13,6 +13,7 @@
#include "media/base/mock_media_log.h"
#include "media/base/stream_parser.h"
#include "media/base/test_data_util.h"
+#include "media/base/test_helpers.h"
#include "media/base/text_track_config.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -91,7 +92,7 @@ class WebMStreamParserTest : public testing::Test {
};
TEST_F(WebMStreamParserTest, VerifyMediaTrackMetadata) {
- EXPECT_MEDIA_LOG(testing::HasSubstr("Estimating WebM block duration"))
+ EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimatedAny())
.Times(testing::AnyNumber());
StreamParser::InitParameters params(kInfiniteDuration);
params.detected_audio_track_count = 1;
@@ -118,7 +119,7 @@ TEST_F(WebMStreamParserTest, VerifyMediaTrackMetadata) {
}
TEST_F(WebMStreamParserTest, VerifyDetectedTrack_AudioOnly) {
- EXPECT_MEDIA_LOG(testing::HasSubstr("Estimating WebM block duration"))
+ EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimatedAny())
.Times(testing::AnyNumber());
StreamParser::InitParameters params(kInfiniteDuration);
params.detected_audio_track_count = 1;
@@ -140,7 +141,7 @@ TEST_F(WebMStreamParserTest, VerifyDetectedTrack_VideoOnly) {
}
TEST_F(WebMStreamParserTest, VerifyDetectedTracks_AVText) {
- EXPECT_MEDIA_LOG(testing::HasSubstr("Estimating WebM block duration"))
+ EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimatedAny())
.Times(testing::AnyNumber());
StreamParser::InitParameters params(kInfiniteDuration);
params.detected_audio_track_count = 1;
@@ -153,7 +154,7 @@ TEST_F(WebMStreamParserTest, VerifyDetectedTracks_AVText) {
}
TEST_F(WebMStreamParserTest, ColourElement) {
- EXPECT_MEDIA_LOG(testing::HasSubstr("Estimating WebM block duration"))
+ EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimatedAny())
.Times(testing::AnyNumber());
StreamParser::InitParameters params(kInfiniteDuration);
params.detected_audio_track_count = 0;
@@ -192,4 +193,27 @@ TEST_F(WebMStreamParserTest, ColourElement) {
EXPECT_EQ(mmdata.luminance_min, 30);
}
+TEST_F(WebMStreamParserTest, ColourElementWithUnspecifiedRange) {
+ EXPECT_MEDIA_LOG(WebMSimpleBlockDurationEstimatedAny())
+ .Times(testing::AnyNumber());
+ StreamParser::InitParameters params(kInfiniteDuration);
+ params.detected_audio_track_count = 0;
+ params.detected_video_track_count = 1;
+ params.detected_text_track_count = 0;
+ ParseWebMFile("colour_unspecified_range.webm", params);
+ EXPECT_EQ(media_tracks_->tracks().size(), 1u);
+
+ const auto& video_track = media_tracks_->tracks()[0];
+ EXPECT_EQ(video_track->type(), MediaTrack::Video);
+
+ const VideoDecoderConfig& video_config =
+ media_tracks_->getVideoConfig(video_track->bytestream_track_id());
+
+ VideoColorSpace expected_color_space(VideoColorSpace::PrimaryID::SMPTEST428_1,
+ VideoColorSpace::TransferID::LOG,
+ VideoColorSpace::MatrixID::RGB,
+ gfx::ColorSpace::RangeID::INVALID);
+ EXPECT_EQ(video_config.color_space_info(), expected_color_space);
+}
+
} // namespace media
diff --git a/chromium/media/formats/webm/webm_video_client.cc b/chromium/media/formats/webm/webm_video_client.cc
index 651b88f9a3d..61bc28ed979 100644
--- a/chromium/media/formats/webm/webm_video_client.cc
+++ b/chromium/media/formats/webm/webm_video_client.cc
@@ -6,7 +6,7 @@
#include "media/base/video_decoder_config.h"
#include "media/formats/webm/webm_constants.h"
-#include "third_party/libaom/av1_features.h"
+#include "third_party/libaom/av1_buildflags.h"
namespace media {
diff --git a/chromium/media/gpu/BUILD.gn b/chromium/media/gpu/BUILD.gn
index 35cff6bfbee..28436882c29 100644
--- a/chromium/media/gpu/BUILD.gn
+++ b/chromium/media/gpu/BUILD.gn
@@ -87,6 +87,8 @@ component("gpu") {
defines = [ "MEDIA_GPU_IMPLEMENTATION" ]
sources = [
+ "command_buffer_helper.cc",
+ "command_buffer_helper.h",
"fake_jpeg_decode_accelerator.cc",
"fake_jpeg_decode_accelerator.h",
"fake_video_decode_accelerator.cc",
@@ -111,9 +113,11 @@ component("gpu") {
"//base",
"//gpu",
"//media",
+ "//third_party/mesa:mesa_headers",
"//ui/gfx/geometry",
]
deps = [
+ "//gpu/command_buffer/common:gles2_utils",
"//gpu/command_buffer/service:gles2",
"//gpu/ipc/service",
"//ui/base",
@@ -131,12 +135,12 @@ component("gpu") {
"vt_video_encode_accelerator_mac.cc",
"vt_video_encode_accelerator_mac.h",
]
- deps += [ "//third_party/webrtc/system_wrappers" ]
public_deps += [ "//third_party/webrtc/common_video" ]
libs += [
"CoreFoundation.framework",
"CoreMedia.framework",
"Foundation.framework",
+ "IOSurface.framework",
"QuartzCore.framework",
"VideoToolbox.framework",
]
@@ -144,8 +148,13 @@ component("gpu") {
if (is_android) {
sources += [
+ "android/android_image_reader_abi.h",
+ "android/android_image_reader_compat.cc",
+ "android/android_image_reader_compat.h",
"android/android_video_decode_accelerator.cc",
"android/android_video_decode_accelerator.h",
+ "android/android_video_encode_accelerator.cc",
+ "android/android_video_encode_accelerator.h",
"android/android_video_surface_chooser.h",
"android/android_video_surface_chooser_impl.cc",
"android/android_video_surface_chooser_impl.h",
@@ -166,9 +175,6 @@ component("gpu") {
"android/codec_image_group.h",
"android/codec_wrapper.cc",
"android/codec_wrapper.h",
- "android/command_buffer_stub_wrapper.h",
- "android/command_buffer_stub_wrapper_impl.cc",
- "android/command_buffer_stub_wrapper_impl.h",
"android/content_video_view_overlay.cc",
"android/content_video_view_overlay.h",
"android/content_video_view_overlay_allocator.cc",
@@ -184,6 +190,8 @@ component("gpu") {
"android/surface_chooser_helper.h",
"android/surface_texture_gl_owner.cc",
"android/surface_texture_gl_owner.h",
+ "android/texture_owner.cc",
+ "android/texture_owner.h",
"android/texture_pool.cc",
"android/texture_pool.h",
"android/texture_wrapper.cc",
@@ -196,14 +204,8 @@ component("gpu") {
# TODO(crbug.com/789435): This can be removed once CdmManager is removed.
"//media/mojo:buildflags",
"//services/service_manager/public/cpp:cpp",
+ "//third_party/libyuv",
]
- if (enable_webrtc) {
- deps += [ "//third_party/libyuv" ]
- sources += [
- "android/android_video_encode_accelerator.cc",
- "android/android_video_encode_accelerator.h",
- ]
- }
# TODO(crbug.com/789435): This is needed for AVDA to access the CDM
# directly. Remove this dependency after VDAs are also running as part of
@@ -304,6 +306,8 @@ component("gpu") {
sources += [
"windows/d3d11_cdm_proxy.cc",
"windows/d3d11_cdm_proxy.h",
+ "windows/d3d11_decryptor.cc",
+ "windows/d3d11_decryptor.h",
]
}
}
@@ -313,6 +317,7 @@ source_set("common") {
defines = [ "MEDIA_GPU_IMPLEMENTATION" ]
sources = [
"accelerated_video_decoder.h",
+ "codec_picture.cc",
"codec_picture.h",
"format_utils.cc",
"format_utils.h",
@@ -330,6 +335,8 @@ source_set("common") {
"vp8_decoder.h",
"vp8_picture.cc",
"vp8_picture.h",
+ "vp8_reference_frame_vector.cc",
+ "vp8_reference_frame_vector.h",
"vp9_decoder.cc",
"vp9_decoder.h",
"vp9_picture.cc",
@@ -350,7 +357,7 @@ source_set("common") {
if (is_win || is_android || use_v4l2_codec || use_vaapi) {
test("video_decode_accelerator_unittest") {
sources = [
- "video_accelerator_unittest_helpers.h",
+ "test/video_accelerator_unittest_helpers.h",
]
data = [
@@ -378,8 +385,10 @@ if (is_win || is_android || use_v4l2_codec || use_vaapi) {
if (is_win || is_chromeos || use_v4l2_codec) {
sources += [
- "rendering_helper.cc",
- "rendering_helper.h",
+ "test/rendering_helper.cc",
+ "test/rendering_helper.h",
+ "test/video_decode_accelerator_unittest_helpers.cc",
+ "test/video_decode_accelerator_unittest_helpers.h",
"video_decode_accelerator_unittest.cc",
]
deps += [
@@ -422,6 +431,7 @@ source_set("android_video_decode_accelerator_unittests") {
if (is_android) {
testonly = true
sources = [
+ "android/android_image_reader_compat_unittest.cc",
"android/android_video_decode_accelerator_unittest.cc",
"android/android_video_surface_chooser_impl_unittest.cc",
"android/avda_codec_allocator_unittest.cc",
@@ -434,14 +444,12 @@ source_set("android_video_decode_accelerator_unittests") {
"android/media_codec_video_decoder_unittest.cc",
"android/mock_android_video_surface_chooser.cc",
"android/mock_android_video_surface_chooser.h",
- "android/mock_command_buffer_stub_wrapper.cc",
- "android/mock_command_buffer_stub_wrapper.h",
"android/mock_device_info.cc",
"android/mock_device_info.h",
"android/mock_promotion_hint_aggregator.cc",
"android/mock_promotion_hint_aggregator.h",
- "android/mock_surface_texture_gl_owner.cc",
- "android/mock_surface_texture_gl_owner.h",
+ "android/mock_texture_owner.cc",
+ "android/mock_texture_owner.h",
"android/promotion_hint_aggregator_impl_unittest.cc",
"android/surface_chooser_helper_unittest.cc",
"android/surface_texture_gl_owner_unittest.cc",
@@ -467,6 +475,7 @@ if (use_v4l2_codec || use_vaapi || is_mac || is_win) {
test("video_encode_accelerator_unittest") {
deps = [
"//base",
+ "//base/test:test_support",
"//media:test_support",
"//media/gpu",
"//testing/gtest",
@@ -479,7 +488,7 @@ if (use_v4l2_codec || use_vaapi || is_mac || is_win) {
]
configs += [ "//third_party/libyuv:libyuv_config" ]
sources = [
- "video_accelerator_unittest_helpers.h",
+ "test/video_accelerator_unittest_helpers.h",
"video_encode_accelerator_unittest.cc",
]
if (use_x11) {
@@ -498,7 +507,6 @@ if (use_vaapi) {
"//base/test:test_support",
"//media:test_support",
"//media/gpu",
- "//media/gpu/ipc/service",
"//testing/gtest",
"//third_party:jpeg",
"//third_party/libyuv",
@@ -512,7 +520,7 @@ if (use_vaapi) {
configs += [ "//third_party/libyuv:libyuv_config" ]
sources = [
"jpeg_encode_accelerator_unittest.cc",
- "video_accelerator_unittest_helpers.h",
+ "test/video_accelerator_unittest_helpers.h",
]
if (use_x11) {
deps += [ "//ui/gfx/x" ]
@@ -529,7 +537,6 @@ if (use_v4l2_codec || use_vaapi) {
"//base",
"//media:test_support",
"//media/gpu",
- "//media/gpu/ipc/service",
"//media/mojo/services",
"//testing/gtest",
"//third_party/libyuv",
@@ -543,7 +550,7 @@ if (use_v4l2_codec || use_vaapi) {
configs += [ "//third_party/libyuv:libyuv_config" ]
sources = [
"jpeg_decode_accelerator_unittest.cc",
- "video_accelerator_unittest_helpers.h",
+ "test/video_accelerator_unittest_helpers.h",
]
if (use_x11) {
deps += [ "//ui/gfx/x" ]
@@ -554,12 +561,31 @@ if (use_v4l2_codec || use_vaapi) {
}
}
+static_library("test_support") {
+ visibility = [ "//media:test_support" ]
+ testonly = true
+ sources = [
+ "fake_command_buffer_helper.cc",
+ "fake_command_buffer_helper.h",
+ ]
+ configs += [ "//media:media_config" ]
+ deps = [
+ ":gpu",
+ ]
+ public_deps = [
+ "//base",
+ "//media",
+ ]
+}
+
source_set("unit_tests") {
testonly = true
deps = [
"//base",
+ "//base/test:test_support",
"//media:test_support",
"//media/gpu",
+ "//media/gpu/ipc/service:unit_tests",
"//testing/gmock",
"//testing/gtest",
]
@@ -575,6 +601,7 @@ source_set("unit_tests") {
"windows/d3d11_cdm_proxy_unittest.cc",
"windows/d3d11_mocks.cc",
"windows/d3d11_mocks.h",
+ "windows/d3d11_video_decoder_unittest.cc",
]
libs = [ "dxguid.lib" ]
}
diff --git a/chromium/media/gpu/DEPS b/chromium/media/gpu/DEPS
index f0a927f64d7..41a66a57415 100644
--- a/chromium/media/gpu/DEPS
+++ b/chromium/media/gpu/DEPS
@@ -5,10 +5,9 @@ include_rules = [
"+third_party/libyuv",
"+third_party/v4l-utils",
"+third_party/webrtc/common_video",
- "+third_party/webrtc/system_wrappers",
"+ui/base",
"+ui/display/display_switches.h",
- "+ui/display/manager/chromeos",
+ "+ui/display/manager",
"+ui/display/types",
"+ui/platform_window",
diff --git a/chromium/media/gpu/accelerated_video_decoder.h b/chromium/media/gpu/accelerated_video_decoder.h
index d2c08c2c4a2..67800c501ab 100644
--- a/chromium/media/gpu/accelerated_video_decoder.h
+++ b/chromium/media/gpu/accelerated_video_decoder.h
@@ -9,6 +9,7 @@
#include <stdint.h>
#include "base/macros.h"
+#include "media/base/decrypt_config.h"
#include "media/gpu/media_gpu_export.h"
#include "ui/gfx/geometry/size.h"
@@ -25,8 +26,13 @@ class MEDIA_GPU_EXPORT AcceleratedVideoDecoder {
// Set the buffer at |ptr| of |size| bytes as the current source of encoded
// stream data. Pictures produced as a result of this call should be assigned
- // the passed stream |id|.
- virtual void SetStream(int32_t id, const uint8_t* ptr, size_t size) = 0;
+ // the passed stream |id|. |decrypt_config| may specify the decryption
+ // configuration of the specified buffer, and in that case, Decode() may
+ // return kNoKey.
+ virtual void SetStream(int32_t id,
+ const uint8_t* ptr,
+ size_t size,
+ const DecryptConfig* decrypt_config = nullptr) = 0;
// Have the decoder flush its state and trigger output of all previously
// decoded surfaces. Return false on failure.
@@ -49,6 +55,8 @@ class MEDIA_GPU_EXPORT AcceleratedVideoDecoder {
kRanOutOfSurfaces, // Waiting for the client to free up output surfaces.
kNeedContextUpdate, // Waiting for the client to update decoding context
// with data acquired from the accelerator.
+ kNoKey, // The buffer is encrypted and could not be processed because the
+ // key for decryption is missing.
};
// Try to decode more of the stream, returning decoded frames asynchronously.
diff --git a/chromium/media/gpu/android/android_image_reader_abi.h b/chromium/media/gpu/android/android_image_reader_abi.h
new file mode 100644
index 00000000000..02e1d3871eb
--- /dev/null
+++ b/chromium/media/gpu/android/android_image_reader_abi.h
@@ -0,0 +1,95 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_ABI_H_
+#define MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_ABI_H_
+
+// Minimal binary interface definitions for AImage, AImageReader
+// and ANativeWindow based on include/media/NdkImage.h,
+// include/media/NdkImageReader.h and include/android/native_window_jni.h
+// from the Android NDK for platform level 26+. This is only
+// intended for use from the AndroidImageReader wrapper for building
+// without NDK platform level support, it is not a general-use header
+// and is not complete. Only the functions/data types which
+// are currently needed by media/gpu/android/image_reader_gl_owner.h are
+// included in this ABI
+//
+// Please refer to the API documentation for details:
+// https://developer.android.com/ndk/reference/group/media (AImage and
+// AImageReader)
+// https://developer.android.com/ndk/reference/group/native-activity
+// (ANativeWindow)
+
+#include <android/native_window.h>
+#include <media/NdkMediaError.h>
+
+#include <jni.h>
+#include <stdint.h>
+
+// Use "C" linkage to match the original header file. This isn't strictly
+// required since the file is not declaring global functions, but the types
+// should remain in the global namespace for compatibility, and it's a reminder
+// that forward declarations elsewhere should use "extern "C" to avoid
+// namespace issues.
+extern "C" {
+
+// For AImage
+typedef struct AHardwareBuffer AHardwareBuffer;
+
+typedef struct AImage AImage;
+
+enum AIMAGE_FORMATS {
+ AIMAGE_FORMAT_YUV_420_888 = 0x23,
+ IMAGE_FORMAT_PRIVATE = 0x22
+};
+
+using pAImage_delete = void (*)(AImage* image);
+
+using pAImage_getHardwareBuffer = media_status_t (*)(const AImage* image,
+ AHardwareBuffer** buffer);
+
+using pAImage_getWidth = media_status_t (*)(const AImage* image,
+ int32_t* width);
+
+using pAImage_getHeight = media_status_t (*)(const AImage* image,
+ int32_t* height);
+
+// For AImageReader
+
+typedef struct AImageReader AImageReader;
+
+typedef void (*AImageReader_ImageCallback)(void* context, AImageReader* reader);
+
+typedef struct AImageReader_ImageListener {
+ void* context;
+ AImageReader_ImageCallback onImageAvailable;
+} AImageReader_ImageListener;
+
+using pAImageReader_new = media_status_t (*)(int32_t width,
+ int32_t height,
+ int32_t format,
+ int32_t maxImages,
+ AImageReader** reader);
+
+using pAImageReader_setImageListener =
+ media_status_t (*)(AImageReader* reader,
+ AImageReader_ImageListener* listener);
+
+using pAImageReader_delete = void (*)(AImageReader* reader);
+
+using pAImageReader_getWindow = media_status_t (*)(AImageReader* reader,
+ ANativeWindow** window);
+
+using pAImageReader_acquireLatestImageAsync =
+ media_status_t (*)(AImageReader* reader,
+ AImage** image,
+ int* acquireFenceFd);
+
+// For ANativeWindow
+using pANativeWindow_toSurface = jobject (*)(JNIEnv* env,
+ ANativeWindow* window);
+
+} // extern "C"
+
+#endif // MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_ABI_H_
diff --git a/chromium/media/gpu/android/android_image_reader_compat.cc b/chromium/media/gpu/android/android_image_reader_compat.cc
new file mode 100644
index 00000000000..19230ed84db
--- /dev/null
+++ b/chromium/media/gpu/android/android_image_reader_compat.cc
@@ -0,0 +1,138 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/android/android_image_reader_compat.h"
+
+#include <dlfcn.h>
+
+#include "base/android/build_info.h"
+#include "base/feature_list.h"
+#include "base/logging.h"
+#include "media/base/media_switches.h"
+
+#define LOAD_FUNCTION(lib, func) \
+ do { \
+ func##_ = reinterpret_cast<p##func>(dlsym(lib, #func)); \
+ if (!func##_) { \
+ DLOG(ERROR) << "Unable to load function " << #func; \
+ return false; \
+ } \
+ } while (0)
+
+namespace media {
+
+AndroidImageReader& AndroidImageReader::GetInstance() {
+ // C++11 static local variable initialization is
+ // thread-safe.
+ static base::NoDestructor<AndroidImageReader> instance;
+ return *instance;
+}
+
+bool AndroidImageReader::IsSupported() {
+ return is_supported_;
+}
+
+AndroidImageReader::AndroidImageReader() {
+ is_supported_ =
+ base::FeatureList::IsEnabled(media::kAImageReaderVideoOutput) &&
+ LoadFunctions();
+}
+
+bool AndroidImageReader::LoadFunctions() {
+ // If the Chromium build requires __ANDROID_API__ >= 26 at some
+ // point in the future, we could directly use the global functions instead of
+ // dynamic loading. However, since this would be incompatible with pre-Oreo
+ // devices, this is unlikely to happen in the foreseeable future, so we use
+ // dynamic loading.
+
+ // Functions are not present for android version older than OREO
+ if (base::android::BuildInfo::GetInstance()->sdk_int() <
+ base::android::SDK_VERSION_OREO) {
+ return false;
+ }
+
+ void* libmediandk = dlopen("libmediandk.so", RTLD_NOW);
+ if (libmediandk == nullptr) {
+ LOG(ERROR) << "Couldnt open libmediandk.so";
+ return false;
+ }
+
+ LOAD_FUNCTION(libmediandk, AImage_delete);
+ LOAD_FUNCTION(libmediandk, AImage_getHardwareBuffer);
+ LOAD_FUNCTION(libmediandk, AImage_getWidth);
+ LOAD_FUNCTION(libmediandk, AImage_getHeight);
+ LOAD_FUNCTION(libmediandk, AImageReader_new);
+ LOAD_FUNCTION(libmediandk, AImageReader_setImageListener);
+ LOAD_FUNCTION(libmediandk, AImageReader_delete);
+ LOAD_FUNCTION(libmediandk, AImageReader_getWindow);
+ LOAD_FUNCTION(libmediandk, AImageReader_acquireLatestImageAsync);
+
+ void* libandroid = dlopen("libandroid.so", RTLD_NOW);
+ if (libandroid == nullptr) {
+ LOG(ERROR) << "Couldnt open libandroid.so";
+ return false;
+ }
+
+ LOAD_FUNCTION(libandroid, ANativeWindow_toSurface);
+
+ return true;
+}
+
+void AndroidImageReader::AImage_delete(AImage* image) {
+ AImage_delete_(image);
+}
+
+media_status_t AndroidImageReader::AImage_getHardwareBuffer(
+ const AImage* image,
+ AHardwareBuffer** buffer) {
+ return AImage_getHardwareBuffer_(image, buffer);
+}
+
+media_status_t AndroidImageReader::AImage_getWidth(const AImage* image,
+ int32_t* width) {
+ return AImage_getWidth_(image, width);
+}
+
+media_status_t AndroidImageReader::AImage_getHeight(const AImage* image,
+ int32_t* height) {
+ return AImage_getHeight_(image, height);
+}
+
+media_status_t AndroidImageReader::AImageReader_new(int32_t width,
+ int32_t height,
+ int32_t format,
+ int32_t maxImages,
+ AImageReader** reader) {
+ return AImageReader_new_(width, height, format, maxImages, reader);
+}
+
+media_status_t AndroidImageReader::AImageReader_setImageListener(
+ AImageReader* reader,
+ AImageReader_ImageListener* listener) {
+ return AImageReader_setImageListener_(reader, listener);
+}
+
+void AndroidImageReader::AImageReader_delete(AImageReader* reader) {
+ AImageReader_delete_(reader);
+}
+
+media_status_t AndroidImageReader::AImageReader_getWindow(
+ AImageReader* reader,
+ ANativeWindow** window) {
+ return AImageReader_getWindow_(reader, window);
+}
+
+media_status_t AndroidImageReader::AImageReader_acquireLatestImageAsync(
+ AImageReader* reader,
+ AImage** image,
+ int* acquireFenceFd) {
+ return AImageReader_acquireLatestImageAsync_(reader, image, acquireFenceFd);
+}
+
+jobject AndroidImageReader::ANativeWindow_toSurface(JNIEnv* env,
+ ANativeWindow* window) {
+ return ANativeWindow_toSurface_(env, window);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/android/android_image_reader_compat.h b/chromium/media/gpu/android/android_image_reader_compat.h
new file mode 100644
index 00000000000..4dd2ccdaae2
--- /dev/null
+++ b/chromium/media/gpu/android/android_image_reader_compat.h
@@ -0,0 +1,75 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_COMPAT_H_
+#define MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_COMPAT_H_
+
+#include "base/macros.h"
+#include "base/no_destructor.h"
+#include "media/gpu/android/android_image_reader_abi.h"
+#include "media/gpu/media_gpu_export.h"
+
+namespace media {
+
+// This class provides runtime support for working with AImage, AImageReader and
+// ANativeWindow objects on Android O systems without requiring building for the
+// Android O NDK level. Don't call GetInstance() unless IsSupported() returns
+// true.
+class MEDIA_GPU_EXPORT AndroidImageReader {
+ public:
+ // Thread safe GetInstance.
+ static AndroidImageReader& GetInstance();
+
+ // Check if the image reader usage is supported. This function returns TRUE
+ // if android version is >=OREO, the media flag is enabled and all the
+ // required functions are loaded.
+ bool IsSupported();
+
+ // Naming convention of all the below functions are chosen to exactly match
+ // the function names in the NDK.
+ void AImage_delete(AImage* image);
+ media_status_t AImage_getHardwareBuffer(const AImage* image,
+ AHardwareBuffer** buffer);
+ media_status_t AImage_getWidth(const AImage* image, int32_t* width);
+ media_status_t AImage_getHeight(const AImage* image, int32_t* height);
+ media_status_t AImageReader_new(int32_t width,
+ int32_t height,
+ int32_t format,
+ int32_t maxImages,
+ AImageReader** reader);
+ media_status_t AImageReader_setImageListener(
+ AImageReader* reader,
+ AImageReader_ImageListener* listener);
+ void AImageReader_delete(AImageReader* reader);
+ media_status_t AImageReader_getWindow(AImageReader* reader,
+ ANativeWindow** window);
+ media_status_t AImageReader_acquireLatestImageAsync(AImageReader* reader,
+ AImage** image,
+ int* acquireFenceFd);
+ jobject ANativeWindow_toSurface(JNIEnv* env, ANativeWindow* window);
+
+ private:
+ friend class base::NoDestructor<AndroidImageReader>;
+
+ AndroidImageReader();
+ bool LoadFunctions();
+
+ bool is_supported_;
+ pAImage_delete AImage_delete_;
+ pAImage_getHardwareBuffer AImage_getHardwareBuffer_;
+ pAImage_getWidth AImage_getWidth_;
+ pAImage_getHeight AImage_getHeight_;
+ pAImageReader_new AImageReader_new_;
+ pAImageReader_setImageListener AImageReader_setImageListener_;
+ pAImageReader_delete AImageReader_delete_;
+ pAImageReader_getWindow AImageReader_getWindow_;
+ pAImageReader_acquireLatestImageAsync AImageReader_acquireLatestImageAsync_;
+ pANativeWindow_toSurface ANativeWindow_toSurface_;
+
+ DISALLOW_COPY_AND_ASSIGN(AndroidImageReader);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_ANDROID_ANDROID_IMAGE_READER_COMPAT_H_
diff --git a/chromium/media/gpu/android/android_image_reader_compat_unittest.cc b/chromium/media/gpu/android/android_image_reader_compat_unittest.cc
new file mode 100644
index 00000000000..622225519a9
--- /dev/null
+++ b/chromium/media/gpu/android/android_image_reader_compat_unittest.cc
@@ -0,0 +1,47 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/android/android_image_reader_compat.h"
+
+#include <stdint.h>
+#include <memory>
+
+#include "base/android/build_info.h"
+#include "base/test/scoped_feature_list.h"
+#include "media/base/media_switches.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+class AndroidImageReaderTest : public testing::Test {
+ public:
+ AndroidImageReaderTest() {
+ scoped_feature_list_.InitAndEnableFeature(media::kAImageReaderVideoOutput);
+ }
+ ~AndroidImageReaderTest() override = default;
+
+ private:
+ base::test::ScopedFeatureList scoped_feature_list_;
+};
+
+// Getting instance of AndroidImageReader will invoke AndroidImageReader
+// constructor which will dlopen the mediandk and androidndk .so files and do
+// all the required symbol lookups.
+TEST_F(AndroidImageReaderTest, GetImageReaderInstance) {
+ // It is expected that image reader support will be available from android
+ // version OREO.
+ EXPECT_EQ(AndroidImageReader::GetInstance().IsSupported(),
+ base::android::BuildInfo::GetInstance()->sdk_int() >=
+ base::android::SDK_VERSION_OREO);
+}
+
+// There should be only 1 instance of AndroidImageReader in memory. Hence 2
+// instances should have same memory address.
+TEST_F(AndroidImageReaderTest, CompareImageReaderInstance) {
+ AndroidImageReader& a1 = AndroidImageReader::GetInstance();
+ AndroidImageReader& a2 = AndroidImageReader::GetInstance();
+ ASSERT_EQ(&a1, &a2);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/android/android_video_decode_accelerator.cc b/chromium/media/gpu/android/android_video_decode_accelerator.cc
index 4a66299d9fd..b9624012ad6 100644
--- a/chromium/media/gpu/android/android_video_decode_accelerator.cc
+++ b/chromium/media/gpu/android/android_video_decode_accelerator.cc
@@ -16,7 +16,6 @@
#include "base/command_line.h"
#include "base/containers/queue.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram_macros.h"
#include "base/sys_info.h"
#include "base/task_runner_util.h"
@@ -34,6 +33,7 @@
#include "media/base/limits.h"
#include "media/base/media.h"
#include "media/base/media_switches.h"
+#include "media/base/media_util.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_decoder_config.h"
#include "media/gpu/android/android_video_surface_chooser_impl.h"
@@ -42,6 +42,7 @@
#include "media/gpu/android/device_info.h"
#include "media/gpu/android/promotion_hint_aggregator_impl.h"
#include "media/gpu/shared_memory_region.h"
+#include "media/media_buildflags.h"
#include "media/mojo/buildflags.h"
#include "media/video/picture.h"
#include "services/service_manager/public/cpp/service_context_ref.h"
@@ -73,6 +74,7 @@ enum { kMaxBitstreamsNotifiedInAdvance = 32 };
// support others. Advertise support for all H264 profiles and let the
// MediaCodec fail when decoding if it's not actually supported. It's assumed
// that consumers won't have software fallback for H264 on Android anyway.
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
constexpr VideoCodecProfile kSupportedH264Profiles[] = {
H264PROFILE_BASELINE,
H264PROFILE_MAIN,
@@ -90,6 +92,7 @@ constexpr VideoCodecProfile kSupportedH264Profiles[] = {
constexpr VideoCodecProfile kSupportedHevcProfiles[] = {HEVCPROFILE_MAIN,
HEVCPROFILE_MAIN10};
#endif
+#endif
// Because MediaCodec is thread-hostile (must be poked on a single thread) and
// has no callback mechanism (b/11990118), we must drive it by polling for
@@ -129,6 +132,32 @@ bool ShouldDeferSurfaceCreation(AVDACodecAllocator* codec_allocator,
device_info->SdkVersion() <= base::android::SDK_VERSION_JELLY_BEAN_MR2;
}
+bool HasValidCdm(int cdm_id) {
+#if !BUILDFLAG(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS)
+ return false;
+#else
+ auto cdm = CdmManager::GetInstance()->GetCdm(cdm_id);
+ if (!cdm) {
+ // This could happen during the destruction of the media element and the CDM
+ // and due to IPC CDM could be destroyed before the decoder.
+ DVLOG(1) << "CDM not available.";
+ return false;
+ }
+
+ auto* cdm_context = cdm->GetCdmContext();
+ auto* media_crypto_context =
+ cdm_context ? cdm_context->GetMediaCryptoContext() : nullptr;
+ // This could happen if the CDM is not MediaDrmBridge, which could happen in
+ // test cases.
+ if (!media_crypto_context) {
+ DVLOG(1) << "MediaCryptoContext not available.";
+ return false;
+ }
+
+ return true;
+#endif
+}
+
} // namespace
// AVDAManager manages a RepeatingTimer so that AVDAs can get a regular callback
@@ -256,6 +285,7 @@ AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
base::FeatureList::IsEnabled(media::kUseAndroidOverlayAggressively)),
device_info_(device_info),
force_defer_surface_creation_for_testing_(false),
+ force_allow_software_decoding_for_testing_(false),
overlay_factory_cb_(overlay_factory_cb),
weak_this_factory_(this) {}
@@ -268,13 +298,12 @@ AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
if (!media_crypto_context_)
return;
- DCHECK(cdm_registration_id_);
-
// Cancel previously registered callback (if any).
media_crypto_context_->SetMediaCryptoReadyCB(
MediaCryptoContext::MediaCryptoReadyCB());
- media_crypto_context_->UnregisterPlayer(cdm_registration_id_);
+ if (cdm_registration_id_)
+ media_crypto_context_->UnregisterPlayer(cdm_registration_id_);
#endif // BUILDFLAG(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS)
}
@@ -304,23 +333,29 @@ bool AndroidVideoDecodeAccelerator::Initialize(const Config& config,
codec_config_->initial_expected_coded_size =
config.initial_expected_coded_size;
- if (codec_config_->codec != kCodecVP8 && codec_config_->codec != kCodecVP9 &&
+ switch (codec_config_->codec) {
+ case kCodecVP8:
+ case kCodecVP9:
+ break;
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+ case kCodecH264:
+ codec_config_->csd0 = config.sps;
+ codec_config_->csd1 = config.pps;
+ break;
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
- codec_config_->codec != kCodecHEVC &&
+ case kCodecHEVC:
+ break;
#endif
- codec_config_->codec != kCodecH264) {
- DLOG(ERROR) << "Unsupported profile: " << GetProfileName(config.profile);
- return false;
+#endif
+ default:
+ DLOG(ERROR) << "Unsupported profile: " << GetProfileName(config.profile);
+ return false;
}
codec_config_->software_codec_forbidden =
IsMediaCodecSoftwareDecodingForbidden();
- if (codec_config_->codec == kCodecH264) {
- codec_config_->csd0 = config.sps;
- codec_config_->csd1 = config.pps;
- }
-
codec_config_->container_color_space = config.container_color_space;
codec_config_->hdr_metadata = config.hdr_metadata;
@@ -361,19 +396,20 @@ bool AndroidVideoDecodeAccelerator::Initialize(const Config& config,
codec_allocator_->StartThread(this);
- // For encrypted media, start by initializing the CDM. Otherwise, start with
- // the surface.
- if (config_.is_encrypted()) {
- if (!deferred_initialization_pending_) {
- DLOG(ERROR)
- << "Deferred initialization must be used for encrypted streams";
- return false;
- }
+ // If has valid CDM, start by initializing the CDM, even for clear stream.
+ if (HasValidCdm(config_.cdm_id) && deferred_initialization_pending_) {
InitializeCdm();
- } else {
- StartSurfaceChooser();
+ return state_ != ERROR;
+ }
+
+ // Cannot handle encrypted stream without valid CDM.
+ if (config_.is_encrypted()) {
+ DLOG(ERROR) << "Deferred initialization must be used for encrypted streams";
+ return false;
}
+ StartSurfaceChooser();
+
// Fail / complete / defer initialization.
return state_ != ERROR;
}
@@ -397,7 +433,7 @@ void AndroidVideoDecodeAccelerator::StartSurfaceChooser() {
base::Bind(&AndroidVideoDecodeAccelerator::OnSurfaceTransition,
weak_this_factory_.GetWeakPtr(), nullptr));
- // Handle the sync path, which must use SurfaceTexture anyway. Note that we
+ // Handle the sync path, which must use TextureOwner anyway. Note that we
// check both |during_initialize_| and |deferred_initialization_pending_|,
// since we might get here during deferred surface creation. In that case,
// Decode will call us (after clearing |defer_surface_creation_|), but
@@ -415,7 +451,7 @@ void AndroidVideoDecodeAccelerator::StartSurfaceChooser() {
DCHECK(!config_.overlay_info.HasValidSurfaceId());
DCHECK(!config_.overlay_info.HasValidRoutingToken());
// Note that we might still send feedback to |surface_chooser_|, which might
- // call us back. However, it will only ever tell us to use SurfaceTexture,
+ // call us back. However, it will only ever tell us to use TextureOwner,
// since we have no overlay factory anyway.
OnSurfaceTransition(nullptr);
return;
@@ -437,7 +473,7 @@ void AndroidVideoDecodeAccelerator::StartSurfaceChooser() {
// Notify |surface_chooser_| that we've started. This guarantees that we'll
// get a callback. It might not be a synchronous callback, but we're not in
// the synchronous case. It will be soon, though. For pre-M, we rely on the
- // fact that |surface_chooser_| won't tell us to use a SurfaceTexture while
+ // fact that |surface_chooser_| won't tell us to use a TextureOwner while
// waiting for an overlay to become ready, for example.
surface_chooser_helper_.UpdateChooserState(std::move(factory));
}
@@ -467,9 +503,9 @@ void AndroidVideoDecodeAccelerator::OnSurfaceTransition(
if (!device_info_->IsSetOutputSurfaceSupported())
return;
- // If we're using a SurfaceTexture and are told to switch to one, then just
+ // If we're using a TextureOwner and are told to switch to one, then just
// do nothing. |surface_chooser_| doesn't really know if we've switched to
- // SurfaceTexture or not. Note that it can't ask us to switch to the same
+ // TextureOwner or not. Note that it can't ask us to switch to the same
// overlay we're using, since it's unique_ptr.
if (!overlay && codec_config_->surface_bundle &&
!codec_config_->surface_bundle->overlay) {
@@ -496,10 +532,10 @@ void AndroidVideoDecodeAccelerator::InitializePictureBufferManager() {
// incoming bundle properly, since we don't want to accidentally overwrite
// |surface_bundle| for a codec that's being released elsewhere.
// TODO(liberato): it doesn't make sense anymore for the PictureBufferManager
- // to create the surface texture. We can probably make an overlay impl out
- // of it, and provide the surface texture to |picture_buffer_manager_|.
+ // to create the texture owner. We can probably make an overlay impl out
+ // of it, and provide the texture owner to |picture_buffer_manager_|.
if (!picture_buffer_manager_.Initialize(incoming_bundle_)) {
- NOTIFY_ERROR(PLATFORM_FAILURE, "Could not allocate surface texture");
+ NOTIFY_ERROR(PLATFORM_FAILURE, "Could not allocate texture owner");
incoming_bundle_ = nullptr;
return;
}
@@ -660,9 +696,10 @@ bool AndroidVideoDecodeAccelerator::QueueInput() {
bitstream_buffer.size(),
presentation_timestamp);
} else {
+ // VDAs only support "cenc" encryption scheme.
status = media_codec_->QueueSecureInputBuffer(
input_buf_index, memory, bitstream_buffer.size(), key_id, iv,
- subsamples, config_.encryption_scheme, presentation_timestamp);
+ subsamples, AesCtrEncryptionScheme(), presentation_timestamp);
}
DVLOG(2) << __func__
@@ -928,7 +965,7 @@ void AndroidVideoDecodeAccelerator::SendDecodedFrameToClient(
if (want_promotion_hint) {
picture.set_wants_promotion_hint(true);
// This will prevent it from actually being promoted if it shouldn't be.
- picture.set_surface_texture(!allow_overlay);
+ picture.set_texture_owner(!allow_overlay);
}
// Notify picture ready before calling UseCodecBufferForPictureBuffer() since
@@ -1371,7 +1408,7 @@ void AndroidVideoDecodeAccelerator::OnStopUsingOverlayImmediately(
// We cannot get here if we're before surface allocation, since we transition
// to WAITING_FOR_CODEC (or NO_ERROR, if sync) when we get the surface without
// posting. If we do ever lose the surface before starting codec allocation,
- // then we could just update the config to use a SurfaceTexture and return
+ // then we could just update the config to use a TextureOwner and return
// without changing state.
DCHECK_NE(state_, BEFORE_OVERLAY_INIT);
@@ -1394,7 +1431,7 @@ void AndroidVideoDecodeAccelerator::OnStopUsingOverlayImmediately(
// overlay that was destroyed.
if (state_ == WAITING_FOR_CODEC) {
// What we should do here is to set |incoming_overlay_| to nullptr, to start
- // a transistion to SurfaceTexture. OnCodecConfigured could notice that
+ // a transition to TextureOwner. OnCodecConfigured could notice that
// there's an incoming overlay, and then immediately transition the codec /
// drop and re-allocate the codec using it. However, for CVV, that won't
// work, since CVV-based overlays block the main thread waiting for the
@@ -1418,16 +1455,16 @@ void AndroidVideoDecodeAccelerator::OnStopUsingOverlayImmediately(
picture_buffer_manager_.ReleaseCodecBuffers(output_picture_buffers_);
// If we aren't transitioning to some other surface, then transition to a
- // SurfaceTexture. Remember that, if |incoming_overlay_| is an overlay,
+ // TextureOwner. Remember that, if |incoming_overlay_| is an overlay,
// then it's already ready and can be transitioned to immediately. We were
// just waiting for codec buffers to come back, but we just dropped them.
// Note that we want |incoming_overlay_| to has_value(), but that value
- // should be a nullptr to indicate that we should switch to SurfaceTexture.
+ // should be a nullptr to indicate that we should switch to TextureOwner.
if (!incoming_overlay_)
incoming_overlay_ = std::unique_ptr<AndroidOverlay>();
UpdateSurface();
- // Switching to a SurfaceTexture should never need to wait. If it does,
+ // Switching to a TextureOwner should never need to wait. If it does,
// then the codec might still be using the destroyed surface, which is bad.
return;
}
@@ -1449,42 +1486,22 @@ void AndroidVideoDecodeAccelerator::InitializeCdm() {
DVLOG(2) << __func__ << ": " << config_.cdm_id;
#if !BUILDFLAG(ENABLE_MOJO_MEDIA_IN_GPU_PROCESS)
- NOTIMPLEMENTED();
- NOTIFY_ERROR(PLATFORM_FAILURE, "Cdm support needs mojo in the gpu process");
- return;
+ NOTREACHED();
#else
// Store the CDM to hold a reference to it.
cdm_for_reference_holding_only_ =
CdmManager::GetInstance()->GetCdm(config_.cdm_id);
- if (!cdm_for_reference_holding_only_) {
- // This could happen during the destruction of the media element and the CDM
- // and due to IPC CDM could be destroyed before the decoder.
- NOTIFY_ERROR(PLATFORM_FAILURE, "CDM not available.");
- return;
- }
-
- auto* cdm_context = cdm_for_reference_holding_only_->GetCdmContext();
- media_crypto_context_ =
- cdm_context ? cdm_context->GetMediaCryptoContext() : nullptr;
- if (!media_crypto_context_) {
- NOTIFY_ERROR(PLATFORM_FAILURE, "MediaCryptoContext not available.");
- return;
- }
- // Register CDM callbacks. The callbacks registered will be posted back to
- // this thread via BindToCurrentLoop.
+ // We can DCHECK here and below because we checked HasValidCdm() before
+ // calling InitializeCdm(), and the status shouldn't have changed since then.
+ DCHECK(cdm_for_reference_holding_only_) << "CDM not available";
- // Since |this| holds a reference to the |cdm_|, by the time the CDM is
- // destructed, UnregisterPlayer() must have been called and |this| has been
- // destructed as well. So the |cdm_unset_cb| will never have a chance to be
- // called.
- // TODO(xhwang): Remove |cdm_unset_cb| after it's not used on all platforms.
- cdm_registration_id_ = media_crypto_context_->RegisterPlayer(
- BindToCurrentLoop(base::Bind(&AndroidVideoDecodeAccelerator::OnKeyAdded,
- weak_this_factory_.GetWeakPtr())),
- base::DoNothing());
+ media_crypto_context_ =
+ cdm_for_reference_holding_only_->GetCdmContext()->GetMediaCryptoContext();
+ DCHECK(media_crypto_context_) << "MediaCryptoContext not available.";
- // Deferred initialization will continue in OnMediaCryptoReady().
+ // Deferred initialization will continue in OnMediaCryptoReady(). The callback
+ // registered will be posted back to this thread via BindToCurrentLoop.
media_crypto_context_->SetMediaCryptoReadyCB(BindToCurrentLoop(
base::Bind(&AndroidVideoDecodeAccelerator::OnMediaCryptoReady,
weak_this_factory_.GetWeakPtr())));
@@ -1495,14 +1512,25 @@ void AndroidVideoDecodeAccelerator::OnMediaCryptoReady(
JavaObjectPtr media_crypto,
bool requires_secure_video_codec) {
DVLOG(1) << __func__;
-
DCHECK(media_crypto);
if (media_crypto->is_null()) {
- LOG(ERROR) << "MediaCrypto is not available, can't play encrypted stream.";
- cdm_for_reference_holding_only_ = nullptr;
+ media_crypto_context_->SetMediaCryptoReadyCB(
+ MediaCryptoContext::MediaCryptoReadyCB());
media_crypto_context_ = nullptr;
- NOTIFY_ERROR(PLATFORM_FAILURE, "MediaCrypto is not available");
+ cdm_for_reference_holding_only_ = nullptr;
+
+ if (config_.is_encrypted()) {
+ LOG(ERROR)
+ << "MediaCrypto is not available, can't play encrypted stream.";
+ NOTIFY_ERROR(PLATFORM_FAILURE, "MediaCrypto is not available");
+ return;
+ }
+
+ // MediaCrypto is not available, but the stream is clear. So we can still
+ // play the current stream. But if we switch to an encrypted stream playback
+ // will fail.
+ StartSurfaceChooser();
return;
}
@@ -1511,10 +1539,21 @@ void AndroidVideoDecodeAccelerator::OnMediaCryptoReady(
DCHECK(!media_codec_);
DCHECK(deferred_initialization_pending_);
+ // Since |this| holds a reference to the |cdm_|, by the time the CDM is
+ // destructed, UnregisterPlayer() must have been called and |this| has been
+ // destructed as well. So the |cdm_unset_cb| will never have a chance to be
+ // called.
+ // TODO(xhwang): Remove |cdm_unset_cb| after it's not used on all platforms.
+ cdm_registration_id_ = media_crypto_context_->RegisterPlayer(
+ BindToCurrentLoop(base::Bind(&AndroidVideoDecodeAccelerator::OnKeyAdded,
+ weak_this_factory_.GetWeakPtr())),
+ base::DoNothing());
+
codec_config_->media_crypto = std::move(media_crypto);
codec_config_->requires_secure_codec = requires_secure_video_codec;
+
// Request a secure surface in all cases. For L3, it's okay if we fall back
- // to SurfaceTexture rather than fail composition. For L1, it's required.
+ // to TextureOwner rather than fail composition. For L1, it's required.
// It's also required if the command line says so.
surface_chooser_helper_.SetSecureSurfaceMode(
requires_secure_video_codec
@@ -1678,6 +1717,7 @@ AndroidVideoDecodeAccelerator::GetCapabilities(
}
}
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
for (const auto& supported_profile : kSupportedH264Profiles) {
SupportedProfile profile;
profile.profile = supported_profile;
@@ -1689,6 +1729,17 @@ AndroidVideoDecodeAccelerator::GetCapabilities(
profiles.push_back(profile);
}
+#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
+ for (const auto& supported_profile : kSupportedHevcProfiles) {
+ SupportedProfile profile;
+ profile.profile = supported_profile;
+ profile.min_resolution.SetSize(0, 0);
+ profile.max_resolution.SetSize(3840, 2160);
+ profiles.push_back(profile);
+ }
+#endif
+#endif
+
capabilities.flags = Capabilities::SUPPORTS_DEFERRED_INITIALIZATION |
Capabilities::NEEDS_ALL_PICTURE_BUFFERS_TO_DECODE |
Capabilities::SUPPORTS_ENCRYPTED_STREAMS;
@@ -1704,16 +1755,6 @@ AndroidVideoDecodeAccelerator::GetCapabilities(
capabilities.flags |= Capabilities::SUPPORTS_SET_EXTERNAL_OUTPUT_SURFACE;
}
-#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
- for (const auto& supported_profile : kSupportedHevcProfiles) {
- SupportedProfile profile;
- profile.profile = supported_profile;
- profile.min_resolution.SetSize(0, 0);
- profile.max_resolution.SetSize(3840, 2160);
- profiles.push_back(profile);
- }
-#endif
-
return capabilities;
}
@@ -1721,8 +1762,10 @@ bool AndroidVideoDecodeAccelerator::IsMediaCodecSoftwareDecodingForbidden()
const {
// Prevent MediaCodec from using its internal software decoders when we have
// more secure and up to date versions in the renderer process.
- return !config_.is_encrypted() && (codec_config_->codec == kCodecVP8 ||
- codec_config_->codec == kCodecVP9);
+ return !config_.is_encrypted() &&
+ (codec_config_->codec == kCodecVP8 ||
+ codec_config_->codec == kCodecVP9) &&
+ !force_allow_software_decoding_for_testing_;
}
bool AndroidVideoDecodeAccelerator::UpdateSurface() {
@@ -1747,7 +1790,7 @@ bool AndroidVideoDecodeAccelerator::UpdateSurface() {
// wouldn't be necessarily true anymore.
// Also note that we might not have switched surfaces yet, which is also bad
// for OnSurfaceDestroyed, because of BEFORE_OVERLAY_INIT. Shouldn't
- // happen with SurfaceTexture, and OnSurfaceDestroyed checks for it. In
+ // happen with TextureOwner, and OnSurfaceDestroyed checks for it. In
// either case, we definitely should not still have an incoming bundle; it
// should have been dropped.
DCHECK(!incoming_bundle_);
diff --git a/chromium/media/gpu/android/android_video_decode_accelerator.h b/chromium/media/gpu/android/android_video_decode_accelerator.h
index c8e31abb886..a58c8cc4aef 100644
--- a/chromium/media/gpu/android/android_video_decode_accelerator.h
+++ b/chromium/media/gpu/android/android_video_decode_accelerator.h
@@ -120,7 +120,7 @@ class MEDIA_GPU_EXPORT AndroidVideoDecodeAccelerator
// |surface_chooser_| with our initial factory from VDA::Config.
void StartSurfaceChooser();
- // Start a transition to an overlay, or, if |!overlay|, SurfaceTexture. The
+ // Start a transition to an overlay, or, if |!overlay|, TextureOwner. The
// transition doesn't have to be immediate; we'll favor not dropping frames.
void OnSurfaceTransition(std::unique_ptr<AndroidOverlay> overlay);
@@ -386,7 +386,7 @@ class MEDIA_GPU_EXPORT AndroidVideoDecodeAccelerator
scoped_refptr<AVDASurfaceBundle> incoming_bundle_;
// If we have been given an overlay to use, then this is it. If we've been
- // told to move to SurfaceTexture, then this will be value() == nullptr.
+ // told to move to TextureOwner, then this will be value() == nullptr.
base::Optional<std::unique_ptr<AndroidOverlay>> incoming_overlay_;
SurfaceChooserHelper surface_chooser_helper_;
@@ -395,6 +395,8 @@ class MEDIA_GPU_EXPORT AndroidVideoDecodeAccelerator
bool force_defer_surface_creation_for_testing_;
+ bool force_allow_software_decoding_for_testing_;
+
// Optional factory to produce mojo AndroidOverlay instances.
AndroidOverlayMojoFactoryCB overlay_factory_cb_;
@@ -406,7 +408,7 @@ class MEDIA_GPU_EXPORT AndroidVideoDecodeAccelerator
// Most recently cached frame information, so that we can dispatch it without
// recomputing it on every frame. It changes very rarely.
SurfaceChooserHelper::FrameInformation cached_frame_information_ =
- SurfaceChooserHelper::FrameInformation::SURFACETEXTURE_INSECURE;
+ SurfaceChooserHelper::FrameInformation::NON_OVERLAY_INSECURE;
// WeakPtrFactory for posting tasks back to |this|.
base::WeakPtrFactory<AndroidVideoDecodeAccelerator> weak_this_factory_;
diff --git a/chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc b/chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc
index 65ddf27ed4a..a947cc548ea 100644
--- a/chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/android/android_video_decode_accelerator_unittest.cc
@@ -11,7 +11,6 @@
#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/weak_ptr.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/test/scoped_task_environment.h"
@@ -31,6 +30,7 @@
#include "media/gpu/android/fake_codec_allocator.h"
#include "media/gpu/android/mock_android_video_surface_chooser.h"
#include "media/gpu/android/mock_device_info.h"
+#include "media/media_buildflags.h"
#include "media/video/picture.h"
#include "media/video/video_decode_accelerator.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -84,10 +84,11 @@ class MockVDAClient : public VideoDecodeAccelerator::Client {
} // namespace
-class AndroidVideoDecodeAcceleratorTest : public testing::Test {
+class AndroidVideoDecodeAcceleratorTest
+ : public testing::TestWithParam<VideoCodecProfile> {
public:
// Default to baseline H264 because it's always supported.
- AndroidVideoDecodeAcceleratorTest() : config_(H264PROFILE_BASELINE) {}
+ AndroidVideoDecodeAcceleratorTest() : config_(GetParam()) {}
void SetUp() override {
ASSERT_TRUE(gl::init::InitializeGLOneOff());
@@ -138,6 +139,7 @@ class AndroidVideoDecodeAcceleratorTest : public testing::Test {
vda_.reset(avda);
avda->force_defer_surface_creation_for_testing_ =
force_defer_surface_creation;
+ avda->force_allow_software_decoding_for_testing_ = true;
bool result = vda_->Initialize(config_, &client_);
base::RunLoop().RunUntilIdle();
@@ -169,16 +171,16 @@ class AndroidVideoDecodeAcceleratorTest : public testing::Test {
base::RunLoop().RunUntilIdle();
}
- void InitializeAVDAWithSurfaceTexture() {
+ void InitializeAVDAWithTextureOwner() {
ASSERT_TRUE(InitializeAVDA());
base::RunLoop().RunUntilIdle();
- // We do not expect a factory, since we are using SurfaceTexture.
+ // We do not expect a factory, since we are using TextureOwner.
ASSERT_FALSE(chooser_->factory_);
// Set the expectations first, since ProvideOverlay might cause callbacks.
EXPECT_CALL(*codec_allocator_,
MockCreateMediaCodecAsync(nullptr, NotNull()));
- chooser_->ProvideSurfaceTexture();
+ chooser_->ProvideTextureOwner();
// Provide the codec so that we can check if it's freed properly.
EXPECT_CALL(client_, NotifyInitializationComplete(true));
@@ -236,14 +238,14 @@ class AndroidVideoDecodeAcceleratorTest : public testing::Test {
}
};
-TEST_F(AndroidVideoDecodeAcceleratorTest, ConfigureUnsupportedCodec) {
+TEST_P(AndroidVideoDecodeAcceleratorTest, ConfigureUnsupportedCodec) {
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
config_ = VideoDecodeAccelerator::Config(VIDEO_CODEC_PROFILE_UNKNOWN);
ASSERT_FALSE(InitializeAVDA());
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
+TEST_P(AndroidVideoDecodeAcceleratorTest,
ConfigureSupportedCodecSynchronously) {
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
@@ -257,7 +259,7 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
testing::Mock::VerifyAndClearExpectations(chooser_);
}
-TEST_F(AndroidVideoDecodeAcceleratorTest, FailingToCreateACodecSyncIsAnError) {
+TEST_P(AndroidVideoDecodeAcceleratorTest, FailingToCreateACodecSyncIsAnError) {
// Failuew to create a codec during sync init should cause Initialize to fail.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
@@ -268,7 +270,7 @@ TEST_F(AndroidVideoDecodeAcceleratorTest, FailingToCreateACodecSyncIsAnError) {
ASSERT_FALSE(InitializeAVDA());
}
-TEST_F(AndroidVideoDecodeAcceleratorTest, FailingToCreateACodecAsyncIsAnError) {
+TEST_P(AndroidVideoDecodeAcceleratorTest, FailingToCreateACodecAsyncIsAnError) {
// Verify that a null codec signals error for async init when it doesn't get a
// mediacodec instance.
//
@@ -285,14 +287,14 @@ TEST_F(AndroidVideoDecodeAcceleratorTest, FailingToCreateACodecAsyncIsAnError) {
EXPECT_CALL(client_, NotifyInitializationComplete(false));
ASSERT_TRUE(InitializeAVDA());
- chooser_->ProvideSurfaceTexture();
+ chooser_->ProvideTextureOwner();
codec_allocator_->ProvideNullCodecAsync();
// Make sure that codec allocation has happened before destroying the VDA.
testing::Mock::VerifyAndClearExpectations(codec_allocator_.get());
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
+TEST_P(AndroidVideoDecodeAcceleratorTest,
LowEndDevicesSucceedInitWithoutASurface) {
// If AVDA decides that we should defer surface creation, then it should
// signal success before we provide a surface. It should still ask for a
@@ -311,28 +313,27 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
InitializeAVDA(force_defer_surface_creation);
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
- AsyncInitWithSurfaceTextureAndDelete) {
- // When configuring with a SurfaceTexture and deferred init, we should be
+TEST_P(AndroidVideoDecodeAcceleratorTest, AsyncInitWithTextureOwnerAndDelete) {
+ // When configuring with a TextureOwner and deferred init, we should be
// asked for a codec, and be notified of init success if we provide one. When
- // AVDA is destroyed, it should release the codec and surface texture.
+ // AVDA is destroyed, it should release the codec and texture owner.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
- InitializeAVDAWithSurfaceTexture();
+ InitializeAVDAWithTextureOwner();
// Delete the VDA, and make sure that it tries to free the codec and the right
- // surface texture.
+ // texture owner.
EXPECT_CALL(
*codec_allocator_,
MockReleaseMediaCodec(codec_allocator_->most_recent_codec,
codec_allocator_->most_recent_overlay,
- codec_allocator_->most_recent_surface_texture));
+ codec_allocator_->most_recent_texture_owner));
codec_allocator_->most_recent_codec_destruction_observer->ExpectDestruction();
vda_ = nullptr;
base::RunLoop().RunUntilIdle();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest, AsyncInitWithSurfaceAndDelete) {
+TEST_P(AndroidVideoDecodeAcceleratorTest, AsyncInitWithSurfaceAndDelete) {
// When |config_| specifies a surface, we should be given a factory during
// startup for it. When |chooser_| provides an overlay, the codec should be
// allocated using it. Shutdown should provide the overlay when releasing the
@@ -347,21 +348,21 @@ TEST_F(AndroidVideoDecodeAcceleratorTest, AsyncInitWithSurfaceAndDelete) {
*codec_allocator_,
MockReleaseMediaCodec(codec_allocator_->most_recent_codec,
codec_allocator_->most_recent_overlay,
- codec_allocator_->most_recent_surface_texture));
+ codec_allocator_->most_recent_texture_owner));
codec_allocator_->most_recent_codec_destruction_observer->ExpectDestruction();
vda_ = nullptr;
base::RunLoop().RunUntilIdle();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
- SwitchesToSurfaceTextureWhenSurfaceDestroyed) {
+TEST_P(AndroidVideoDecodeAcceleratorTest,
+ SwitchesToTextureOwnerWhenSurfaceDestroyed) {
// Provide a surface, and a codec, then destroy the surface. AVDA should use
- // SetSurface to switch to SurfaceTexture.
+ // SetSurface to switch to TextureOwner.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
InitializeAVDAWithOverlay();
- // It would be nice if we knew that this was a surface texture. As it is, we
+ // It would be nice if we knew that this was a texture owner. As it is, we
// just destroy the VDA and expect that we're provided with one. Hopefully,
// AVDA is actually calling SetSurface properly.
EXPECT_CALL(*codec_allocator_->most_recent_codec, SetSurface(_))
@@ -378,9 +379,9 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
base::RunLoop().RunUntilIdle();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest, SwitchesToSurfaceTextureEventually) {
+TEST_P(AndroidVideoDecodeAcceleratorTest, SwitchesToTextureOwnerEventually) {
// Provide a surface, and a codec, then request that AVDA switches to a
- // surface texture. Verify that it does.
+ // texture owner. Verify that it does.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
InitializeAVDAWithOverlay();
@@ -388,12 +389,12 @@ TEST_F(AndroidVideoDecodeAcceleratorTest, SwitchesToSurfaceTextureEventually) {
EXPECT_CALL(*codec_allocator_->most_recent_codec, SetSurface(_))
.WillOnce(Return(true));
- // Note that it's okay if |avda_| switches before ProvideSurfaceTexture
+ // Note that it's okay if |avda_| switches before ProvideTextureOwner
// returns, since it has no queued output anyway.
- chooser_->ProvideSurfaceTexture();
+ chooser_->ProvideTextureOwner();
LetAVDAUpdateSurface();
- // Verify that we're now using some surface texture.
+ // Verify that we're now using some texture owner.
EXPECT_CALL(*codec_allocator_,
MockReleaseMediaCodec(codec_allocator_->most_recent_codec,
nullptr, NotNull()));
@@ -402,10 +403,10 @@ TEST_F(AndroidVideoDecodeAcceleratorTest, SwitchesToSurfaceTextureEventually) {
base::RunLoop().RunUntilIdle();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
+TEST_P(AndroidVideoDecodeAcceleratorTest,
SetSurfaceFailureDoesntSwitchSurfaces) {
// Initialize AVDA with a surface, then request that AVDA switches to a
- // surface texture. When it tries to UpdateSurface, pretend to fail. AVDA
+ // texture owner. When it tries to UpdateSurface, pretend to fail. AVDA
// should notify error, and also release the original surface.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
@@ -418,17 +419,17 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
.Times(1);
codec_allocator_->most_recent_codec_destruction_observer
->VerifyAndClearExpectations();
- chooser_->ProvideSurfaceTexture();
+ chooser_->ProvideTextureOwner();
LetAVDAUpdateSurface();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
+TEST_P(AndroidVideoDecodeAcceleratorTest,
SwitchToSurfaceAndBackBeforeSetSurface) {
// Ask AVDA to switch from ST to overlay, then back to ST before it has a
// chance to do the first switch. It should simply drop the overlay.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
- InitializeAVDAWithSurfaceTexture();
+ InitializeAVDAWithTextureOwner();
// Don't let AVDA switch immediately, else it could choose to SetSurface when
// it first gets the overlay.
@@ -446,15 +447,15 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
// Now it is expected to drop the overlay.
observer->ExpectDestruction();
- // While the incoming surface is pending, switch back to SurfaceTexture.
- chooser_->ProvideSurfaceTexture();
+ // While the incoming surface is pending, switch back to TextureOwner.
+ chooser_->ProvideTextureOwner();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
+TEST_P(AndroidVideoDecodeAcceleratorTest,
ChangingOutputSurfaceVoluntarilyWithoutSetSurfaceIsIgnored) {
- // If we ask AVDA to change to SurfaceTexture should be ignored on platforms
+ // If we ask AVDA to change to TextureOwner should be ignored on platforms
// that don't support SetSurface (pre-M or blacklisted). It should also
- // ignore SurfaceTexture => overlay, but we don't check that.
+ // ignore TextureOwner => overlay, but we don't check that.
//
// Also note that there are other probably reasonable things to do (like
// signal an error), but we want to be sure that it doesn't try to SetSurface.
@@ -470,12 +471,12 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
InitializeAVDAWithOverlay();
EXPECT_CALL(*codec_allocator_->most_recent_codec, SetSurface(_)).Times(0);
- // This should not switch to SurfaceTexture.
- chooser_->ProvideSurfaceTexture();
+ // This should not switch to TextureOwner.
+ chooser_->ProvideTextureOwner();
LetAVDAUpdateSurface();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
+TEST_P(AndroidVideoDecodeAcceleratorTest,
OnSurfaceDestroyedWithoutSetSurfaceFreesTheCodec) {
// If AVDA receives OnSurfaceDestroyed without support for SetSurface, then it
// should free the codec.
@@ -501,21 +502,21 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
->VerifyAndClearExpectations();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
- MultipleSurfaceTextureCallbacksAreIgnored) {
+TEST_P(AndroidVideoDecodeAcceleratorTest,
+ MultipleTextureOwnerCallbacksAreIgnored) {
// Ask AVDA to switch to ST when it's already using ST, nothing should happen.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
- InitializeAVDAWithSurfaceTexture();
+ InitializeAVDAWithTextureOwner();
// This should do nothing.
EXPECT_CALL(*codec_allocator_->most_recent_codec, SetSurface(_)).Times(0);
- chooser_->ProvideSurfaceTexture();
+ chooser_->ProvideTextureOwner();
base::RunLoop().RunUntilIdle();
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
+TEST_P(AndroidVideoDecodeAcceleratorTest,
OverlayInfoWithDuplicateSurfaceIDDoesntChangeTheFactory) {
// Send OverlayInfo with duplicate info, and verify that it doesn't change
// the factory.
@@ -528,7 +529,7 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
avda()->SetOverlayInfo(overlay_info);
}
-TEST_F(AndroidVideoDecodeAcceleratorTest,
+TEST_P(AndroidVideoDecodeAcceleratorTest,
OverlayInfoWithNewSurfaceIDDoesChangeTheFactory) {
// Send OverlayInfo with new surface info, and verify that it does change the
// overlay factory.
@@ -541,7 +542,7 @@ TEST_F(AndroidVideoDecodeAcceleratorTest,
avda()->SetOverlayInfo(overlay_info);
}
-TEST_F(AndroidVideoDecodeAcceleratorTest, FullscreenSignalIsSentToChooser) {
+TEST_P(AndroidVideoDecodeAcceleratorTest, FullscreenSignalIsSentToChooser) {
// Send OverlayInfo that has |is_fullscreen| set, and verify that the chooser
// is notified about it.
SKIP_IF_MEDIACODEC_IS_NOT_AVAILABLE();
@@ -552,4 +553,23 @@ TEST_F(AndroidVideoDecodeAcceleratorTest, FullscreenSignalIsSentToChooser) {
ASSERT_EQ(chooser_->current_state_.is_fullscreen, overlay_info.is_fullscreen);
}
+static std::vector<VideoCodecProfile> GetTestList() {
+ std::vector<VideoCodecProfile> test_profiles;
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+ if (MediaCodecUtil::IsMediaCodecAvailable())
+ test_profiles.push_back(H264PROFILE_BASELINE);
+#endif
+
+ if (MediaCodecUtil::IsVp8DecoderAvailable())
+ test_profiles.push_back(VP8PROFILE_ANY);
+ if (MediaCodecUtil::IsVp9DecoderAvailable())
+ test_profiles.push_back(VP9PROFILE_PROFILE0);
+ return test_profiles;
+}
+
+INSTANTIATE_TEST_CASE_P(AndroidVideoDecodeAcceleratorTest,
+ AndroidVideoDecodeAcceleratorTest,
+ testing::ValuesIn(GetTestList()));
+
} // namespace media
diff --git a/chromium/media/gpu/android/android_video_surface_chooser.h b/chromium/media/gpu/android/android_video_surface_chooser.h
index dc738138828..fd8e2bdac37 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser.h
+++ b/chromium/media/gpu/android/android_video_surface_chooser.h
@@ -10,6 +10,7 @@
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "media/base/android/android_overlay.h"
+#include "media/base/video_rotation.h"
#include "media/gpu/media_gpu_export.h"
#include "ui/gfx/geometry/rect.h"
@@ -43,6 +44,9 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooser {
// signals, like fs or secure, before we promote.
bool promote_aggressively = false;
+ // Default orientation for the video.
+ VideoRotation video_rotation = VIDEO_ROTATION_0;
+
// Hint to use for the initial position when transitioning to an overlay.
gfx::Rect initial_position;
};
@@ -54,17 +58,16 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooser {
// Notify the client that the most recently provided overlay should be
// discarded. The overlay is still valid, but we recommend against
- // using it soon, in favor of a SurfaceTexture.
- using UseSurfaceTextureCB = base::RepeatingCallback<void(void)>;
+ // using it soon, in favor of a TextureOwner.
+ using UseTextureOwnerCB = base::RepeatingCallback<void(void)>;
AndroidVideoSurfaceChooser() {}
virtual ~AndroidVideoSurfaceChooser() {}
// Sets the client callbacks to be called when a new surface choice is made.
// Must be called before UpdateState();
- virtual void SetClientCallbacks(
- UseOverlayCB use_overlay_cb,
- UseSurfaceTextureCB use_surface_texture_cb) = 0;
+ virtual void SetClientCallbacks(UseOverlayCB use_overlay_cb,
+ UseTextureOwnerCB use_texture_owner_cb) = 0;
// Updates the current state and makes a new surface choice with the new
// state. If |new_factory| is empty, the factory is left as-is. Otherwise,
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl.cc b/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
index f1faaa4e2aa..547f8fa7295 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
@@ -30,10 +30,10 @@ AndroidVideoSurfaceChooserImpl::~AndroidVideoSurfaceChooserImpl() {}
void AndroidVideoSurfaceChooserImpl::SetClientCallbacks(
UseOverlayCB use_overlay_cb,
- UseSurfaceTextureCB use_surface_texture_cb) {
- DCHECK(use_overlay_cb && use_surface_texture_cb);
+ UseTextureOwnerCB use_texture_owner_cb) {
+ DCHECK(use_overlay_cb && use_texture_owner_cb);
use_overlay_cb_ = std::move(use_overlay_cb);
- use_surface_texture_cb_ = std::move(use_surface_texture_cb);
+ use_texture_owner_cb_ = std::move(use_texture_owner_cb);
}
void AndroidVideoSurfaceChooserImpl::UpdateState(
@@ -53,13 +53,14 @@ void AndroidVideoSurfaceChooserImpl::UpdateState(
initial_state_received_ = true;
// Choose here so that Choose() doesn't have to handle non-dynamic.
// Note that we ignore |is_expecting_relayout| here, since it's transient.
- // We don't want to pick SurfaceTexture permanently for that.
+ // We don't want to pick TextureOwner permanently for that.
if (overlay_factory_ &&
(current_state_.is_fullscreen || current_state_.is_secure ||
- current_state_.is_required)) {
+ current_state_.is_required) &&
+ current_state_.video_rotation == VIDEO_ROTATION_0) {
SwitchToOverlay(false);
} else {
- SwitchToSurfaceTexture();
+ SwitchToTextureOwner();
}
}
return;
@@ -87,9 +88,8 @@ void AndroidVideoSurfaceChooserImpl::Choose() {
DCHECK(allow_dynamic_);
// TODO(liberato): should this depend on resolution?
- OverlayState new_overlay_state = current_state_.promote_aggressively
- ? kUsingOverlay
- : kUsingSurfaceTexture;
+ OverlayState new_overlay_state =
+ current_state_.promote_aggressively ? kUsingOverlay : kUsingTextureOwner;
// Do we require a power-efficient overlay?
bool needs_power_efficient = current_state_.promote_aggressively;
@@ -111,7 +111,7 @@ void AndroidVideoSurfaceChooserImpl::Choose() {
// If the compositor won't promote, then don't.
if (!current_state_.is_compositor_promotable)
- new_overlay_state = kUsingSurfaceTexture;
+ new_overlay_state = kUsingTextureOwner;
// If we're expecting a relayout, then don't transition to overlay if we're
// not already in one. We don't want to transition out, though. This lets us
@@ -119,7 +119,7 @@ void AndroidVideoSurfaceChooserImpl::Choose() {
// TODO(liberato): Detect this more directly.
if (current_state_.is_expecting_relayout &&
client_overlay_state_ != kUsingOverlay)
- new_overlay_state = kUsingSurfaceTexture;
+ new_overlay_state = kUsingTextureOwner;
// If we're requesting an overlay, check that we haven't asked too recently
// since the last failure. This includes L1. We don't bother to check for
@@ -129,29 +129,34 @@ void AndroidVideoSurfaceChooserImpl::Choose() {
base::TimeDelta time_since_last_failure =
tick_clock_->NowTicks() - most_recent_overlay_failure_;
if (time_since_last_failure < MinimumDelayAfterFailedOverlay)
- new_overlay_state = kUsingSurfaceTexture;
+ new_overlay_state = kUsingTextureOwner;
}
// If an overlay is required, then choose one. The only way we won't is if we
- // don't have a factory or our request fails.
+ // don't have a factory or our request fails, or if it's rotated.
if (current_state_.is_required) {
new_overlay_state = kUsingOverlay;
// Required overlays don't need to be power efficient.
needs_power_efficient = false;
}
+ // Specifying a rotated overlay can NOTREACHED() in the compositor, so it's
+ // better to fail.
+ if (current_state_.video_rotation != VIDEO_ROTATION_0)
+ new_overlay_state = kUsingTextureOwner;
+
// If we have no factory, then we definitely don't want to use overlays.
if (!overlay_factory_)
- new_overlay_state = kUsingSurfaceTexture;
+ new_overlay_state = kUsingTextureOwner;
// Make sure that we're in |new_overlay_state_|.
- if (new_overlay_state == kUsingSurfaceTexture)
- SwitchToSurfaceTexture();
+ if (new_overlay_state == kUsingTextureOwner)
+ SwitchToTextureOwner();
else
SwitchToOverlay(needs_power_efficient);
}
-void AndroidVideoSurfaceChooserImpl::SwitchToSurfaceTexture() {
+void AndroidVideoSurfaceChooserImpl::SwitchToTextureOwner() {
// Invalidate any outstanding deletion callbacks for any overlays that we've
// provided to the client already. We assume that it will eventually drop
// them in response to the callback. Ready / failed callbacks aren't affected
@@ -164,11 +169,11 @@ void AndroidVideoSurfaceChooserImpl::SwitchToSurfaceTexture() {
overlay_ = nullptr;
// Notify the client to switch if it's in the wrong state.
- if (client_overlay_state_ != kUsingSurfaceTexture) {
- DCHECK(use_surface_texture_cb_);
+ if (client_overlay_state_ != kUsingTextureOwner) {
+ DCHECK(use_texture_owner_cb_);
- client_overlay_state_ = kUsingSurfaceTexture;
- use_surface_texture_cb_.Run();
+ client_overlay_state_ = kUsingTextureOwner;
+ use_texture_owner_cb_.Run();
}
}
@@ -218,7 +223,7 @@ void AndroidVideoSurfaceChooserImpl::SwitchToOverlay(
overlay_ = overlay_factory_.Run(std::move(config));
if (!overlay_)
- SwitchToSurfaceTexture();
+ SwitchToTextureOwner();
}
void AndroidVideoSurfaceChooserImpl::OnOverlayReady(AndroidOverlay* overlay) {
@@ -243,18 +248,18 @@ void AndroidVideoSurfaceChooserImpl::OnOverlayFailed(AndroidOverlay* overlay) {
overlay_ = nullptr;
most_recent_overlay_failure_ = tick_clock_->NowTicks();
- // If the client isn't already using a SurfaceTexture, then switch to it.
+ // If the client isn't already using a TextureOwner, then switch to it.
// Note that this covers the case of kUnknown, when we might not have told the
// client anything yet. That's important for Initialize, so that a failed
// overlay request still results in some callback to the client to know what
// surface to start with.
- SwitchToSurfaceTexture();
+ SwitchToTextureOwner();
}
void AndroidVideoSurfaceChooserImpl::OnOverlayDeleted(AndroidOverlay* overlay) {
- client_overlay_state_ = kUsingSurfaceTexture;
- // We don't call SwitchToSurfaceTexture since the client dropped the overlay.
- // It's already using SurfaceTexture.
+ client_overlay_state_ = kUsingTextureOwner;
+ // We don't call SwitchToTextureOwner since the client dropped the overlay.
+ // It's already using TextureOwner.
}
void AndroidVideoSurfaceChooserImpl::OnPowerEfficientState(
@@ -264,7 +269,7 @@ void AndroidVideoSurfaceChooserImpl::OnPowerEfficientState(
// callback if it arrives. Getting a new overlay clears any previous cbs.
DCHECK(!overlay_);
- // We cannot receive it after switching to SurfaceTexture, since that also
+ // We cannot receive it after switching to TextureOwner, since that also
// clears all callbacks.
DCHECK(client_overlay_state_ == kUsingOverlay);
@@ -285,7 +290,7 @@ void AndroidVideoSurfaceChooserImpl::OnPowerEfficientState(
// We don't want to delay transitioning to an overlay if the user re-enters
// fullscreen. TODO(liberato): Perhaps we should just clear the failure timer
// if we detect a transition into fs when we get new state from the client.
- SwitchToSurfaceTexture();
+ SwitchToTextureOwner();
}
} // namespace media
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl.h b/chromium/media/gpu/android/android_video_surface_chooser_impl.h
index e3b1a21277d..f5a5ecd5ada 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl.h
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl.h
@@ -30,20 +30,20 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooserImpl
// AndroidVideoSurfaceChooser
void SetClientCallbacks(UseOverlayCB use_overlay_cb,
- UseSurfaceTextureCB use_surface_texture_cb) override;
+ UseTextureOwnerCB use_texture_owner_cb) override;
void UpdateState(base::Optional<AndroidOverlayFactoryCB> new_factory,
const State& new_state) override;
private:
- // Choose whether we should be using a SurfaceTexture or overlay, and issue
+ // Choose whether we should be using a TextureOwner or overlay, and issue
// the right callbacks if we're changing between them. This should only be
// called if |allow_dynamic_|.
void Choose();
- // Start switching to SurfaceTexture or overlay, as needed. These will call
+ // Start switching to TextureOwner or overlay, as needed. These will call
// the client callbacks if we're changing state, though those callbacks might
// happen after this returns.
- void SwitchToSurfaceTexture();
+ void SwitchToTextureOwner();
// If |overlay_| has an in-flight request, then this will do nothing. If
// |power_efficient|, then we will require a power-efficient overlay, and
// cancel it if it becomes not power efficient.
@@ -57,7 +57,7 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooserImpl
// Client callbacks.
UseOverlayCB use_overlay_cb_;
- UseSurfaceTextureCB use_surface_texture_cb_;
+ UseTextureOwnerCB use_texture_owner_cb_;
// Current overlay that we've constructed but haven't received ready / failed
// callbacks yet. Will be nullptr if we haven't constructed one, or if we
@@ -72,7 +72,7 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooserImpl
enum OverlayState {
kUnknown,
- kUsingSurfaceTexture,
+ kUsingTextureOwner,
kUsingOverlay,
};
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc b/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
index 76e760d4e0c..a14c7468b6f 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
@@ -40,7 +40,7 @@ class MockClient {
}
// Note that this won't clear |overlay_|, which is helpful.
- MOCK_METHOD0(UseSurfaceTexture, void(void));
+ MOCK_METHOD0(UseTextureOwner, void(void));
// Let the test have the overlay.
std::unique_ptr<AndroidOverlay> ReleaseOverlay() {
@@ -67,6 +67,7 @@ enum class IsSecure { No, Yes };
enum class IsCCPromotable { No, Yes };
enum class IsExpectingRelayout { No, Yes };
enum class PromoteAggressively { No, Yes };
+enum class IsVideoRotated { No, Yes };
using TestParams = std::tuple<ShouldUseOverlay,
ShouldBePowerEfficient,
@@ -76,7 +77,8 @@ using TestParams = std::tuple<ShouldUseOverlay,
IsSecure,
IsCCPromotable,
IsExpectingRelayout,
- PromoteAggressively>;
+ PromoteAggressively,
+ IsVideoRotated>;
// Useful macro for instantiating tests.
#define Either(x) Values(x::No, x::Yes)
@@ -84,8 +86,8 @@ using TestParams = std::tuple<ShouldUseOverlay,
// Check if a parameter of type |type| is Yes. |n| is the location of the
// parameter of that type.
// c++14 can remove |n|, and std::get() by type.
-#define IsYes(type, n) (::testing::get<n>(GetParam()) == type::Yes);
-#define IsIgnored(type, n) (::testing::get<n>(GetParam()) == type::Ignored);
+#define IsYes(type, n) (::testing::get<n>(GetParam()) == type::Yes)
+#define IsIgnored(type, n) (::testing::get<n>(GetParam()) == type::Ignored)
} // namespace
@@ -128,8 +130,10 @@ class AndroidVideoSurfaceChooserImplTest
chooser_ = std::make_unique<AndroidVideoSurfaceChooserImpl>(allow_dynamic_,
&tick_clock_);
chooser_->SetClientCallbacks(
- base::Bind(&MockClient::UseOverlayImpl, base::Unretained(&client_)),
- base::Bind(&MockClient::UseSurfaceTexture, base::Unretained(&client_)));
+ base::BindRepeating(&MockClient::UseOverlayImpl,
+ base::Unretained(&client_)),
+ base::BindRepeating(&MockClient::UseTextureOwner,
+ base::Unretained(&client_)));
chooser_->UpdateState(
factory ? base::make_optional(std::move(factory)) : base::nullopt,
chooser_state_);
@@ -207,28 +211,27 @@ class AndroidVideoSurfaceChooserImplTest
};
TEST_F(AndroidVideoSurfaceChooserImplTest,
- InitializeWithoutFactoryUsesSurfaceTexture) {
+ InitializeWithoutFactoryUsesTextureOwner) {
// Calling Initialize() with no factory should result in a callback to use
- // surface texture.
- EXPECT_CALL(client_, UseSurfaceTexture());
+ // texture owner.
+ EXPECT_CALL(client_, UseTextureOwner());
StartChooser(AndroidOverlayFactoryCB());
}
-TEST_F(AndroidVideoSurfaceChooserImplTest,
- NullInitialOverlayUsesSurfaceTexture) {
+TEST_F(AndroidVideoSurfaceChooserImplTest, NullInitialOverlayUsesTextureOwner) {
// If we provide a factory, but it fails to create an overlay, then |client_|
- // should be notified to use a surface texture.
+ // should be notified to use a texture owner.
chooser_state_.is_fullscreen = true;
EXPECT_CALL(*this, MockOnOverlayCreated());
- EXPECT_CALL(client_, UseSurfaceTexture());
+ EXPECT_CALL(client_, UseTextureOwner());
StartChooser(FactoryFor(nullptr));
}
TEST_F(AndroidVideoSurfaceChooserImplTest,
- FailedInitialOverlayUsesSurfaceTexture) {
+ FailedInitialOverlayUsesTextureOwner) {
// If we provide a factory, but the overlay that it provides returns 'failed',
- // then |client_| should use surface texture. Also check that it won't retry
+ // then |client_| should use texture owner. Also check that it won't retry
// after a failed overlay too soon.
chooser_state_.is_fullscreen = true;
EXPECT_CALL(*this, MockOnOverlayCreated());
@@ -241,7 +244,7 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
// doesn't have to be destroyed. We just care that it hasn't been destroyed
// before now.
destruction_observer_ = nullptr;
- EXPECT_CALL(client_, UseSurfaceTexture());
+ EXPECT_CALL(client_, UseTextureOwner());
overlay_callbacks_.OverlayFailed.Run();
testing::Mock::VerifyAndClearExpectations(&client_);
testing::Mock::VerifyAndClearExpectations(this);
@@ -262,38 +265,38 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
testing::Mock::VerifyAndClearExpectations(this);
}
-TEST_F(AndroidVideoSurfaceChooserImplTest, NullLaterOverlayUsesSurfaceTexture) {
+TEST_F(AndroidVideoSurfaceChooserImplTest, NullLaterOverlayUsesTextureOwner) {
// If an overlay factory is provided after startup that returns a null overlay
// from CreateOverlay, |chooser_| should, at most, notify |client_| to use
- // SurfaceTexture zero or more times.
+ // TextureOwner zero or more times.
- // Start with SurfaceTexture.
+ // Start with TextureOwner.
chooser_state_.is_fullscreen = true;
- EXPECT_CALL(client_, UseSurfaceTexture());
+ EXPECT_CALL(client_, UseTextureOwner());
allow_dynamic_ = true;
StartChooser(AndroidOverlayFactoryCB());
testing::Mock::VerifyAndClearExpectations(&client_);
// Provide a factory that will return a null overlay.
EXPECT_CALL(*this, MockOnOverlayCreated());
- EXPECT_CALL(client_, UseSurfaceTexture()).Times(AnyNumber());
+ EXPECT_CALL(client_, UseTextureOwner()).Times(AnyNumber());
chooser_->UpdateState(FactoryFor(nullptr), chooser_state_);
}
TEST_F(AndroidVideoSurfaceChooserImplTest, FailedLaterOverlayDoesNothing) {
// If we send an overlay factory that returns an overlay, and that overlay
// fails, then the client should not be notified except for zero or more
- // callbacks to switch to surface texture.
+ // callbacks to switch to texture owner.
- // Start with SurfaceTexture.
+ // Start with TextureOwner.
chooser_state_.is_fullscreen = true;
- EXPECT_CALL(client_, UseSurfaceTexture());
+ EXPECT_CALL(client_, UseTextureOwner());
StartChooser(AndroidOverlayFactoryCB());
testing::Mock::VerifyAndClearExpectations(&client_);
// Provide a factory.
EXPECT_CALL(*this, MockOnOverlayCreated());
- EXPECT_CALL(client_, UseSurfaceTexture()).Times(AnyNumber());
+ EXPECT_CALL(client_, UseTextureOwner()).Times(AnyNumber());
chooser_->UpdateState(FactoryFor(std::move(overlay_)), chooser_state_);
testing::Mock::VerifyAndClearExpectations(&client_);
@@ -307,17 +310,17 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
SuccessfulLaterOverlayNotifiesClient) {
// |client_| is notified if we provide a factory that gets an overlay.
- // Start with SurfaceTexture.
+ // Start with TextureOwner.
chooser_state_.is_fullscreen = true;
- EXPECT_CALL(client_, UseSurfaceTexture());
+ EXPECT_CALL(client_, UseTextureOwner());
StartChooser(AndroidOverlayFactoryCB());
testing::Mock::VerifyAndClearExpectations(&client_);
// Provide a factory. |chooser_| should try to create an overlay. We don't
- // care if a call to UseSurfaceTexture is elided or not. Note that AVDA will
- // ignore duplicate calls anyway (MultipleSurfaceTextureCallbacksAreIgnored).
+ // care if a call to UseTextureOwner is elided or not. Note that AVDA will
+ // ignore duplicate calls anyway (MultipleTextureOwnerCallbacksAreIgnored).
EXPECT_CALL(*this, MockOnOverlayCreated());
- EXPECT_CALL(client_, UseSurfaceTexture()).Times(AnyNumber());
+ EXPECT_CALL(client_, UseTextureOwner()).Times(AnyNumber());
chooser_->UpdateState(FactoryFor(std::move(overlay_)), chooser_state_);
testing::Mock::VerifyAndClearExpectations(&client_);
testing::Mock::VerifyAndClearExpectations(this);
@@ -330,7 +333,7 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
TEST_F(AndroidVideoSurfaceChooserImplTest,
UpdateStateAfterDeleteRetriesOverlay) {
// Make sure that SurfaceChooser notices that we delete the overlay, and have
- // switched back to SurfaceTexture mode.
+ // switched back to TextureOwner mode.
chooser_state_.is_fullscreen = true;
StartChooserAndProvideOverlay();
@@ -341,7 +344,7 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
// Force chooser to choose again. We expect that it will retry the overlay,
// since the delete should have informed it that we've switched back to
- // SurfaceTexture without a callback from SurfaceChooser. If it didn't know
+ // TextureOwner without a callback from SurfaceChooser. If it didn't know
// this, then it would think that the client is still using an overlay, and
// take no action.
@@ -355,7 +358,7 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
TEST_F(AndroidVideoSurfaceChooserImplTest,
PowerEffcientOverlayCancelsIfNotPowerEfficient) {
// If we request a power efficient overlay that later becomes not power
- // efficient, then the client should switch to SurfaceTexture.
+ // efficient, then the client should switch to TextureOwner.
chooser_state_.promote_aggressively = true;
MockAndroidOverlay* overlay = StartChooserAndProvideOverlay();
@@ -366,14 +369,14 @@ TEST_F(AndroidVideoSurfaceChooserImplTest,
ASSERT_TRUE(overlay->config()->power_efficient);
// Notify the chooser that it's not power efficient anymore.
- EXPECT_CALL(client_, UseSurfaceTexture());
+ EXPECT_CALL(client_, UseTextureOwner());
overlay_callbacks_.PowerEfficientState.Run(false);
}
TEST_P(AndroidVideoSurfaceChooserImplTest, OverlayIsUsedOrNotBasedOnState) {
// Provide a factory, and verify that it is used when the state says that it
// should be. If the overlay is used, then we also verify that it does not
- // switch to SurfaceTexture first, since pre-M requires it.
+ // switch to TextureOwner first, since pre-M requires it.
const bool should_use_overlay = IsYes(ShouldUseOverlay, 0);
const bool should_be_power_efficient = IsYes(ShouldBePowerEfficient, 1);
@@ -385,14 +388,16 @@ TEST_P(AndroidVideoSurfaceChooserImplTest, OverlayIsUsedOrNotBasedOnState) {
chooser_state_.is_compositor_promotable = IsYes(IsCCPromotable, 6);
chooser_state_.is_expecting_relayout = IsYes(IsExpectingRelayout, 7);
chooser_state_.promote_aggressively = IsYes(PromoteAggressively, 8);
+ chooser_state_.video_rotation =
+ IsYes(IsVideoRotated, 9) ? VIDEO_ROTATION_90 : VIDEO_ROTATION_0;
MockAndroidOverlay* overlay = overlay_.get();
if (should_use_overlay) {
- EXPECT_CALL(client_, UseSurfaceTexture()).Times(0);
+ EXPECT_CALL(client_, UseTextureOwner()).Times(0);
EXPECT_CALL(*this, MockOnOverlayCreated());
} else {
- EXPECT_CALL(client_, UseSurfaceTexture());
+ EXPECT_CALL(client_, UseTextureOwner());
EXPECT_CALL(*this, MockOnOverlayCreated()).Times(0);
}
@@ -409,8 +414,8 @@ TEST_P(AndroidVideoSurfaceChooserImplTest, OverlayIsUsedOrNotBasedOnState) {
}
}
-// Unless we're promoting aggressively, we should default to SurfaceTexture.
-INSTANTIATE_TEST_CASE_P(NoFullscreenUsesSurfaceTexture,
+// Unless we're promoting aggressively, we should default to TextureOwner.
+INSTANTIATE_TEST_CASE_P(NoFullscreenUsesTextureOwner,
AndroidVideoSurfaceChooserImplTest,
Combine(Values(ShouldUseOverlay::No),
Values(ShouldBePowerEfficient::Ignored),
@@ -420,7 +425,8 @@ INSTANTIATE_TEST_CASE_P(NoFullscreenUsesSurfaceTexture,
Values(IsSecure::No),
Either(IsCCPromotable),
Either(IsExpectingRelayout),
- Values(PromoteAggressively::No)));
+ Values(PromoteAggressively::No),
+ Either(IsVideoRotated)));
INSTANTIATE_TEST_CASE_P(FullscreenUsesOverlay,
AndroidVideoSurfaceChooserImplTest,
@@ -432,7 +438,8 @@ INSTANTIATE_TEST_CASE_P(FullscreenUsesOverlay,
Values(IsSecure::No),
Values(IsCCPromotable::Yes),
Values(IsExpectingRelayout::No),
- Either(PromoteAggressively)));
+ Either(PromoteAggressively),
+ Values(IsVideoRotated::No)));
INSTANTIATE_TEST_CASE_P(RequiredUsesOverlay,
AndroidVideoSurfaceChooserImplTest,
@@ -444,7 +451,8 @@ INSTANTIATE_TEST_CASE_P(RequiredUsesOverlay,
Either(IsSecure),
Either(IsCCPromotable),
Either(IsExpectingRelayout),
- Either(PromoteAggressively)));
+ Either(PromoteAggressively),
+ Values(IsVideoRotated::No)));
// Secure textures should use an overlay if the compositor will promote them.
// We don't care about relayout, since it's transient; either behavior is okay
@@ -459,14 +467,15 @@ INSTANTIATE_TEST_CASE_P(SecureUsesOverlayIfPromotable,
Values(IsSecure::Yes),
Values(IsCCPromotable::Yes),
Values(IsExpectingRelayout::No),
- Either(PromoteAggressively)));
+ Either(PromoteAggressively),
+ Values(IsVideoRotated::No)));
// For all dynamic cases, we shouldn't use an overlay if the compositor won't
// promote it, unless it's marked as required. This includes secure surfaces,
-// so that L3 will fall back to SurfaceTexture. Non-dynamic is excluded, since
+// so that L3 will fall back to TextureOwner. Non-dynamic is excluded, since
// we don't get (or use) compositor feedback before the first frame. At that
// point, we've already chosen the output surface and can't switch it.
-INSTANTIATE_TEST_CASE_P(NotCCPromotableNotRequiredUsesSurfaceTexture,
+INSTANTIATE_TEST_CASE_P(NotCCPromotableNotRequiredUsesTextureOwner,
AndroidVideoSurfaceChooserImplTest,
Combine(Values(ShouldUseOverlay::No),
Values(ShouldBePowerEfficient::No),
@@ -476,11 +485,12 @@ INSTANTIATE_TEST_CASE_P(NotCCPromotableNotRequiredUsesSurfaceTexture,
Either(IsSecure),
Values(IsCCPromotable::No),
Either(IsExpectingRelayout),
- Either(PromoteAggressively)));
+ Either(PromoteAggressively),
+ Either(IsVideoRotated)));
// If we're expecting a relayout, then we should never use an overlay unless
// it's required.
-INSTANTIATE_TEST_CASE_P(InsecureExpectingRelayoutUsesSurfaceTexture,
+INSTANTIATE_TEST_CASE_P(InsecureExpectingRelayoutUsesTextureOwner,
AndroidVideoSurfaceChooserImplTest,
Combine(Values(ShouldUseOverlay::No),
Values(ShouldBePowerEfficient::No),
@@ -490,7 +500,8 @@ INSTANTIATE_TEST_CASE_P(InsecureExpectingRelayoutUsesSurfaceTexture,
Either(IsSecure),
Either(IsCCPromotable),
Values(IsExpectingRelayout::Yes),
- Either(PromoteAggressively)));
+ Either(PromoteAggressively),
+ Either(IsVideoRotated)));
// "is_fullscreen" should be enough to trigger an overlay pre-M.
INSTANTIATE_TEST_CASE_P(NotDynamicInFullscreenUsesOverlay,
@@ -503,7 +514,8 @@ INSTANTIATE_TEST_CASE_P(NotDynamicInFullscreenUsesOverlay,
Either(IsSecure),
Either(IsCCPromotable),
Either(IsExpectingRelayout),
- Either(PromoteAggressively)));
+ Either(PromoteAggressively),
+ Values(IsVideoRotated::No)));
// "is_secure" should be enough to trigger an overlay pre-M.
INSTANTIATE_TEST_CASE_P(NotDynamicSecureUsesOverlay,
@@ -516,7 +528,8 @@ INSTANTIATE_TEST_CASE_P(NotDynamicSecureUsesOverlay,
Values(IsSecure::Yes),
Either(IsCCPromotable),
Either(IsExpectingRelayout),
- Either(PromoteAggressively)));
+ Either(PromoteAggressively),
+ Values(IsVideoRotated::No)));
// "is_required" should be enough to trigger an overlay pre-M.
INSTANTIATE_TEST_CASE_P(NotDynamicRequiredUsesOverlay,
@@ -529,7 +542,8 @@ INSTANTIATE_TEST_CASE_P(NotDynamicRequiredUsesOverlay,
Either(IsSecure),
Either(IsCCPromotable),
Either(IsExpectingRelayout),
- Either(PromoteAggressively)));
+ Either(PromoteAggressively),
+ Values(IsVideoRotated::No)));
// If we're promoting aggressively, then we should request power efficient.
INSTANTIATE_TEST_CASE_P(AggressiveOverlayIsPowerEfficient,
@@ -542,6 +556,21 @@ INSTANTIATE_TEST_CASE_P(AggressiveOverlayIsPowerEfficient,
Values(IsSecure::No),
Values(IsCCPromotable::Yes),
Values(IsExpectingRelayout::No),
- Values(PromoteAggressively::Yes)));
+ Values(PromoteAggressively::Yes),
+ Values(IsVideoRotated::No)));
+
+// Rotated video is unsupported for overlays in all cases.
+INSTANTIATE_TEST_CASE_P(IsVideoRotatedUsesTextureOwner,
+ AndroidVideoSurfaceChooserImplTest,
+ Combine(Values(ShouldUseOverlay::No),
+ Either(ShouldBePowerEfficient),
+ Either(AllowDynamic),
+ Either(IsRequired),
+ Either(IsFullscreen),
+ Either(IsSecure),
+ Either(IsCCPromotable),
+ Either(IsExpectingRelayout),
+ Either(PromoteAggressively),
+ Values(IsVideoRotated::Yes)));
} // namespace media
diff --git a/chromium/media/gpu/android/avda_codec_allocator.h b/chromium/media/gpu/android/avda_codec_allocator.h
index db83c84e098..a5faf4e2001 100644
--- a/chromium/media/gpu/android/avda_codec_allocator.h
+++ b/chromium/media/gpu/android/avda_codec_allocator.h
@@ -14,6 +14,7 @@
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/optional.h"
+#include "base/sequenced_task_runner.h"
#include "base/synchronization/waitable_event.h"
#include "base/sys_info.h"
#include "base/task_scheduler/post_task.h"
diff --git a/chromium/media/gpu/android/avda_codec_image.cc b/chromium/media/gpu/android/avda_codec_image.cc
index e1a2b60e583..7b94cc8c218 100644
--- a/chromium/media/gpu/android/avda_codec_image.cc
+++ b/chromium/media/gpu/android/avda_codec_image.cc
@@ -23,7 +23,7 @@ AVDACodecImage::AVDACodecImage(
: shared_state_(shared_state),
codec_buffer_index_(kInvalidCodecBufferIndex),
media_codec_(codec),
- has_surface_texture_(false),
+ has_texture_owner_(false),
texture_(0) {}
AVDACodecImage::~AVDACodecImage() {}
@@ -43,25 +43,25 @@ bool AVDACodecImage::BindTexImage(unsigned target) {
void AVDACodecImage::ReleaseTexImage(unsigned target) {}
bool AVDACodecImage::CopyTexImage(unsigned target) {
- if (!has_surface_texture_ || target != GL_TEXTURE_EXTERNAL_OES)
+ if (!has_texture_owner_ || target != GL_TEXTURE_EXTERNAL_OES)
return false;
GLint bound_service_id = 0;
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &bound_service_id);
// We insist that the currently bound texture is the right one.
if (bound_service_id !=
- static_cast<GLint>(shared_state_->surface_texture_service_id())) {
+ static_cast<GLint>(shared_state_->texture_owner_service_id())) {
return false;
}
// Make sure that we have the right image in the front buffer. Note that the
- // bound_service_id is guaranteed to be equal to the surface texture's client
+ // bound_service_id is guaranteed to be equal to the texture owner's client
// texture id, so we can skip preserving it if the right context is current.
UpdateSurfaceInternal(UpdateMode::RENDER_TO_FRONT_BUFFER,
kDontRestoreBindings);
// By setting image state to UNBOUND instead of COPIED we ensure that
- // CopyTexImage() is called each time the surface texture is used for drawing.
+ // CopyTexImage() is called each time the texture owner is used for drawing.
// It would be nice if we could do this via asking for the currently bound
// Texture, but the active unit never seems to change.
texture_->SetLevelImageState(GL_TEXTURE_EXTERNAL_OES, 0,
@@ -81,11 +81,12 @@ bool AVDACodecImage::ScheduleOverlayPlane(gfx::AcceleratedWidget widget,
gfx::OverlayTransform transform,
const gfx::Rect& bounds_rect,
const gfx::RectF& crop_rect,
- bool enable_blend) {
+ bool enable_blend,
+ gfx::GpuFence* gpu_fence) {
// This should only be called when we're rendering to a SurfaceView.
- if (has_surface_texture_) {
+ if (has_texture_owner_) {
DVLOG(1) << "Invalid call to ScheduleOverlayPlane; this image is "
- "SurfaceTexture backed.";
+ "TextureOwner backed.";
return false;
}
@@ -103,8 +104,8 @@ void AVDACodecImage::OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
uint64_t process_tracing_id,
const std::string& dump_name) {}
-void AVDACodecImage::UpdateSurfaceTexture(RestoreBindingsMode mode) {
- DCHECK(has_surface_texture_);
+void AVDACodecImage::UpdateTextureOwner(RestoreBindingsMode mode) {
+ DCHECK(has_texture_owner_);
DCHECK_EQ(codec_buffer_index_, kUpdateOnly);
codec_buffer_index_ = kRendered;
@@ -137,9 +138,9 @@ void AVDACodecImage::CodecChanged(MediaCodecBridge* codec) {
}
void AVDACodecImage::SetBufferMetadata(int buffer_index,
- bool has_surface_texture,
+ bool has_texture_owner,
const gfx::Size& size) {
- has_surface_texture_ = has_surface_texture;
+ has_texture_owner_ = has_texture_owner;
codec_buffer_index_ = buffer_index;
size_ = size;
}
@@ -162,7 +163,7 @@ void AVDACodecImage::UpdateSurfaceInternal(
ReleaseOutputBuffer(update_mode);
// SurfaceViews are updated implicitly, so no further steps are necessary.
- if (!has_surface_texture_) {
+ if (!has_texture_owner_) {
DCHECK(update_mode != UpdateMode::RENDER_TO_BACK_BUFFER);
return;
}
@@ -171,7 +172,7 @@ void AVDACodecImage::UpdateSurfaceInternal(
if (update_mode != UpdateMode::RENDER_TO_FRONT_BUFFER)
return;
- UpdateSurfaceTexture(attached_bindings_mode);
+ UpdateTextureOwner(attached_bindings_mode);
}
void AVDACodecImage::ReleaseOutputBuffer(UpdateMode update_mode) {
@@ -191,7 +192,7 @@ void AVDACodecImage::ReleaseOutputBuffer(UpdateMode update_mode) {
DCHECK(update_mode == UpdateMode::RENDER_TO_BACK_BUFFER ||
update_mode == UpdateMode::RENDER_TO_FRONT_BUFFER);
- if (!has_surface_texture_) {
+ if (!has_texture_owner_) {
DCHECK(update_mode == UpdateMode::RENDER_TO_FRONT_BUFFER);
DCHECK_GE(codec_buffer_index_, 0);
media_codec_->ReleaseOutputBuffer(codec_buffer_index_, true);
@@ -202,14 +203,14 @@ void AVDACodecImage::ReleaseOutputBuffer(UpdateMode update_mode) {
// If we've already released to the back buffer, there's nothing left to do,
// but wait for the previously released buffer if necessary.
if (codec_buffer_index_ != kUpdateOnly) {
- DCHECK(has_surface_texture_);
+ DCHECK(has_texture_owner_);
DCHECK_GE(codec_buffer_index_, 0);
- shared_state_->RenderCodecBufferToSurfaceTexture(media_codec_,
- codec_buffer_index_);
+ shared_state_->RenderCodecBufferToTextureOwner(media_codec_,
+ codec_buffer_index_);
codec_buffer_index_ = kUpdateOnly;
}
- // Only wait for the SurfaceTexture update if we're rendering to the front.
+ // Only wait for the TextureOwner update if we're rendering to the front.
if (update_mode == UpdateMode::RENDER_TO_FRONT_BUFFER)
shared_state_->WaitForFrameAvailable();
}
@@ -227,7 +228,7 @@ std::unique_ptr<ui::ScopedMakeCurrent> AVDACodecImage::MakeCurrentIfNeeded() {
void AVDACodecImage::GetTextureMatrix(float matrix[16]) {
// Our current matrix may be stale. Update it if possible.
- if (has_surface_texture_)
+ if (has_texture_owner_)
UpdateSurface(UpdateMode::RENDER_TO_FRONT_BUFFER);
shared_state_->GetTransformMatrix(matrix);
YInvertMatrix(matrix);
diff --git a/chromium/media/gpu/android/avda_codec_image.h b/chromium/media/gpu/android/avda_codec_image.h
index 78c5978c750..2e210b70de6 100644
--- a/chromium/media/gpu/android/avda_codec_image.h
+++ b/chromium/media/gpu/android/avda_codec_image.h
@@ -21,7 +21,7 @@ namespace media {
class MediaCodecBridge;
-// GLImage that renders MediaCodec buffers to a SurfaceTexture or SurfaceView as
+// GLImage that renders MediaCodec buffers to a TextureOwner or SurfaceView as
// needed in order to draw them.
class AVDACodecImage : public gpu::gles2::GLStreamTextureImage {
public:
@@ -42,7 +42,8 @@ class AVDACodecImage : public gpu::gles2::GLStreamTextureImage {
gfx::OverlayTransform transform,
const gfx::Rect& bounds_rect,
const gfx::RectF& crop_rect,
- bool enable_blend) override;
+ bool enable_blend,
+ gfx::GpuFence* gpu_fence) override;
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
void Flush() override {}
void OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
@@ -61,11 +62,11 @@ class AVDACodecImage : public gpu::gles2::GLStreamTextureImage {
DISCARD_CODEC_BUFFER,
// Renders to back buffer, no UpdateTexImage(); can only be used with a
- // valid |surface_texture_|.
+ // valid |texture_owner_|.
RENDER_TO_BACK_BUFFER,
// Renders to the back buffer. When used with a SurfaceView, promotion to
- // the front buffer is automatic. When using a |surface_texture_|,
+ // the front buffer is automatic. When using a |texture_owner_|,
// UpdateTexImage() is called to promote the back buffer into the front.
RENDER_TO_FRONT_BUFFER
};
@@ -81,10 +82,10 @@ class AVDACodecImage : public gpu::gles2::GLStreamTextureImage {
void set_texture(gpu::gles2::Texture* texture) { texture_ = texture; }
// Sets up the properties necessary for the image to render. |buffer_index| is
- // supplied to ReleaseOutputBuffer(), |has_surface_texture| controls which
+ // supplied to ReleaseOutputBuffer(), |has_texture_owner| controls which
// rendering path is used, and |size| is used by the compositor.
void SetBufferMetadata(int buffer_index,
- bool has_surface_texture,
+ bool has_texture_owner,
const gfx::Size& size);
bool SetSharedState(scoped_refptr<AVDASharedState> shared_state);
@@ -105,18 +106,18 @@ class AVDACodecImage : public gpu::gles2::GLStreamTextureImage {
~AVDACodecImage() override;
private:
- // Make sure that the surface texture's front buffer is current. This will
+ // Make sure that the texture owner's front buffer is current. This will
// save / restore the current context. It will optionally restore the texture
- // bindings in the surface texture's context, based on |mode|. This is
+ // bindings in the texture owner's context, based on |mode|. This is
// intended as a hint if we don't need to change contexts. If we do need to
// change contexts, then we'll always preserve the texture bindings in the
// both contexts. In other words, the caller is telling us whether it's
// okay to change the binding in the current context.
enum RestoreBindingsMode { kDontRestoreBindings, kDoRestoreBindings };
- void UpdateSurfaceTexture(RestoreBindingsMode mode);
+ void UpdateTextureOwner(RestoreBindingsMode mode);
// Internal helper for UpdateSurface() that allows callers to specify the
- // RestoreBindingsMode when a SurfaceTexture is already attached prior to
+ // RestoreBindingsMode when a TextureOwner is already attached prior to
// calling this method.
void UpdateSurfaceInternal(UpdateMode update_mode,
RestoreBindingsMode attached_bindings_mode);
@@ -148,9 +149,9 @@ class AVDACodecImage : public gpu::gles2::GLStreamTextureImage {
// May be null.
MediaCodecBridge* media_codec_;
- // Indicates if we're rendering to a SurfaceTexture or not. Set during the
+ // Indicates if we're rendering to a TextureOwner or not. Set during the
// call to SetBufferMetadata().
- bool has_surface_texture_;
+ bool has_texture_owner_;
// The texture that we're attached to.
gpu::gles2::Texture* texture_;
diff --git a/chromium/media/gpu/android/avda_picture_buffer_manager.cc b/chromium/media/gpu/android/avda_picture_buffer_manager.cc
index fde92b992b4..ee4c4ee3067 100644
--- a/chromium/media/gpu/android/avda_picture_buffer_manager.cc
+++ b/chromium/media/gpu/android/avda_picture_buffer_manager.cc
@@ -10,7 +10,6 @@
#include "base/android/build_info.h"
#include "base/bind.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram.h"
#include "gpu/command_buffer/service/context_group.h"
#include "gpu/command_buffer/service/gl_stream_texture_image.h"
@@ -50,20 +49,19 @@ AVDAPictureBufferManager::~AVDAPictureBufferManager() {}
bool AVDAPictureBufferManager::Initialize(
scoped_refptr<AVDASurfaceBundle> surface_bundle) {
shared_state_ = nullptr;
- surface_texture_ = nullptr;
+ texture_owner_ = nullptr;
if (!surface_bundle->overlay) {
- // Create the surface texture.
- surface_texture_ = SurfaceTextureGLOwnerImpl::Create();
- if (!surface_texture_)
+ // Create the texture owner.
+ texture_owner_ = SurfaceTextureGLOwner::Create();
+ if (!texture_owner_)
return false;
- surface_bundle->surface_texture_surface =
- surface_texture_->CreateJavaSurface();
- surface_bundle->surface_texture = surface_texture_;
+ surface_bundle->texture_owner_surface = texture_owner_->CreateJavaSurface();
+ surface_bundle->texture_owner_ = texture_owner_;
}
- // Only do this once the surface texture is filled in, since the constructor
+ // Only do this once the texture owner is filled in, since the constructor
// assumes that it will be.
shared_state_ = new AVDASharedState(surface_bundle);
shared_state_->SetPromotionHintCB(state_provider_->GetPromotionHintCB());
@@ -78,7 +76,7 @@ void AVDAPictureBufferManager::Destroy(const PictureBufferMap& buffers) {
ReleaseCodecBuffers(buffers);
CodecChanged(nullptr);
- surface_texture_ = nullptr;
+ texture_owner_ = nullptr;
}
void AVDAPictureBufferManager::SetImageForPicture(
@@ -99,8 +97,8 @@ void AVDAPictureBufferManager::SetImageForPicture(
GLuint stream_texture_service_id = 0;
if (image) {
// Override the Texture's service id, so that it will use the one that is
- // attached to the SurfaceTexture.
- stream_texture_service_id = shared_state_->surface_texture_service_id();
+ // attached to the TextureOwner
+ stream_texture_service_id = shared_state_->texture_owner_service_id();
// Also set the parameters for the level if we're not clearing the image.
const gfx::Size size = state_provider_->GetSize();
@@ -111,16 +109,16 @@ void AVDAPictureBufferManager::SetImageForPicture(
static_cast<AVDACodecImage*>(image)->set_texture(texture_ref->texture());
}
- // If we're clearing the image, or setting a SurfaceTexture backed image, we
- // set the state to UNBOUND. For SurfaceTexture images, this ensures that the
+ // If we're clearing the image, or setting a TextureOwner backed image, we
+ // set the state to UNBOUND. For TextureOwner images, this ensures that the
// implementation will call CopyTexImage, which is where AVDACodecImage
- // updates the SurfaceTexture to the right frame.
+ // updates the TextureOwner to the right frame.
auto image_state = gpu::gles2::Texture::UNBOUND;
// For SurfaceView we set the state to BOUND because ScheduleOverlayPlane
// requires it. If something tries to sample from this texture it won't work,
// but there's no way to sample from a SurfaceView anyway, so it doesn't
// matter.
- if (image && !surface_texture_)
+ if (image && !texture_owner_)
image_state = gpu::gles2::Texture::BOUND;
texture_manager->SetLevelStreamTextureImage(texture_ref, kTextureTarget, 0,
image, image_state,
@@ -145,7 +143,7 @@ void AVDAPictureBufferManager::UseCodecBufferForPictureBuffer(
// Note that this is not a race, since we do not re-use a PictureBuffer
// until after the CC is done drawing it.
pictures_out_for_display_.push_back(picture_buffer.id());
- avda_image->SetBufferMetadata(codec_buf_index, !!surface_texture_,
+ avda_image->SetBufferMetadata(codec_buf_index, !!texture_owner_,
state_provider_->GetSize());
// If the shared state has changed for this image, retarget its texture.
@@ -158,7 +156,7 @@ void AVDAPictureBufferManager::UseCodecBufferForPictureBuffer(
void AVDAPictureBufferManager::AssignOnePictureBuffer(
const PictureBuffer& picture_buffer,
bool have_context) {
- // Attach a GLImage to each texture that will use the surface texture.
+ // Attach a GLImage to each texture that will use the texture owner.
scoped_refptr<gpu::gles2::GLStreamTextureImage> gl_image =
codec_images_[picture_buffer.id()] =
new AVDACodecImage(shared_state_, media_codec_);
@@ -223,10 +221,10 @@ void AVDAPictureBufferManager::MaybeRenderEarly() {
AVDACodecImage::UpdateMode::RENDER_TO_FRONT_BUFFER);
}
- // Back buffer rendering is only available for surface textures. We'll always
+ // Back buffer rendering is only available for texture owners. We'll always
// have at least one front buffer, so the next buffer must be the backbuffer.
size_t backbuffer_index = front_index + 1;
- if (!surface_texture_ || backbuffer_index >= pictures_out_for_display_.size())
+ if (!texture_owner_ || backbuffer_index >= pictures_out_for_display_.size())
return;
// See if the back buffer is free. If so, then render the frame adjacent to
@@ -253,7 +251,7 @@ void AVDAPictureBufferManager::CodecChanged(MediaCodecBridge* codec) {
bool AVDAPictureBufferManager::ArePicturesOverlayable() {
// SurfaceView frames are always overlayable because that's the only way to
// display them.
- return !surface_texture_;
+ return !texture_owner_;
}
bool AVDAPictureBufferManager::HasUnrenderedPictures() const {
diff --git a/chromium/media/gpu/android/avda_picture_buffer_manager.h b/chromium/media/gpu/android/avda_picture_buffer_manager.h
index f2bcc0ca0a5..b93e324da4c 100644
--- a/chromium/media/gpu/android/avda_picture_buffer_manager.h
+++ b/chromium/media/gpu/android/avda_picture_buffer_manager.h
@@ -28,7 +28,7 @@ class MediaCodecBridge;
// AVDAPictureBufferManager is used by AVDA to associate its PictureBuffers with
// MediaCodec output buffers. It attaches AVDACodecImages to the PictureBuffer
// textures so that when they're used to draw the AVDACodecImage can release the
-// MediaCodec buffer to the backing Surface. If the Surface is a SurfaceTexture,
+// MediaCodec buffer to the backing Surface. If the Surface is a TextureOwner,
// the front buffer can then be used to draw without needing to copy the pixels.
// If the Surface is a SurfaceView, the release causes the frame to be displayed
// immediately.
@@ -46,7 +46,7 @@ class MEDIA_GPU_EXPORT AVDAPictureBufferManager {
// (e.g., SurfaceFlinger). We will ensure that any reference to the bundle
// is dropped if the overlay sends OnSurfaceDestroyed.
//
- // Without an overlay, we will create a SurfaceTexture and add it (and its
+ // Without an overlay, we will create a TextureOwner and add it (and its
// surface) to |surface_bundle|. We will arrange to consume the buffers at
// the right time, in addition to releasing the codec buffers for rendering.
//
@@ -111,9 +111,9 @@ class MEDIA_GPU_EXPORT AVDAPictureBufferManager {
AVDAStateProvider* const state_provider_;
- // The SurfaceTexture to render to. Non-null after Initialize() if
+ // The texture owner to render to. Non-null after Initialize() if
// we're not rendering to a SurfaceView.
- scoped_refptr<SurfaceTextureGLOwner> surface_texture_;
+ scoped_refptr<TextureOwner> texture_owner_;
MediaCodecBridge* media_codec_;
diff --git a/chromium/media/gpu/android/avda_shared_state.cc b/chromium/media/gpu/android/avda_shared_state.cc
index dbb035370b3..fa667246e74 100644
--- a/chromium/media/gpu/android/avda_shared_state.cc
+++ b/chromium/media/gpu/android/avda_shared_state.cc
@@ -33,23 +33,22 @@ AVDASharedState::AVDASharedState(
AVDASharedState::~AVDASharedState() = default;
-void AVDASharedState::RenderCodecBufferToSurfaceTexture(
- MediaCodecBridge* codec,
- int codec_buffer_index) {
- if (surface_texture()->IsExpectingFrameAvailable())
- surface_texture()->WaitForFrameAvailable();
+void AVDASharedState::RenderCodecBufferToTextureOwner(MediaCodecBridge* codec,
+ int codec_buffer_index) {
+ if (texture_owner()->IsExpectingFrameAvailable())
+ texture_owner()->WaitForFrameAvailable();
codec->ReleaseOutputBuffer(codec_buffer_index, true);
- surface_texture()->SetReleaseTimeToNow();
+ texture_owner()->SetReleaseTimeToNow();
}
void AVDASharedState::WaitForFrameAvailable() {
- surface_texture()->WaitForFrameAvailable();
+ texture_owner()->WaitForFrameAvailable();
}
void AVDASharedState::UpdateTexImage() {
- surface_texture()->UpdateTexImage();
+ texture_owner()->UpdateTexImage();
// Helpfully, this is already column major.
- surface_texture()->GetTransformMatrix(gl_matrix_);
+ texture_owner()->GetTransformMatrix(gl_matrix_);
}
void AVDASharedState::GetTransformMatrix(float matrix[16]) const {
@@ -57,8 +56,8 @@ void AVDASharedState::GetTransformMatrix(float matrix[16]) const {
}
void AVDASharedState::ClearReleaseTime() {
- if (surface_texture())
- surface_texture()->IgnorePendingRelease();
+ if (texture_owner())
+ texture_owner()->IgnorePendingRelease();
}
void AVDASharedState::ClearOverlay(AndroidOverlay* overlay_raw) {
diff --git a/chromium/media/gpu/android/avda_shared_state.h b/chromium/media/gpu/android/avda_shared_state.h
index 3110eb0dada..954f7759223 100644
--- a/chromium/media/gpu/android/avda_shared_state.h
+++ b/chromium/media/gpu/android/avda_shared_state.h
@@ -29,59 +29,59 @@ class AVDASharedState : public base::RefCounted<AVDASharedState> {
public:
AVDASharedState(scoped_refptr<AVDASurfaceBundle> surface_bundle);
- GLuint surface_texture_service_id() const {
- return surface_texture() ? surface_texture()->GetTextureId() : 0;
+ GLuint texture_owner_service_id() const {
+ return texture_owner() ? texture_owner()->GetTextureId() : 0;
}
- SurfaceTextureGLOwner* surface_texture() const {
- return surface_bundle_ ? surface_bundle_->surface_texture.get() : nullptr;
+ TextureOwner* texture_owner() const {
+ return surface_bundle_ ? surface_bundle_->texture_owner_.get() : nullptr;
}
AndroidOverlay* overlay() const {
return surface_bundle_ ? surface_bundle_->overlay.get() : nullptr;
}
- // Context and surface that |surface_texture_| is bound to, if
- // |surface_texture_| is not null.
+ // Context and surface that |texture_owner_| is bound to, if
+ // |texture_owner_| is not null.
gl::GLContext* context() const {
- return surface_texture() ? surface_texture()->GetContext() : nullptr;
+ return texture_owner() ? texture_owner()->GetContext() : nullptr;
}
gl::GLSurface* surface() const {
- return surface_texture() ? surface_texture()->GetSurface() : nullptr;
+ return texture_owner() ? texture_owner()->GetSurface() : nullptr;
}
// Helper method for coordinating the interactions between
// MediaCodec::ReleaseOutputBuffer() and WaitForFrameAvailable() when
- // rendering to a SurfaceTexture; this method should never be called when
+ // rendering to a TextureOwner; this method should never be called when
// rendering to a SurfaceView.
//
- // The release of the codec buffer to the surface texture is asynchronous, by
+ // The release of the codec buffer to the texture owner is asynchronous, by
// using this helper we can attempt to let this process complete in a non
- // blocking fashion before the SurfaceTexture is used.
+ // blocking fashion before the TextureOwner is used.
//
// Clients should call this method to release the codec buffer for rendering
- // and then call WaitForFrameAvailable() before using the SurfaceTexture. In
- // the ideal case the SurfaceTexture has already been updated, otherwise the
+ // and then call WaitForFrameAvailable() before using the TextureOwner. In
+ // the ideal case the TextureOwner has already been updated, otherwise the
// method will wait for a pro-rated amount of time based on elapsed time up
// to a short deadline.
//
// Some devices do not reliably notify frame availability, so we use a very
// short deadline of only a few milliseconds to avoid indefinite stalls.
- void RenderCodecBufferToSurfaceTexture(MediaCodecBridge* codec,
- int codec_buffer_index);
+ void RenderCodecBufferToTextureOwner(MediaCodecBridge* codec,
+ int codec_buffer_index);
void WaitForFrameAvailable();
- // Helper methods for interacting with |surface_texture_|. See
- // gl::SurfaceTexture for method details.
+ // Helper methods for interacting with |texture_owner_|. See
+ // gl::TextureOwner for method details.
void UpdateTexImage();
// Returns a matrix that needs to be y flipped in order to match the
// StreamTextureMatrix contract. See GLStreamTextureImage::YInvertMatrix().
void GetTransformMatrix(float matrix[16]) const;
- // Resets the last time for RenderCodecBufferToSurfaceTexture(). Should be
+ // Resets the last time for RenderCodecBufferToTextureOwner(). Should be
// called during codec changes.
void ClearReleaseTime();
@@ -96,7 +96,7 @@ class AVDASharedState : public base::RefCounted<AVDASharedState> {
private:
friend class base::RefCounted<AVDASharedState>;
- // Texture matrix of the front buffer of the surface texture.
+ // Texture matrix of the front buffer of the texture owner.
float gl_matrix_[16];
scoped_refptr<AVDASurfaceBundle> surface_bundle_;
diff --git a/chromium/media/gpu/android/avda_surface_bundle.cc b/chromium/media/gpu/android/avda_surface_bundle.cc
index 1c09e8fcdbc..38c13206eea 100644
--- a/chromium/media/gpu/android/avda_surface_bundle.cc
+++ b/chromium/media/gpu/android/avda_surface_bundle.cc
@@ -20,28 +20,27 @@ AVDASurfaceBundle::AVDASurfaceBundle(std::unique_ptr<AndroidOverlay> overlay)
overlay(std::move(overlay)),
weak_factory_(this) {}
-AVDASurfaceBundle::AVDASurfaceBundle(
- scoped_refptr<SurfaceTextureGLOwner> surface_texture_owner)
+AVDASurfaceBundle::AVDASurfaceBundle(scoped_refptr<TextureOwner> texture_owner)
: RefCountedDeleteOnSequence<AVDASurfaceBundle>(
base::SequencedTaskRunnerHandle::Get()),
- surface_texture(std::move(surface_texture_owner)),
- surface_texture_surface(surface_texture->CreateJavaSurface()),
+ texture_owner_(std::move(texture_owner)),
+ texture_owner_surface(texture_owner_->CreateJavaSurface()),
weak_factory_(this) {}
AVDASurfaceBundle::~AVDASurfaceBundle() {
// Explicitly free the surface first, just to be sure that it's deleted before
- // the SurfaceTexture is.
- surface_texture_surface = gl::ScopedJavaSurface();
+ // the TextureOwner is.
+ texture_owner_surface = gl::ScopedJavaSurface();
// Also release the back buffers.
- if (surface_texture) {
- auto task_runner = surface_texture->task_runner();
+ if (texture_owner_) {
+ auto task_runner = texture_owner_->task_runner();
if (task_runner->RunsTasksInCurrentSequence()) {
- surface_texture->ReleaseBackBuffers();
+ texture_owner_->ReleaseBackBuffers();
} else {
task_runner->PostTask(
- FROM_HERE, base::Bind(&SurfaceTextureGLOwner::ReleaseBackBuffers,
- surface_texture));
+ FROM_HERE, base::BindRepeating(&TextureOwner::ReleaseBackBuffers,
+ texture_owner_));
}
}
}
@@ -51,7 +50,7 @@ const base::android::JavaRef<jobject>& AVDASurfaceBundle::GetJavaSurface()
if (overlay)
return overlay->GetJavaSurface();
else
- return surface_texture_surface.j_surface();
+ return texture_owner_surface.j_surface();
}
AVDASurfaceBundle::ScheduleLayoutCB AVDASurfaceBundle::GetScheduleLayoutCB() {
diff --git a/chromium/media/gpu/android/avda_surface_bundle.h b/chromium/media/gpu/android/avda_surface_bundle.h
index e0dc749df37..c24eebe2569 100644
--- a/chromium/media/gpu/android/avda_surface_bundle.h
+++ b/chromium/media/gpu/android/avda_surface_bundle.h
@@ -14,7 +14,7 @@
namespace media {
-// AVDASurfaceBundle is a Java surface, and the SurfaceTexture or Overlay that
+// AVDASurfaceBundle is a Java surface, and the TextureOwner or Overlay that
// backs it.
//
// Once a MediaCodec is configured with an output surface, the corresponding
@@ -29,8 +29,7 @@ struct MEDIA_GPU_EXPORT AVDASurfaceBundle
// Create an empty bundle to be manually populated.
explicit AVDASurfaceBundle();
explicit AVDASurfaceBundle(std::unique_ptr<AndroidOverlay> overlay);
- explicit AVDASurfaceBundle(
- scoped_refptr<SurfaceTextureGLOwner> surface_texture_owner);
+ explicit AVDASurfaceBundle(scoped_refptr<TextureOwner> texture_owner);
const base::android::JavaRef<jobject>& GetJavaSurface() const;
@@ -39,12 +38,12 @@ struct MEDIA_GPU_EXPORT AVDASurfaceBundle
// |this|; the cb will do nothing if |this| is destroyed.
ScheduleLayoutCB GetScheduleLayoutCB();
- // The Overlay or SurfaceTexture.
+ // The Overlay or TextureOwner.
std::unique_ptr<AndroidOverlay> overlay;
- scoped_refptr<SurfaceTextureGLOwner> surface_texture;
+ scoped_refptr<TextureOwner> texture_owner_;
- // The Java surface for |surface_texture|.
- gl::ScopedJavaSurface surface_texture_surface;
+ // The Java surface for |texture_owner_|.
+ gl::ScopedJavaSurface texture_owner_surface;
private:
~AVDASurfaceBundle();
diff --git a/chromium/media/gpu/android/codec_image.cc b/chromium/media/gpu/android/codec_image.cc
index 80a9616e881..658edfc574a 100644
--- a/chromium/media/gpu/android/codec_image.cc
+++ b/chromium/media/gpu/android/codec_image.cc
@@ -10,35 +10,34 @@
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/command_buffer/service/texture_manager.h"
-#include "media/gpu/android/surface_texture_gl_owner.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/scoped_make_current.h"
namespace media {
namespace {
-// Makes |surface_texture|'s context current if it isn't already.
+// Makes |texture_owner|'s context current if it isn't already.
std::unique_ptr<ui::ScopedMakeCurrent> MakeCurrentIfNeeded(
- SurfaceTextureGLOwner* surface_texture) {
+ TextureOwner* texture_owner) {
// Note: this works for virtual contexts too, because IsCurrent() returns true
// if their shared platform context is current, regardless of which virtual
// context is current.
return std::unique_ptr<ui::ScopedMakeCurrent>(
- surface_texture->GetContext()->IsCurrent(nullptr)
+ texture_owner->GetContext()->IsCurrent(nullptr)
? nullptr
- : new ui::ScopedMakeCurrent(surface_texture->GetContext(),
- surface_texture->GetSurface()));
+ : new ui::ScopedMakeCurrent(texture_owner->GetContext(),
+ texture_owner->GetSurface()));
}
} // namespace
CodecImage::CodecImage(
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<SurfaceTextureGLOwner> surface_texture,
+ scoped_refptr<TextureOwner> texture_owner,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb)
: phase_(Phase::kInCodec),
output_buffer_(std::move(output_buffer)),
- surface_texture_(std::move(surface_texture)),
+ texture_owner_(std::move(texture_owner)),
promotion_hint_cb_(std::move(promotion_hint_cb)) {}
CodecImage::~CodecImage() {
@@ -51,7 +50,10 @@ void CodecImage::SetDestructionCb(DestructionCb destruction_cb) {
}
gfx::Size CodecImage::GetSize() {
- return output_buffer_->size();
+ // Return a nonzero size, to avoid GL errors, even if we dropped the codec
+ // buffer already. Note that if we dropped it, there's no data in the
+ // texture anyway, so the old size doesn't matter.
+ return output_buffer_ ? output_buffer_->size() : gfx::Size(1, 1);
}
unsigned CodecImage::GetInternalFormat() {
@@ -65,16 +67,16 @@ bool CodecImage::BindTexImage(unsigned target) {
void CodecImage::ReleaseTexImage(unsigned target) {}
bool CodecImage::CopyTexImage(unsigned target) {
- if (!surface_texture_ || target != GL_TEXTURE_EXTERNAL_OES)
+ if (!texture_owner_ || target != GL_TEXTURE_EXTERNAL_OES)
return false;
GLint bound_service_id = 0;
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &bound_service_id);
- // The currently bound texture should be the surface texture's texture.
- if (bound_service_id != static_cast<GLint>(surface_texture_->GetTextureId()))
+ // The currently bound texture should be the texture owner's texture.
+ if (bound_service_id != static_cast<GLint>(texture_owner_->GetTextureId()))
return false;
- RenderToSurfaceTextureFrontBuffer(BindingsMode::kDontRestore);
+ RenderToTextureOwnerFrontBuffer(BindingsMode::kDontRestore);
return true;
}
@@ -89,10 +91,11 @@ bool CodecImage::ScheduleOverlayPlane(gfx::AcceleratedWidget widget,
gfx::OverlayTransform transform,
const gfx::Rect& bounds_rect,
const gfx::RectF& crop_rect,
- bool enable_blend) {
- if (surface_texture_) {
+ bool enable_blend,
+ gfx::GpuFence* gpu_fence) {
+ if (texture_owner_) {
DVLOG(1) << "Invalid call to ScheduleOverlayPlane; this image is "
- "SurfaceTexture backed.";
+ "TextureOwner backed.";
return false;
}
@@ -124,14 +127,14 @@ void CodecImage::GetTextureMatrix(float matrix[16]) {
0, 1, 0, 1 //
};
memcpy(matrix, kYInvertedIdentity, sizeof(kYInvertedIdentity));
- if (!surface_texture_)
+ if (!texture_owner_)
return;
// The matrix is available after we render to the front buffer. If that fails
// we'll return the matrix from the previous frame, which is more likely to be
// correct than the identity matrix anyway.
- RenderToSurfaceTextureFrontBuffer(BindingsMode::kDontRestore);
- surface_texture_->GetTransformMatrix(matrix);
+ RenderToTextureOwnerFrontBuffer(BindingsMode::kDontRestore);
+ texture_owner_->GetTransformMatrix(matrix);
YInvertMatrix(matrix);
}
@@ -142,7 +145,7 @@ void CodecImage::NotifyPromotionHint(bool promotion_hint,
int display_height) {
// If this is promotable, and we're using an overlay, then skip sending this
// hint. ScheduleOverlayPlane will do it.
- if (promotion_hint && !surface_texture_)
+ if (promotion_hint && !texture_owner_)
return;
promotion_hint_cb_.Run(PromotionHintAggregator::Hint(
@@ -151,13 +154,13 @@ void CodecImage::NotifyPromotionHint(bool promotion_hint,
}
bool CodecImage::RenderToFrontBuffer() {
- return surface_texture_
- ? RenderToSurfaceTextureFrontBuffer(BindingsMode::kRestore)
+ return texture_owner_
+ ? RenderToTextureOwnerFrontBuffer(BindingsMode::kRestore)
: RenderToOverlay();
}
-bool CodecImage::RenderToSurfaceTextureBackBuffer() {
- DCHECK(surface_texture_);
+bool CodecImage::RenderToTextureOwnerBackBuffer() {
+ DCHECK(texture_owner_);
DCHECK_NE(phase_, Phase::kInFrontBuffer);
if (phase_ == Phase::kInBackBuffer)
return true;
@@ -166,35 +169,35 @@ bool CodecImage::RenderToSurfaceTextureBackBuffer() {
// Wait for a previous frame available so we don't confuse it with the one
// we're about to release.
- if (surface_texture_->IsExpectingFrameAvailable())
- surface_texture_->WaitForFrameAvailable();
+ if (texture_owner_->IsExpectingFrameAvailable())
+ texture_owner_->WaitForFrameAvailable();
if (!output_buffer_->ReleaseToSurface()) {
phase_ = Phase::kInvalidated;
return false;
}
phase_ = Phase::kInBackBuffer;
- surface_texture_->SetReleaseTimeToNow();
+ texture_owner_->SetReleaseTimeToNow();
return true;
}
-bool CodecImage::RenderToSurfaceTextureFrontBuffer(BindingsMode bindings_mode) {
- DCHECK(surface_texture_);
+bool CodecImage::RenderToTextureOwnerFrontBuffer(BindingsMode bindings_mode) {
+ DCHECK(texture_owner_);
if (phase_ == Phase::kInFrontBuffer)
return true;
if (phase_ == Phase::kInvalidated)
return false;
// Render it to the back buffer if it's not already there.
- if (!RenderToSurfaceTextureBackBuffer())
+ if (!RenderToTextureOwnerBackBuffer())
return false;
// The image is now in the back buffer, so promote it to the front buffer.
phase_ = Phase::kInFrontBuffer;
- if (surface_texture_->IsExpectingFrameAvailable())
- surface_texture_->WaitForFrameAvailable();
+ if (texture_owner_->IsExpectingFrameAvailable())
+ texture_owner_->WaitForFrameAvailable();
std::unique_ptr<ui::ScopedMakeCurrent> scoped_make_current =
- MakeCurrentIfNeeded(surface_texture_.get());
+ MakeCurrentIfNeeded(texture_owner_.get());
// If we have to switch contexts, then we always want to restore the
// bindings.
bool should_restore_bindings =
@@ -203,7 +206,7 @@ bool CodecImage::RenderToSurfaceTextureFrontBuffer(BindingsMode bindings_mode) {
GLint bound_service_id = 0;
if (should_restore_bindings)
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &bound_service_id);
- surface_texture_->UpdateTexImage();
+ texture_owner_->UpdateTexImage();
if (should_restore_bindings)
glBindTexture(GL_TEXTURE_EXTERNAL_OES, bound_service_id);
return true;
diff --git a/chromium/media/gpu/android/codec_image.h b/chromium/media/gpu/android/codec_image.h
index 19eb6c4b72f..d1b6b5ad735 100644
--- a/chromium/media/gpu/android/codec_image.h
+++ b/chromium/media/gpu/android/codec_image.h
@@ -19,7 +19,7 @@
namespace media {
-// A GLImage that renders MediaCodec buffers to a SurfaceTexture or overlay
+// A GLImage that renders MediaCodec buffers to a TextureOwner or overlay
// as needed in order to draw them.
class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
public:
@@ -28,7 +28,7 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
using DestructionCb = base::RepeatingCallback<void(CodecImage*)>;
CodecImage(std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<SurfaceTextureGLOwner> surface_texture,
+ scoped_refptr<TextureOwner> texture_owner,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb);
void SetDestructionCb(DestructionCb destruction_cb);
@@ -47,7 +47,8 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
gfx::OverlayTransform transform,
const gfx::Rect& bounds_rect,
const gfx::RectF& crop_rect,
- bool enable_blend) override;
+ bool enable_blend,
+ gfx::GpuFence* gpu_fence) override;
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
void Flush() override {}
void OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
@@ -66,12 +67,10 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
return phase_ == Phase::kInFrontBuffer;
}
- // Whether this image is backed by a surface texture.
- bool is_surface_texture_backed() const { return !!surface_texture_; }
+ // Whether this image is backed by a texture owner.
+ bool is_texture_owner_backed() const { return !!texture_owner_; }
- scoped_refptr<SurfaceTextureGLOwner> surface_texture() const {
- return surface_texture_;
- }
+ scoped_refptr<TextureOwner> texture_owner() const { return texture_owner_; }
// Renders this image to the front buffer of its backing surface.
// Returns true if the buffer is in the front buffer. Returns false if the
@@ -79,10 +78,10 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
// possible to render it.
bool RenderToFrontBuffer();
- // Renders this image to the back buffer of its surface texture. Only valid if
- // is_surface_texture_backed(). Returns true if the buffer is in the back
+ // Renders this image to the back buffer of its texture owner. Only valid if
+ // is_texture_owner_backed(). Returns true if the buffer is in the back
// buffer. Returns false if the buffer was invalidated.
- bool RenderToSurfaceTextureBackBuffer();
+ bool RenderToTextureOwnerBackBuffer();
// Called when we're no longer renderable because our surface is gone. We'll
// discard any codec buffer, and generally do nothing.
@@ -97,16 +96,16 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
// kInFrontBuffer and kInvalidated are terminal.
enum class Phase { kInCodec, kInBackBuffer, kInFrontBuffer, kInvalidated };
- // Renders this image to the surface texture front buffer by first rendering
+ // Renders this image to the texture owner front buffer by first rendering
// it to the back buffer if it's not already there, and then waiting for the
// frame available event before calling UpdateTexImage(). Passing
// BindingsMode::kDontRestore skips the work of restoring the current texture
- // bindings if the surface texture's context is already current. Otherwise,
+ // bindings if the texture owner's context is already current. Otherwise,
// this switches contexts and preserves the texture bindings.
// Returns true if the buffer is in the front buffer. Returns false if the
// buffer was invalidated.
enum class BindingsMode { kRestore, kDontRestore };
- bool RenderToSurfaceTextureFrontBuffer(BindingsMode bindings_mode);
+ bool RenderToTextureOwnerFrontBuffer(BindingsMode bindings_mode);
// Renders this image to the overlay. Returns true if the buffer is in the
// overlay front buffer. Returns false if the buffer was invalidated.
@@ -118,9 +117,9 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
// The buffer backing this image.
std::unique_ptr<CodecOutputBuffer> output_buffer_;
- // The SurfaceTexture that |output_buffer_| will be rendered to. Or null, if
+ // The TextureOwner that |output_buffer_| will be rendered to. Or null, if
// this image is backed by an overlay.
- scoped_refptr<SurfaceTextureGLOwner> surface_texture_;
+ scoped_refptr<TextureOwner> texture_owner_;
// The bounds last sent to the overlay.
gfx::Rect most_recent_bounds_;
diff --git a/chromium/media/gpu/android/codec_image_group_unittest.cc b/chromium/media/gpu/android/codec_image_group_unittest.cc
index cd568d1f631..90638553630 100644
--- a/chromium/media/gpu/android/codec_image_group_unittest.cc
+++ b/chromium/media/gpu/android/codec_image_group_unittest.cc
@@ -7,6 +7,7 @@
#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/ref_counted.h"
+#include "base/sequenced_task_runner.h"
#include "base/test/scoped_task_environment.h"
#include "base/test/test_simple_task_runner.h"
#include "base/threading/thread.h"
diff --git a/chromium/media/gpu/android/codec_image_unittest.cc b/chromium/media/gpu/android/codec_image_unittest.cc
index 702e5d7d44a..52855a32940 100644
--- a/chromium/media/gpu/android/codec_image_unittest.cc
+++ b/chromium/media/gpu/android/codec_image_unittest.cc
@@ -13,7 +13,7 @@
#include "media/base/android/media_codec_bridge.h"
#include "media/base/android/mock_media_codec_bridge.h"
#include "media/gpu/android/codec_image.h"
-#include "media/gpu/android/mock_surface_texture_gl_owner.h"
+#include "media/gpu/android/mock_texture_owner.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/geometry/rect.h"
@@ -59,8 +59,8 @@ class CodecImageTest : public testing::Test {
glGenTextures(1, &texture_id);
// The tests rely on this texture being bound.
glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id);
- surface_texture_ = new NiceMock<MockSurfaceTextureGLOwner>(
- texture_id, context_.get(), surface_.get());
+ texture_owner_ = new NiceMock<MockTextureOwner>(texture_id, context_.get(),
+ surface_.get());
}
void TearDown() override {
@@ -71,14 +71,14 @@ class CodecImageTest : public testing::Test {
wrapper_->TakeCodecSurfacePair();
}
- enum ImageKind { kOverlay, kSurfaceTexture };
+ enum ImageKind { kOverlay, kTextureOwner };
scoped_refptr<CodecImage> NewImage(
ImageKind kind,
CodecImage::DestructionCb destruction_cb = base::DoNothing()) {
std::unique_ptr<CodecOutputBuffer> buffer;
wrapper_->DequeueOutputBuffer(nullptr, nullptr, &buffer);
scoped_refptr<CodecImage> image = new CodecImage(
- std::move(buffer), kind == kSurfaceTexture ? surface_texture_ : nullptr,
+ std::move(buffer), kind == kTextureOwner ? texture_owner_ : nullptr,
base::BindRepeating(&PromotionHintReceiver::OnPromotionHint,
base::Unretained(&promotion_hint_receiver_)));
@@ -89,7 +89,7 @@ class CodecImageTest : public testing::Test {
base::test::ScopedTaskEnvironment scoped_task_environment_;
NiceMock<MockMediaCodecBridge>* codec_;
std::unique_ptr<CodecWrapper> wrapper_;
- scoped_refptr<NiceMock<MockSurfaceTextureGLOwner>> surface_texture_;
+ scoped_refptr<NiceMock<MockTextureOwner>> texture_owner_;
scoped_refptr<gl::GLContext> context_;
scoped_refptr<gl::GLShareGroup> share_group_;
scoped_refptr<gl::GLSurface> surface_;
@@ -110,7 +110,7 @@ TEST_F(CodecImageTest, DestructionCbRuns) {
}
TEST_F(CodecImageTest, ImageStartsUnrendered) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
ASSERT_FALSE(i->was_rendered_to_front_buffer());
}
@@ -119,20 +119,20 @@ TEST_F(CodecImageTest, CopyTexImageIsInvalidForOverlayImages) {
ASSERT_FALSE(i->CopyTexImage(GL_TEXTURE_EXTERNAL_OES));
}
-TEST_F(CodecImageTest, ScheduleOverlayPlaneIsInvalidForSurfaceTextureImages) {
- auto i = NewImage(kSurfaceTexture);
+TEST_F(CodecImageTest, ScheduleOverlayPlaneIsInvalidForTextureOwnerImages) {
+ auto i = NewImage(kTextureOwner);
ASSERT_FALSE(i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0,
gfx::OverlayTransform(), gfx::Rect(),
- gfx::RectF(), true));
+ gfx::RectF(), true, nullptr));
}
TEST_F(CodecImageTest, CopyTexImageFailsIfTargetIsNotOES) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
ASSERT_FALSE(i->CopyTexImage(GL_TEXTURE_2D));
}
TEST_F(CodecImageTest, CopyTexImageFailsIfTheWrongTextureIsBound) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
GLuint wrong_texture_id;
glGenTextures(1, &wrong_texture_id);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, wrong_texture_id);
@@ -140,29 +140,29 @@ TEST_F(CodecImageTest, CopyTexImageFailsIfTheWrongTextureIsBound) {
}
TEST_F(CodecImageTest, CopyTexImageCanBeCalledRepeatedly) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
ASSERT_TRUE(i->CopyTexImage(GL_TEXTURE_EXTERNAL_OES));
ASSERT_TRUE(i->CopyTexImage(GL_TEXTURE_EXTERNAL_OES));
}
TEST_F(CodecImageTest, CopyTexImageTriggersFrontBufferRendering) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
// Verify that the release comes before the wait.
InSequence s;
EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- EXPECT_CALL(*surface_texture_, WaitForFrameAvailable());
- EXPECT_CALL(*surface_texture_, UpdateTexImage());
+ EXPECT_CALL(*texture_owner_, WaitForFrameAvailable());
+ EXPECT_CALL(*texture_owner_, UpdateTexImage());
i->CopyTexImage(GL_TEXTURE_EXTERNAL_OES);
ASSERT_TRUE(i->was_rendered_to_front_buffer());
}
TEST_F(CodecImageTest, GetTextureMatrixTriggersFrontBufferRendering) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
InSequence s;
EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- EXPECT_CALL(*surface_texture_, WaitForFrameAvailable());
- EXPECT_CALL(*surface_texture_, UpdateTexImage());
- EXPECT_CALL(*surface_texture_, GetTransformMatrix(_));
+ EXPECT_CALL(*texture_owner_, WaitForFrameAvailable());
+ EXPECT_CALL(*texture_owner_, UpdateTexImage());
+ EXPECT_CALL(*texture_owner_, GetTransformMatrix(_));
float matrix[16];
i->GetTextureMatrix(matrix);
ASSERT_TRUE(i->was_rendered_to_front_buffer());
@@ -185,43 +185,43 @@ TEST_F(CodecImageTest, ScheduleOverlayPlaneTriggersFrontBufferRendering) {
PromotionHintAggregator::Hint hint(gfx::Rect(1, 2, 3, 4), true);
EXPECT_CALL(promotion_hint_receiver_, OnPromotionHint(hint));
i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0, gfx::OverlayTransform(),
- hint.screen_rect, gfx::RectF(), true);
+ hint.screen_rect, gfx::RectF(), true, nullptr);
ASSERT_TRUE(i->was_rendered_to_front_buffer());
}
-TEST_F(CodecImageTest, CanRenderSurfaceTextureImageToBackBuffer) {
- auto i = NewImage(kSurfaceTexture);
- ASSERT_TRUE(i->RenderToSurfaceTextureBackBuffer());
+TEST_F(CodecImageTest, CanRenderTextureOwnerImageToBackBuffer) {
+ auto i = NewImage(kTextureOwner);
+ ASSERT_TRUE(i->RenderToTextureOwnerBackBuffer());
ASSERT_FALSE(i->was_rendered_to_front_buffer());
}
TEST_F(CodecImageTest, CodecBufferInvalidationResultsInRenderingFailure) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
// Invalidate the backing codec buffer.
wrapper_->TakeCodecSurfacePair();
- ASSERT_FALSE(i->RenderToSurfaceTextureBackBuffer());
+ ASSERT_FALSE(i->RenderToTextureOwnerBackBuffer());
}
TEST_F(CodecImageTest, RenderToBackBufferDoesntWait) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
InSequence s;
EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- EXPECT_CALL(*surface_texture_, SetReleaseTimeToNow());
- EXPECT_CALL(*surface_texture_, WaitForFrameAvailable()).Times(0);
- ASSERT_TRUE(i->RenderToSurfaceTextureBackBuffer());
+ EXPECT_CALL(*texture_owner_, SetReleaseTimeToNow());
+ EXPECT_CALL(*texture_owner_, WaitForFrameAvailable()).Times(0);
+ ASSERT_TRUE(i->RenderToTextureOwnerBackBuffer());
}
TEST_F(CodecImageTest, PromotingTheBackBufferWaits) {
- auto i = NewImage(kSurfaceTexture);
- EXPECT_CALL(*surface_texture_, SetReleaseTimeToNow()).Times(1);
- i->RenderToSurfaceTextureBackBuffer();
- EXPECT_CALL(*surface_texture_, WaitForFrameAvailable());
+ auto i = NewImage(kTextureOwner);
+ EXPECT_CALL(*texture_owner_, SetReleaseTimeToNow()).Times(1);
+ i->RenderToTextureOwnerBackBuffer();
+ EXPECT_CALL(*texture_owner_, WaitForFrameAvailable());
ASSERT_TRUE(i->RenderToFrontBuffer());
}
TEST_F(CodecImageTest, PromotingTheBackBufferAlwaysSucceeds) {
- auto i = NewImage(kSurfaceTexture);
- i->RenderToSurfaceTextureBackBuffer();
+ auto i = NewImage(kTextureOwner);
+ i->RenderToTextureOwnerBackBuffer();
// Invalidating the codec buffer doesn't matter after it's rendered to the
// back buffer.
wrapper_->TakeCodecSurfacePair();
@@ -229,9 +229,9 @@ TEST_F(CodecImageTest, PromotingTheBackBufferAlwaysSucceeds) {
}
TEST_F(CodecImageTest, FrontBufferRenderingFailsIfBackBufferRenderingFailed) {
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
wrapper_->TakeCodecSurfacePair();
- i->RenderToSurfaceTextureBackBuffer();
+ i->RenderToTextureOwnerBackBuffer();
ASSERT_FALSE(i->RenderToFrontBuffer());
}
@@ -239,8 +239,8 @@ TEST_F(CodecImageTest, RenderToFrontBufferRestoresTextureBindings) {
GLuint pre_bound_texture = 0;
glGenTextures(1, &pre_bound_texture);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, pre_bound_texture);
- auto i = NewImage(kSurfaceTexture);
- EXPECT_CALL(*surface_texture_, UpdateTexImage());
+ auto i = NewImage(kTextureOwner);
+ EXPECT_CALL(*texture_owner_, UpdateTexImage());
i->RenderToFrontBuffer();
GLint post_bound_texture = 0;
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &post_bound_texture);
@@ -257,9 +257,9 @@ TEST_F(CodecImageTest, RenderToFrontBufferRestoresGLContext) {
context->Initialize(surface.get(), gl::GLContextAttribs());
ASSERT_TRUE(context->MakeCurrent(surface.get()));
- auto i = NewImage(kSurfaceTexture);
+ auto i = NewImage(kTextureOwner);
// Our context should not be current when UpdateTexImage() is called.
- EXPECT_CALL(*surface_texture_, UpdateTexImage()).WillOnce(Invoke([&]() {
+ EXPECT_CALL(*texture_owner_, UpdateTexImage()).WillOnce(Invoke([&]() {
ASSERT_FALSE(context->IsCurrent(surface.get()));
}));
i->RenderToFrontBuffer();
@@ -281,12 +281,12 @@ TEST_F(CodecImageTest, ScheduleOverlayPlaneDoesntSendDuplicateHints) {
EXPECT_CALL(promotion_hint_receiver_, OnPromotionHint(hint1)).Times(1);
EXPECT_CALL(promotion_hint_receiver_, OnPromotionHint(hint2)).Times(1);
i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0, gfx::OverlayTransform(),
- hint1.screen_rect, gfx::RectF(), true);
+ hint1.screen_rect, gfx::RectF(), true, nullptr);
i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0, gfx::OverlayTransform(),
- hint1.screen_rect, gfx::RectF(), true);
+ hint1.screen_rect, gfx::RectF(), true, nullptr);
// Sending a different rectangle should send another hint.
i->ScheduleOverlayPlane(gfx::AcceleratedWidget(), 0, gfx::OverlayTransform(),
- hint2.screen_rect, gfx::RectF(), true);
+ hint2.screen_rect, gfx::RectF(), true, nullptr);
}
} // namespace media
diff --git a/chromium/media/gpu/android/codec_wrapper.cc b/chromium/media/gpu/android/codec_wrapper.cc
index c9df3802c8a..98f9225ad01 100644
--- a/chromium/media/gpu/android/codec_wrapper.cc
+++ b/chromium/media/gpu/android/codec_wrapper.cc
@@ -226,9 +226,10 @@ CodecWrapperImpl::QueueStatus CodecWrapperImpl::QueueInputBuffer(
// Queue a buffer.
const DecryptConfig* decrypt_config = buffer.decrypt_config();
- bool encrypted = decrypt_config && decrypt_config->is_encrypted();
MediaCodecStatus status;
- if (encrypted) {
+ if (decrypt_config) {
+ // TODO(crbug.com/813845): Use encryption scheme settings from
+ // DecryptConfig.
status = codec_->QueueSecureInputBuffer(
input_buffer, buffer.data(), buffer.data_size(),
decrypt_config->key_id(), decrypt_config->iv(),
diff --git a/chromium/media/gpu/android/codec_wrapper_unittest.cc b/chromium/media/gpu/android/codec_wrapper_unittest.cc
index 7ed54f33ff1..d649baacac7 100644
--- a/chromium/media/gpu/android/codec_wrapper_unittest.cc
+++ b/chromium/media/gpu/android/codec_wrapper_unittest.cc
@@ -8,7 +8,6 @@
#include "base/bind.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
-#include "base/message_loop/message_loop.h"
#include "base/test/mock_callback.h"
#include "base/test/scoped_task_environment.h"
#include "media/base/android/media_codec_bridge.h"
diff --git a/chromium/media/gpu/android/command_buffer_stub_wrapper.h b/chromium/media/gpu/android/command_buffer_stub_wrapper.h
deleted file mode 100644
index 38bed44062a..00000000000
--- a/chromium/media/gpu/android/command_buffer_stub_wrapper.h
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_COMMAND_BUFFER_STUB_WRAPPER_H_
-#define MEDIA_GPU_ANDROID_COMMAND_BUFFER_STUB_WRAPPER_H_
-
-#include "gpu/ipc/service/command_buffer_stub.h"
-
-namespace media {
-
-// Helpful class to wrap a CommandBufferStub that we can mock out more easily.
-// Mocking out a CommandBufferStub + DecoderContext is quite annoying, since we
-// really need very little.
-// TODO(liberato): consider making this refcounted, so that one injected mock
-// can be re-used as its passed from class to class. In that case, it likely
-// has to keep its own DestructionObserver list, and register itself as a
-// DestructionObserver on the stub.
-// TODO(liberato): once this interface is stable, move this to media/gpu and
-// use it on non-android platforms.
-class CommandBufferStubWrapper {
- public:
- virtual ~CommandBufferStubWrapper() = default;
-
- // Make the stub's context current. Return true on success.
- virtual bool MakeCurrent() = 0;
-
- // Add or remove a destruction observer on the underlying stub.
- virtual void AddDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer) = 0;
- virtual void RemoveDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer) = 0;
-
- // To support VideoFrameFactoryImpl, we need at least GetTextureManager().
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_COMMAND_BUFFER_STUB_WRAPPER_H_
diff --git a/chromium/media/gpu/android/command_buffer_stub_wrapper_impl.cc b/chromium/media/gpu/android/command_buffer_stub_wrapper_impl.cc
deleted file mode 100644
index 88c43914513..00000000000
--- a/chromium/media/gpu/android/command_buffer_stub_wrapper_impl.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "command_buffer_stub_wrapper_impl.h"
-
-#include "gpu/ipc/service/command_buffer_stub.h"
-
-namespace media {
-
-CommandBufferStubWrapperImpl::CommandBufferStubWrapperImpl(
- gpu::CommandBufferStub* stub)
- : stub_(stub) {}
-
-bool CommandBufferStubWrapperImpl::MakeCurrent() {
- // Support |!stub_| as a convenience.
- return stub_ && stub_->decoder_context()->MakeCurrent();
-}
-
-void CommandBufferStubWrapperImpl::AddDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer) {
- stub_->AddDestructionObserver(observer);
-}
-
-void CommandBufferStubWrapperImpl::RemoveDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer) {
- stub_->RemoveDestructionObserver(observer);
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/command_buffer_stub_wrapper_impl.h b/chromium/media/gpu/android/command_buffer_stub_wrapper_impl.h
deleted file mode 100644
index cd209ea0987..00000000000
--- a/chromium/media/gpu/android/command_buffer_stub_wrapper_impl.h
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_COMMAND_BUFFER_STUB_WRAPPER_IMPL_H_
-#define MEDIA_GPU_ANDROID_COMMAND_BUFFER_STUB_WRAPPER_IMPL_H_
-
-#include "media/gpu/android/command_buffer_stub_wrapper.h"
-
-namespace media {
-
-// Implementation that actually talks to a CommandBufferStub
-class CommandBufferStubWrapperImpl : public CommandBufferStubWrapper {
- public:
- explicit CommandBufferStubWrapperImpl(gpu::CommandBufferStub* stub);
- ~CommandBufferStubWrapperImpl() override = default;
-
- // CommandBufferStubWrapper
- bool MakeCurrent() override;
- void AddDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer) override;
- void RemoveDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer) override;
-
- private:
- gpu::CommandBufferStub* stub_;
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_MEDIA_COMMAND_BUFFER_STUB_WRAPPER_IMPL_H_
diff --git a/chromium/media/gpu/android/fake_codec_allocator.cc b/chromium/media/gpu/android/fake_codec_allocator.cc
index 1c9f5d5ea22..e7b80630f07 100644
--- a/chromium/media/gpu/android/fake_codec_allocator.cc
+++ b/chromium/media/gpu/android/fake_codec_allocator.cc
@@ -30,7 +30,7 @@ void FakeCodecAllocator::StopThread(AVDACodecAllocatorClient* client) {}
std::unique_ptr<MediaCodecBridge> FakeCodecAllocator::CreateMediaCodecSync(
scoped_refptr<CodecConfig> config) {
CopyCodecConfig(config);
- MockCreateMediaCodecSync(most_recent_overlay, most_recent_surface_texture);
+ MockCreateMediaCodecSync(most_recent_overlay, most_recent_texture_owner);
std::unique_ptr<MockMediaCodecBridge> codec;
if (allow_sync_creation) {
@@ -57,14 +57,14 @@ void FakeCodecAllocator::CreateMediaCodecAsync(
client_ = client;
codec_creation_pending_ = true;
- MockCreateMediaCodecAsync(most_recent_overlay, most_recent_surface_texture);
+ MockCreateMediaCodecAsync(most_recent_overlay, most_recent_texture_owner);
}
void FakeCodecAllocator::ReleaseMediaCodec(
std::unique_ptr<MediaCodecBridge> media_codec,
scoped_refptr<AVDASurfaceBundle> surface_bundle) {
MockReleaseMediaCodec(media_codec.get(), surface_bundle->overlay.get(),
- surface_bundle->surface_texture.get());
+ surface_bundle->texture_owner_.get());
}
MockMediaCodecBridge* FakeCodecAllocator::ProvideMockCodecAsync(
@@ -97,7 +97,7 @@ void FakeCodecAllocator::ProvideNullCodecAsync() {
void FakeCodecAllocator::CopyCodecConfig(scoped_refptr<CodecConfig> config) {
// CodecConfig isn't copyable, since it has unique_ptrs and such.
most_recent_overlay = config->surface_bundle->overlay.get();
- most_recent_surface_texture = config->surface_bundle->surface_texture.get();
+ most_recent_texture_owner = config->surface_bundle->texture_owner_.get();
most_recent_config->media_crypto =
config->media_crypto
? std::make_unique<base::android::ScopedJavaGlobalRef<jobject>>(
diff --git a/chromium/media/gpu/android/fake_codec_allocator.h b/chromium/media/gpu/android/fake_codec_allocator.h
index 695b937a6c6..5823a06d74f 100644
--- a/chromium/media/gpu/android/fake_codec_allocator.h
+++ b/chromium/media/gpu/android/fake_codec_allocator.h
@@ -7,6 +7,7 @@
#include <memory>
+#include "base/sequenced_task_runner.h"
#include "media/base/android/mock_media_codec_bridge.h"
#include "media/gpu/android/avda_codec_allocator.h"
#include "media/gpu/android/avda_surface_bundle.h"
@@ -29,18 +30,14 @@ class FakeCodecAllocator : public testing::NiceMock<AVDACodecAllocator> {
// These are called with some parameters of the codec config by our
// implementation of their respective functions. This allows tests to set
// expectations on them.
- MOCK_METHOD2(MockCreateMediaCodecSync,
- void(AndroidOverlay*, SurfaceTextureGLOwner*));
- MOCK_METHOD2(MockCreateMediaCodecAsync,
- void(AndroidOverlay*, SurfaceTextureGLOwner*));
+ MOCK_METHOD2(MockCreateMediaCodecSync, void(AndroidOverlay*, TextureOwner*));
+ MOCK_METHOD2(MockCreateMediaCodecAsync, void(AndroidOverlay*, TextureOwner*));
// Note that this doesn't exactly match the signature, since unique_ptr
// doesn't work. plus, we expand |surface_bundle| a bit to make it more
// convenient to set expectations.
MOCK_METHOD3(MockReleaseMediaCodec,
- void(MediaCodecBridge*,
- AndroidOverlay*,
- SurfaceTextureGLOwner*));
+ void(MediaCodecBridge*, AndroidOverlay*, TextureOwner*));
std::unique_ptr<MediaCodecBridge> CreateMediaCodecSync(
scoped_refptr<CodecConfig> config) override;
@@ -69,8 +66,8 @@ class FakeCodecAllocator : public testing::NiceMock<AVDACodecAllocator> {
// The most recent overlay provided during codec allocation.
AndroidOverlay* most_recent_overlay = nullptr;
- // The most recent surface texture provided during codec allocation.
- SurfaceTextureGLOwner* most_recent_surface_texture = nullptr;
+ // The most recent texture owner provided during codec allocation.
+ TextureOwner* most_recent_texture_owner = nullptr;
// Whether CreateMediaCodecSync() is allowed to succeed.
bool allow_sync_creation = true;
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.cc b/chromium/media/gpu/android/media_codec_video_decoder.cc
index 46b45f65492..f7e45e6ef8c 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder.cc
@@ -21,8 +21,10 @@
#include "media/base/video_codecs.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
#include "media/gpu/android/android_video_surface_chooser.h"
#include "media/gpu/android/avda_codec_allocator.h"
+#include "media/media_buildflags.h"
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#include "media/base/android/extract_sps_and_pps.h"
@@ -74,12 +76,14 @@ bool ConfigSupported(const VideoDecoderConfig& config,
return true;
}
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
case kCodecH264:
return true;
#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
case kCodecHEVC:
return true;
#endif
+#endif
default:
return false;
}
@@ -136,17 +140,16 @@ MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
if (!media_crypto_context_)
return;
- DCHECK(cdm_registration_id_);
-
// Cancel previously registered callback (if any).
media_crypto_context_->SetMediaCryptoReadyCB(
MediaCryptoContext::MediaCryptoReadyCB());
- media_crypto_context_->UnregisterPlayer(cdm_registration_id_);
+ if (cdm_registration_id_)
+ media_crypto_context_->UnregisterPlayer(cdm_registration_id_);
}
void MediaCodecVideoDecoder::Destroy() {
- DVLOG(2) << __func__;
+ DVLOG(1) << __func__;
// Mojo callbacks require that they're run before destruction.
if (reset_cb_)
std::move(reset_cb_).Run();
@@ -164,8 +167,9 @@ void MediaCodecVideoDecoder::Initialize(
const OutputCB& output_cb,
const WaitingForDecryptionKeyCB& /* waiting_for_decryption_key_cb */) {
const bool first_init = !decoder_config_.IsValidConfig();
- DVLOG(2) << (first_init ? "Initializing" : "Reinitializing")
- << " MCVD with config: " << config.AsHumanReadableString();
+ DVLOG(1) << (first_init ? "Initializing" : "Reinitializing")
+ << " MCVD with config: " << config.AsHumanReadableString()
+ << ", cdm_context = " << cdm_context;
InitCB bound_init_cb = BindToCurrentLoop(init_cb);
if (!ConfigSupported(config, device_info_)) {
@@ -181,6 +185,8 @@ void MediaCodecVideoDecoder::Initialize(
}
decoder_config_ = config;
+ surface_chooser_helper_.SetVideoRotation(decoder_config_.video_rotation());
+
output_cb_ = output_cb;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -188,46 +194,35 @@ void MediaCodecVideoDecoder::Initialize(
ExtractSpsAndPps(config.extra_data(), &csd0_, &csd1_);
#endif
- // For encrypted content, defer signalling success until the Cdm is ready.
- if (config.is_encrypted()) {
+ // We only support setting CDM at first initialization. Even if the initial
+ // config is clear, we'll still try to set CDM since we may switch to an
+ // encrypted config later.
+ if (first_init && cdm_context && cdm_context->GetMediaCryptoContext()) {
+ DCHECK(media_crypto_.is_null());
SetCdm(cdm_context, init_cb);
return;
}
+ if (config.is_encrypted() && media_crypto_.is_null()) {
+ DVLOG(1) << "No MediaCrypto to handle encrypted config";
+ bound_init_cb.Run(false);
+ return;
+ }
+
// Do the rest of the initialization lazily on the first decode.
init_cb.Run(true);
}
void MediaCodecVideoDecoder::SetCdm(CdmContext* cdm_context,
const InitCB& init_cb) {
- if (!cdm_context) {
- LOG(ERROR) << "No CDM provided";
- EnterTerminalState(State::kError);
- init_cb.Run(false);
- return;
- }
+ DVLOG(1) << __func__;
+ DCHECK(cdm_context) << "No CDM provided";
+ DCHECK(cdm_context->GetMediaCryptoContext());
media_crypto_context_ = cdm_context->GetMediaCryptoContext();
- if (!media_crypto_context_) {
- LOG(ERROR) << "MediaCryptoContext not supported";
- EnterTerminalState(State::kError);
- init_cb.Run(false);
- return;
- }
// Register CDM callbacks. The callbacks registered will be posted back to
// this thread via BindToCurrentLoop.
-
- // Since |this| holds a reference to the |cdm_|, by the time the CDM is
- // destructed, UnregisterPlayer() must have been called and |this| has been
- // destructed as well. So the |cdm_unset_cb| will never have a chance to be
- // called.
- // TODO(xhwang): Remove |cdm_unset_cb| after it's not used on all platforms.
- cdm_registration_id_ = media_crypto_context_->RegisterPlayer(
- media::BindToCurrentLoop(base::Bind(&MediaCodecVideoDecoder::OnKeyAdded,
- weak_factory_.GetWeakPtr())),
- base::DoNothing());
-
media_crypto_context_->SetMediaCryptoReadyCB(media::BindToCurrentLoop(
base::Bind(&MediaCodecVideoDecoder::OnMediaCryptoReady,
weak_factory_.GetWeakPtr(), init_cb)));
@@ -237,22 +232,46 @@ void MediaCodecVideoDecoder::OnMediaCryptoReady(
const InitCB& init_cb,
JavaObjectPtr media_crypto,
bool requires_secure_video_codec) {
- DVLOG(1) << __func__;
+ DVLOG(1) << __func__
+ << ": requires_secure_video_codec = " << requires_secure_video_codec;
DCHECK(state_ == State::kInitializing);
+ DCHECK(media_crypto);
- if (!media_crypto || media_crypto->is_null()) {
- LOG(ERROR) << "MediaCrypto is not available";
- EnterTerminalState(State::kError);
- init_cb.Run(false);
+ if (media_crypto->is_null()) {
+ media_crypto_context_->SetMediaCryptoReadyCB(
+ MediaCryptoContext::MediaCryptoReadyCB());
+ media_crypto_context_ = nullptr;
+
+ if (decoder_config_.is_encrypted()) {
+ LOG(ERROR) << "MediaCrypto is not available";
+ EnterTerminalState(State::kError);
+ init_cb.Run(false);
+ return;
+ }
+
+ // MediaCrypto is not available, but the stream is clear. So we can still
+ // play the current stream. But if we switch to an encrypted stream playback
+ // will fail.
+ init_cb.Run(true);
return;
}
media_crypto_ = *media_crypto;
requires_secure_codec_ = requires_secure_video_codec;
+ // Since |this| holds a reference to the |cdm_|, by the time the CDM is
+ // destructed, UnregisterPlayer() must have been called and |this| has been
+ // destructed as well. So the |cdm_unset_cb| will never have a chance to be
+ // called.
+ // TODO(xhwang): Remove |cdm_unset_cb| after it's not used on all platforms.
+ cdm_registration_id_ = media_crypto_context_->RegisterPlayer(
+ media::BindToCurrentLoop(base::Bind(&MediaCodecVideoDecoder::OnKeyAdded,
+ weak_factory_.GetWeakPtr())),
+ base::DoNothing());
+
// Request a secure surface in all cases. For L3, it's okay if we fall back
- // to SurfaceTexture rather than fail composition. For L1, it's required.
+ // to TextureOwner rather than fail composition. For L1, it's required.
surface_chooser_helper_.SetSecureSurfaceMode(
requires_secure_video_codec
? SurfaceChooserHelper::SecureSurfaceMode::kRequired
@@ -285,13 +304,13 @@ void MediaCodecVideoDecoder::StartLazyInit() {
}
void MediaCodecVideoDecoder::OnVideoFrameFactoryInitialized(
- scoped_refptr<SurfaceTextureGLOwner> surface_texture) {
+ scoped_refptr<TextureOwner> texture_owner) {
DVLOG(2) << __func__;
- if (!surface_texture) {
+ if (!texture_owner) {
EnterTerminalState(State::kError);
return;
}
- surface_texture_bundle_ = new AVDASurfaceBundle(std::move(surface_texture));
+ texture_owner_bundle_ = new AVDASurfaceBundle(std::move(texture_owner));
// Overlays are disabled when |enable_threaded_texture_mailboxes| is true
// (http://crbug.com/582170).
@@ -337,7 +356,7 @@ void MediaCodecVideoDecoder::OnSurfaceChosen(
weak_factory_.GetWeakPtr()));
target_surface_bundle_ = new AVDASurfaceBundle(std::move(overlay));
} else {
- target_surface_bundle_ = surface_texture_bundle_;
+ target_surface_bundle_ = texture_owner_bundle_;
}
// If we were waiting for our first surface during initialization, then
@@ -366,7 +385,7 @@ void MediaCodecVideoDecoder::OnSurfaceDestroyed(AndroidOverlay* overlay) {
// Reset the target bundle if it is the one being destroyed.
if (target_surface_bundle_ &&
target_surface_bundle_->overlay.get() == overlay) {
- target_surface_bundle_ = surface_texture_bundle_;
+ target_surface_bundle_ = texture_owner_bundle_;
}
// Transition the codec away from the overlay if necessary.
@@ -445,7 +464,7 @@ void MediaCodecVideoDecoder::OnCodecConfigured(
void MediaCodecVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
const DecodeCB& decode_cb) {
- DVLOG(2) << __func__ << ": " << buffer->AsHumanReadableString();
+ DVLOG(3) << __func__ << ": " << buffer->AsHumanReadableString();
if (state_ == State::kError) {
decode_cb.Run(DecodeStatus::DECODE_ERROR);
return;
@@ -554,7 +573,10 @@ bool MediaCodecVideoDecoder::QueueInput() {
PendingDecode& pending_decode = pending_decodes_.front();
auto status = codec_->QueueInputBuffer(*pending_decode.buffer,
decoder_config_.encryption_scheme());
- DVLOG((status == CodecWrapper::QueueStatus::kTryAgainLater ? 3 : 2))
+ DVLOG((status == CodecWrapper::QueueStatus::kTryAgainLater ||
+ status == CodecWrapper::QueueStatus::kOk
+ ? 3
+ : 2))
<< "QueueInput(" << pending_decode.buffer->AsHumanReadableString()
<< ") status=" << static_cast<int>(status);
@@ -618,7 +640,7 @@ bool MediaCodecVideoDecoder::DequeueOutput() {
EnterTerminalState(State::kError);
return false;
}
- DVLOG(2) << "DequeueOutputBuffer(): pts="
+ DVLOG(3) << "DequeueOutputBuffer(): pts="
<< (eos ? "EOS"
: std::to_string(presentation_time.InMilliseconds()));
@@ -648,9 +670,11 @@ bool MediaCodecVideoDecoder::DequeueOutput() {
SurfaceChooserHelper::FrameInformation::FRAME_INFORMATION_MAX) +
1); // PRESUBMIT_IGNORE_UMA_MAX
+ gfx::Rect visible_rect(output_buffer->size());
video_frame_factory_->CreateVideoFrame(
std::move(output_buffer), presentation_time,
- decoder_config_.natural_size(), CreatePromotionHintCB(),
+ GetNaturalSize(visible_rect, decoder_config_.GetPixelAspectRatio()),
+ CreatePromotionHintCB(),
base::Bind(&MediaCodecVideoDecoder::ForwardVideoFrame,
weak_factory_.GetWeakPtr(), reset_generation_));
return true;
@@ -744,7 +768,7 @@ void MediaCodecVideoDecoder::EnterTerminalState(State state) {
pump_codec_timer_.Stop();
ReleaseCodec();
target_surface_bundle_ = nullptr;
- surface_texture_bundle_ = nullptr;
+ texture_owner_bundle_ = nullptr;
if (state == State::kError)
CancelPendingDecodes(DecodeStatus::DECODE_ERROR);
if (drain_type_)
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.h b/chromium/media/gpu/android/media_codec_video_decoder.h
index adbfa1de71f..1bdaab20417 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.h
+++ b/chromium/media/gpu/android/media_codec_video_decoder.h
@@ -44,7 +44,7 @@ struct PendingDecode {
// first Decode() (see StartLazyInit()). We do this because there are cases in
// our media pipeline where we'll initialize a decoder but never use it
// (e.g., MSE with no media data appended), and if we eagerly allocator decoder
-// resources, like MediaCodecs and SurfaceTextures, we will block other
+// resources, like MediaCodecs and TextureOwners, we will block other
// playbacks that need them.
// TODO: Lazy initialization should be handled at a higher layer of the media
// stack for both simplicity and cross platform support.
@@ -120,7 +120,7 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder
// Finishes initialization.
void StartLazyInit();
void OnVideoFrameFactoryInitialized(
- scoped_refptr<SurfaceTextureGLOwner> surface_texture);
+ scoped_refptr<TextureOwner> texture_owner);
// Resets |waiting_for_key_| to false, indicating that MediaCodec might now
// accept buffers.
@@ -242,9 +242,9 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder
// non-null from the first surface choice.
scoped_refptr<AVDASurfaceBundle> target_surface_bundle_;
- // A SurfaceTexture bundle that is kept for the lifetime of MCVD so that if we
+ // A TextureOwner bundle that is kept for the lifetime of MCVD so that if we
// have to synchronously switch surfaces we always have one available.
- scoped_refptr<AVDASurfaceBundle> surface_texture_bundle_;
+ scoped_refptr<AVDASurfaceBundle> texture_owner_bundle_;
// A callback for requesting overlay info updates.
RequestOverlayInfoCB request_overlay_info_cb_;
@@ -267,7 +267,7 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder
// Most recently cached frame information, so that we can dispatch it without
// recomputing it on every frame. It changes very rarely.
SurfaceChooserHelper::FrameInformation cached_frame_information_ =
- SurfaceChooserHelper::FrameInformation::SURFACETEXTURE_INSECURE;
+ SurfaceChooserHelper::FrameInformation::NON_OVERLAY_INSECURE;
// CDM related stuff.
diff --git a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
index 30325328b36..a19ca8163f7 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
@@ -3,8 +3,10 @@
// found in the LICENSE file.
#include "media/gpu/android/media_codec_video_decoder.h"
+
#include "base/android/jni_android.h"
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/run_loop.h"
#include "base/test/mock_callback.h"
#include "base/test/scoped_task_environment.h"
@@ -21,7 +23,7 @@
#include "media/gpu/android/fake_codec_allocator.h"
#include "media/gpu/android/mock_android_video_surface_chooser.h"
#include "media/gpu/android/mock_device_info.h"
-#include "media/gpu/android/mock_surface_texture_gl_owner.h"
+#include "media/gpu/android/mock_texture_owner.h"
#include "media/gpu/android/video_frame_factory.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -61,7 +63,7 @@ class MockVideoFrameFactory : public VideoFrameFactory {
MOCK_METHOD6(
MockCreateVideoFrame,
void(CodecOutputBuffer* raw_output_buffer,
- scoped_refptr<SurfaceTextureGLOwner> surface_texture,
+ scoped_refptr<TextureOwner> texture_owner,
base::TimeDelta timestamp,
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
@@ -74,10 +76,10 @@ class MockVideoFrameFactory : public VideoFrameFactory {
scoped_refptr<AVDASurfaceBundle> surface_bundle) override {
MockSetSurfaceBundle(surface_bundle);
if (!surface_bundle) {
- surface_texture_ = nullptr;
+ texture_owner_ = nullptr;
} else {
- surface_texture_ =
- surface_bundle->overlay ? nullptr : surface_bundle->surface_texture;
+ texture_owner_ =
+ surface_bundle->overlay ? nullptr : surface_bundle->texture_owner_;
}
}
@@ -87,7 +89,7 @@ class MockVideoFrameFactory : public VideoFrameFactory {
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
VideoDecoder::OutputCB output_cb) override {
- MockCreateVideoFrame(output_buffer.get(), surface_texture_, timestamp,
+ MockCreateVideoFrame(output_buffer.get(), texture_owner_, timestamp,
natural_size, promotion_hint_cb, output_cb);
last_output_buffer_ = std::move(output_buffer);
output_cb.Run(VideoFrame::CreateBlackFrame(gfx::Size(10, 10)));
@@ -99,13 +101,13 @@ class MockVideoFrameFactory : public VideoFrameFactory {
}
std::unique_ptr<CodecOutputBuffer> last_output_buffer_;
- scoped_refptr<SurfaceTextureGLOwner> surface_texture_;
+ scoped_refptr<TextureOwner> texture_owner_;
base::OnceClosure last_closure_;
};
-class MediaCodecVideoDecoderTest : public testing::Test {
+class MediaCodecVideoDecoderTest : public testing::TestWithParam<VideoCodec> {
public:
- MediaCodecVideoDecoderTest() = default;
+ MediaCodecVideoDecoderTest() : codec_(GetParam()) {}
void SetUp() override {
uint8_t data = 0;
@@ -126,19 +128,18 @@ class MediaCodecVideoDecoderTest : public testing::Test {
std::make_unique<NiceMock<MockAndroidVideoSurfaceChooser>>();
surface_chooser_ = surface_chooser.get();
- auto surface_texture =
- base::MakeRefCounted<NiceMock<MockSurfaceTextureGLOwner>>(0, nullptr,
- nullptr);
- surface_texture_ = surface_texture.get();
+ auto texture_owner =
+ base::MakeRefCounted<NiceMock<MockTextureOwner>>(0, nullptr, nullptr);
+ texture_owner_ = texture_owner.get();
auto video_frame_factory =
std::make_unique<NiceMock<MockVideoFrameFactory>>();
video_frame_factory_ = video_frame_factory.get();
- // Set up VFF to pass |surface_texture_| via its InitCb.
+ // Set up VFF to pass |texture_owner_| via its InitCb.
const bool want_promotion_hint =
device_info_->IsSetOutputSurfaceSupported();
ON_CALL(*video_frame_factory_, Initialize(want_promotion_hint, _))
- .WillByDefault(RunCallback<1>(surface_texture));
+ .WillByDefault(RunCallback<1>(texture_owner));
auto* observable_mcvd = new DestructionObservableMCVD(
gpu_preferences_, device_info_.get(), codec_allocator_.get(),
@@ -154,8 +155,9 @@ class MediaCodecVideoDecoderTest : public testing::Test {
destruction_observer_->ExpectDestruction();
}
- void CreateCdm(bool require_secure_video_decoder) {
- cdm_ = std::make_unique<MockMediaCryptoContext>();
+ void CreateCdm(bool has_media_crypto_context,
+ bool require_secure_video_decoder) {
+ cdm_ = std::make_unique<MockMediaCryptoContext>(has_media_crypto_context);
require_secure_video_decoder_ = require_secure_video_decoder;
// We need to send an object as the media crypto, but MCVD shouldn't
@@ -167,21 +169,20 @@ class MediaCodecVideoDecoderTest : public testing::Test {
// Just call Initialize(). MCVD will be waiting for a call to Decode() before
// continuining initialization.
- bool Initialize(
- VideoDecoderConfig config = TestVideoConfig::Large(kCodecH264)) {
+ bool Initialize(VideoDecoderConfig config) {
if (!mcvd_)
CreateMcvd();
bool result = false;
auto init_cb = [](bool* result_out, bool result) { *result_out = result; };
mcvd_->Initialize(config, false, cdm_.get(), base::Bind(init_cb, &result),
base::BindRepeating(&OutputCb, &most_recent_frame_),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
- if (config.is_encrypted() && cdm_) {
- // If the output is encrypted, then we expect that MCVD will be waiting
- // for the media crypto object.
- // TODO(liberato): why does CreateJavaObjectPtr() not link?
+ // If there is a CDM available, then we expect that MCVD will be waiting
+ // for the media crypto object.
+ // TODO(liberato): why does CreateJavaObjectPtr() not link?
+ if (cdm_ && cdm_->media_crypto_ready_cb) {
cdm_->media_crypto_ready_cb.Run(
std::make_unique<base::android::ScopedJavaGlobalRef<jobject>>(
media_crypto_),
@@ -195,7 +196,7 @@ class MediaCodecVideoDecoderTest : public testing::Test {
// Call Initialize() and Decode() to start lazy init. MCVD will be waiting for
// a codec and have one decode pending.
MockAndroidOverlay* InitializeWithOverlay_OneDecodePending(
- VideoDecoderConfig config = TestVideoConfig::Large(kCodecH264)) {
+ VideoDecoderConfig config) {
Initialize(config);
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
OverlayInfo info;
@@ -209,19 +210,18 @@ class MediaCodecVideoDecoderTest : public testing::Test {
// Call Initialize() and Decode() to start lazy init. MCVD will be waiting for
// a codec and have one decode pending.
- void InitializeWithSurfaceTexture_OneDecodePending(
- VideoDecoderConfig config = TestVideoConfig::Large(kCodecH264)) {
+ void InitializeWithTextureOwner_OneDecodePending(VideoDecoderConfig config) {
Initialize(config);
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
provide_overlay_info_cb_.Run(OverlayInfo());
- surface_chooser_->ProvideSurfaceTexture();
+ surface_chooser_->ProvideTextureOwner();
}
// Fully initializes MCVD and returns the codec it's configured with. MCVD
// will have one decode pending.
MockMediaCodecBridge* InitializeFully_OneDecodePending(
- VideoDecoderConfig config = TestVideoConfig::Large(kCodecH264)) {
- InitializeWithSurfaceTexture_OneDecodePending(config);
+ VideoDecoderConfig config) {
+ InitializeWithTextureOwner_OneDecodePending(config);
return codec_allocator_->ProvideMockCodecAsync();
}
@@ -238,12 +238,13 @@ class MediaCodecVideoDecoderTest : public testing::Test {
}
protected:
+ const VideoCodec codec_;
base::test::ScopedTaskEnvironment scoped_task_environment_;
scoped_refptr<DecoderBuffer> fake_decoder_buffer_;
std::unique_ptr<MockDeviceInfo> device_info_;
std::unique_ptr<FakeCodecAllocator> codec_allocator_;
MockAndroidVideoSurfaceChooser* surface_chooser_;
- MockSurfaceTextureGLOwner* surface_texture_;
+ MockTextureOwner* texture_owner_;
MockVideoFrameFactory* video_frame_factory_;
NiceMock<base::MockCallback<VideoDecoder::DecodeCB>> decode_cb_;
std::unique_ptr<DestructionObserver> destruction_observer_;
@@ -264,83 +265,86 @@ class MediaCodecVideoDecoderTest : public testing::Test {
std::unique_ptr<MockMediaCryptoContext> cdm_;
};
-TEST_F(MediaCodecVideoDecoderTest, UnknownCodecIsRejected) {
+// Tests which only work for a single codec.
+class MediaCodecVideoDecoderH264Test : public MediaCodecVideoDecoderTest {};
+class MediaCodecVideoDecoderVp8Test : public MediaCodecVideoDecoderTest {};
+
+TEST_P(MediaCodecVideoDecoderTest, UnknownCodecIsRejected) {
ASSERT_FALSE(Initialize(TestVideoConfig::Invalid()));
}
-TEST_F(MediaCodecVideoDecoderTest, H264IsSupported) {
- // H264 is always supported by MCVD.
+TEST_P(MediaCodecVideoDecoderH264Test, H264IsSupported) {
ASSERT_TRUE(Initialize(TestVideoConfig::NormalH264()));
}
-TEST_F(MediaCodecVideoDecoderTest, SmallVp8IsRejected) {
+TEST_P(MediaCodecVideoDecoderVp8Test, SmallVp8IsRejected) {
ASSERT_FALSE(Initialize(TestVideoConfig::Normal()));
}
-TEST_F(MediaCodecVideoDecoderTest, InitializeDoesntInitSurfaceOrCodec) {
+TEST_P(MediaCodecVideoDecoderTest, InitializeDoesntInitSurfaceOrCodec) {
CreateMcvd();
EXPECT_CALL(*video_frame_factory_, Initialize(_, _)).Times(0);
EXPECT_CALL(*surface_chooser_, MockUpdateState()).Times(0);
EXPECT_CALL(*codec_allocator_, MockCreateMediaCodecAsync(_, _)).Times(0);
- Initialize();
+ Initialize(TestVideoConfig::Large(codec_));
}
-TEST_F(MediaCodecVideoDecoderTest, FirstDecodeTriggersFrameFactoryInit) {
- Initialize();
+TEST_P(MediaCodecVideoDecoderTest, FirstDecodeTriggersFrameFactoryInit) {
+ Initialize(TestVideoConfig::Large(codec_));
EXPECT_CALL(*video_frame_factory_, Initialize(_, _));
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
}
-TEST_F(MediaCodecVideoDecoderTest,
+TEST_P(MediaCodecVideoDecoderTest,
FirstDecodeTriggersOverlayInfoRequestIfSupported) {
- Initialize();
+ Initialize(TestVideoConfig::Large(codec_));
// Requesting overlay info sets this cb.
ASSERT_FALSE(provide_overlay_info_cb_);
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
ASSERT_TRUE(provide_overlay_info_cb_);
}
-TEST_F(MediaCodecVideoDecoderTest,
+TEST_P(MediaCodecVideoDecoderTest,
OverlayInfoIsNotRequestedIfOverlaysNotSupported) {
- Initialize();
+ Initialize(TestVideoConfig::Large(codec_));
ON_CALL(*device_info_, SupportsOverlaySurfaces())
.WillByDefault(Return(false));
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
ASSERT_FALSE(provide_overlay_info_cb_);
}
-TEST_F(MediaCodecVideoDecoderTest, RestartForOverlayTransitionsFlagIsCorrect) {
+TEST_P(MediaCodecVideoDecoderTest, RestartForOverlayTransitionsFlagIsCorrect) {
ON_CALL(*device_info_, IsSetOutputSurfaceSupported())
.WillByDefault(Return(true));
- Initialize();
+ Initialize(TestVideoConfig::Large(codec_));
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
ASSERT_FALSE(restart_for_transitions_);
}
-TEST_F(MediaCodecVideoDecoderTest,
+TEST_P(MediaCodecVideoDecoderTest,
OverlayInfoIsNotRequestedIfThreadedTextureMailboxesEnabled) {
gpu_preferences_.enable_threaded_texture_mailboxes = true;
- Initialize();
+ Initialize(TestVideoConfig::Large(codec_));
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
ASSERT_FALSE(provide_overlay_info_cb_);
}
-TEST_F(MediaCodecVideoDecoderTest, OverlayInfoDuringInitUpdatesSurfaceChooser) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, OverlayInfoDuringInitUpdatesSurfaceChooser) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
EXPECT_CALL(*surface_chooser_, MockUpdateState());
provide_overlay_info_cb_.Run(OverlayInfo());
}
-TEST_F(MediaCodecVideoDecoderTest, CodecIsCreatedAfterSurfaceChosen) {
- Initialize();
+TEST_P(MediaCodecVideoDecoderTest, CodecIsCreatedAfterSurfaceChosen) {
+ Initialize(TestVideoConfig::Large(codec_));
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
provide_overlay_info_cb_.Run(OverlayInfo());
EXPECT_CALL(*codec_allocator_, MockCreateMediaCodecAsync(_, NotNull()));
- surface_chooser_->ProvideSurfaceTexture();
+ surface_chooser_->ProvideTextureOwner();
}
-TEST_F(MediaCodecVideoDecoderTest, FrameFactoryInitFailureIsAnError) {
- Initialize();
+TEST_P(MediaCodecVideoDecoderTest, FrameFactoryInitFailureIsAnError) {
+ Initialize(TestVideoConfig::Large(codec_));
ON_CALL(*video_frame_factory_, Initialize(_, _))
.WillByDefault(RunCallback<1>(nullptr));
EXPECT_CALL(decode_cb_, Run(DecodeStatus::DECODE_ERROR)).Times(1);
@@ -348,24 +352,26 @@ TEST_F(MediaCodecVideoDecoderTest, FrameFactoryInitFailureIsAnError) {
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
}
-TEST_F(MediaCodecVideoDecoderTest, CodecCreationFailureIsAnError) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, CodecCreationFailureIsAnError) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
EXPECT_CALL(decode_cb_, Run(DecodeStatus::DECODE_ERROR)).Times(2);
// Failing to create a codec should put MCVD into an error state.
codec_allocator_->ProvideNullCodecAsync();
}
-TEST_F(MediaCodecVideoDecoderTest, CodecFailuresAreAnError) {
- auto* codec = InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, CodecFailuresAreAnError) {
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
EXPECT_CALL(*codec, DequeueInputBuffer(_, _))
.WillOnce(Return(MEDIA_CODEC_ERROR));
EXPECT_CALL(decode_cb_, Run(DecodeStatus::DECODE_ERROR));
PumpCodec();
}
-TEST_F(MediaCodecVideoDecoderTest, AfterInitCompletesTheCodecIsPolled) {
- auto* codec = InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, AfterInitCompletesTheCodecIsPolled) {
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// Run a RunLoop until the first time the codec is polled for an available
// input buffer.
base::RunLoop loop;
@@ -377,13 +383,14 @@ TEST_F(MediaCodecVideoDecoderTest, AfterInitCompletesTheCodecIsPolled) {
loop.Run();
}
-TEST_F(MediaCodecVideoDecoderTest, CodecIsReleasedOnDestruction) {
- auto* codec = InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, CodecIsReleasedOnDestruction) {
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
EXPECT_CALL(*codec_allocator_, MockReleaseMediaCodec(codec, _, _));
}
-TEST_F(MediaCodecVideoDecoderTest, SurfaceChooserIsUpdatedOnOverlayChanges) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, SurfaceChooserIsUpdatedOnOverlayChanges) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
EXPECT_CALL(*surface_chooser_, MockReplaceOverlayFactory(_)).Times(2);
OverlayInfo info;
@@ -395,8 +402,8 @@ TEST_F(MediaCodecVideoDecoderTest, SurfaceChooserIsUpdatedOnOverlayChanges) {
ASSERT_TRUE(surface_chooser_->factory_);
}
-TEST_F(MediaCodecVideoDecoderTest, OverlayInfoUpdatesAreIgnoredInStateError) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, OverlayInfoUpdatesAreIgnoredInStateError) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
// Enter the error state.
codec_allocator_->ProvideNullCodecAsync();
@@ -406,8 +413,8 @@ TEST_F(MediaCodecVideoDecoderTest, OverlayInfoUpdatesAreIgnoredInStateError) {
provide_overlay_info_cb_.Run(info);
}
-TEST_F(MediaCodecVideoDecoderTest, DuplicateOverlayInfoUpdatesAreIgnored) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, DuplicateOverlayInfoUpdatesAreIgnored) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
// The second overlay info update should be ignored.
EXPECT_CALL(*surface_chooser_, MockReplaceOverlayFactory(_)).Times(1);
@@ -417,19 +424,21 @@ TEST_F(MediaCodecVideoDecoderTest, DuplicateOverlayInfoUpdatesAreIgnored) {
provide_overlay_info_cb_.Run(info);
}
-TEST_F(MediaCodecVideoDecoderTest, CodecIsCreatedWithChosenOverlay) {
+TEST_P(MediaCodecVideoDecoderTest, CodecIsCreatedWithChosenOverlay) {
AndroidOverlay* overlay_passed_to_codec = nullptr;
EXPECT_CALL(*codec_allocator_, MockCreateMediaCodecAsync(_, _))
.WillOnce(SaveArg<0>(&overlay_passed_to_codec));
- auto* overlay = InitializeWithOverlay_OneDecodePending();
+ auto* overlay =
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Large(codec_));
DCHECK_EQ(overlay, overlay_passed_to_codec);
}
-TEST_F(MediaCodecVideoDecoderTest,
+TEST_P(MediaCodecVideoDecoderTest,
CodecCreationWeakPtrIsInvalidatedBySurfaceDestroyed) {
ON_CALL(*device_info_, IsSetOutputSurfaceSupported())
.WillByDefault(Return(false));
- auto* overlay = InitializeWithOverlay_OneDecodePending();
+ auto* overlay =
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Large(codec_));
overlay->OnSurfaceDestroyed();
// MCVD should invalidate its CodecAllocatorClient WeakPtr so that it doesn't
@@ -438,8 +447,9 @@ TEST_F(MediaCodecVideoDecoderTest,
ASSERT_FALSE(codec_allocator_->ProvideMockCodecAsync());
}
-TEST_F(MediaCodecVideoDecoderTest, SurfaceChangedWhileCodecCreationPending) {
- auto* overlay = InitializeWithOverlay_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, SurfaceChangedWhileCodecCreationPending) {
+ auto* overlay =
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Large(codec_));
overlay->OnSurfaceDestroyed();
auto codec = std::make_unique<NiceMock<MockMediaCodecBridge>>();
@@ -449,8 +459,9 @@ TEST_F(MediaCodecVideoDecoderTest, SurfaceChangedWhileCodecCreationPending) {
codec_allocator_->ProvideMockCodecAsync(std::move(codec));
}
-TEST_F(MediaCodecVideoDecoderTest, SurfaceDestroyedDoesSyncSurfaceTransition) {
- auto* overlay = InitializeWithOverlay_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, SurfaceDestroyedDoesSyncSurfaceTransition) {
+ auto* overlay =
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Large(codec_));
auto* codec = codec_allocator_->ProvideMockCodecAsync();
// MCVD must synchronously switch the codec's surface (to surface
@@ -461,11 +472,12 @@ TEST_F(MediaCodecVideoDecoderTest, SurfaceDestroyedDoesSyncSurfaceTransition) {
overlay->OnSurfaceDestroyed();
}
-TEST_F(MediaCodecVideoDecoderTest,
+TEST_P(MediaCodecVideoDecoderTest,
SurfaceDestroyedReleasesCodecIfSetSurfaceIsNotSupported) {
ON_CALL(*device_info_, IsSetOutputSurfaceSupported())
.WillByDefault(Return(false));
- auto* overlay = InitializeWithOverlay_OneDecodePending();
+ auto* overlay =
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Large(codec_));
auto* codec = codec_allocator_->ProvideMockCodecAsync();
// MCVD must synchronously release the codec.
@@ -476,22 +488,22 @@ TEST_F(MediaCodecVideoDecoderTest,
testing::Mock::VerifyAndClearExpectations(codec_allocator_.get());
}
-TEST_F(MediaCodecVideoDecoderTest, PumpCodecPerformsPendingSurfaceTransitions) {
- InitializeWithOverlay_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, PumpCodecPerformsPendingSurfaceTransitions) {
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Large(codec_));
auto* codec = codec_allocator_->ProvideMockCodecAsync();
// Set a pending surface transition and then call PumpCodec().
- surface_chooser_->ProvideSurfaceTexture();
+ surface_chooser_->ProvideTextureOwner();
EXPECT_CALL(*codec, SetSurface(_)).WillOnce(Return(true));
PumpCodec();
}
-TEST_F(MediaCodecVideoDecoderTest,
+TEST_P(MediaCodecVideoDecoderTest,
SetSurfaceFailureReleasesTheCodecAndSignalsError) {
- InitializeWithOverlay_OneDecodePending();
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Large(codec_));
auto* codec = codec_allocator_->ProvideMockCodecAsync();
- surface_chooser_->ProvideSurfaceTexture();
+ surface_chooser_->ProvideTextureOwner();
EXPECT_CALL(*codec, SetSurface(_)).WillOnce(Return(false));
EXPECT_CALL(decode_cb_, Run(DecodeStatus::DECODE_ERROR)).Times(2);
EXPECT_CALL(*codec_allocator_, MockReleaseMediaCodec(codec, NotNull(), _));
@@ -500,51 +512,53 @@ TEST_F(MediaCodecVideoDecoderTest,
testing::Mock::VerifyAndClearExpectations(codec_allocator_.get());
}
-TEST_F(MediaCodecVideoDecoderTest, SurfaceTransitionsCanBeCanceled) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, SurfaceTransitionsCanBeCanceled) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
auto* codec = codec_allocator_->ProvideMockCodecAsync();
- // Set a pending transition to an overlay, and then back to a surface texture.
+ // Set a pending transition to an overlay, and then back to a texture owner.
// They should cancel each other out and leave the codec as-is.
EXPECT_CALL(*codec, SetSurface(_)).Times(0);
auto overlay = std::make_unique<MockAndroidOverlay>();
auto observer = overlay->CreateDestructionObserver();
surface_chooser_->ProvideOverlay(std::move(overlay));
- // Switching back to surface texture should delete the pending overlay.
+ // Switching back to texture owner should delete the pending overlay.
observer->ExpectDestruction();
- surface_chooser_->ProvideSurfaceTexture();
+ surface_chooser_->ProvideTextureOwner();
observer.reset();
// Verify that Decode() does not transition the surface
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
}
-TEST_F(MediaCodecVideoDecoderTest, TransitionToSameSurfaceIsIgnored) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, TransitionToSameSurfaceIsIgnored) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
auto* codec = codec_allocator_->ProvideMockCodecAsync();
EXPECT_CALL(*codec, SetSurface(_)).Times(0);
- surface_chooser_->ProvideSurfaceTexture();
+ surface_chooser_->ProvideTextureOwner();
mcvd_->Decode(fake_decoder_buffer_, decode_cb_.Get());
}
-TEST_F(MediaCodecVideoDecoderTest,
+TEST_P(MediaCodecVideoDecoderTest,
ResetBeforeCodecInitializedSucceedsImmediately) {
- InitializeWithSurfaceTexture_OneDecodePending();
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
base::MockCallback<base::Closure> reset_cb;
EXPECT_CALL(reset_cb, Run());
mcvd_->Reset(reset_cb.Get());
}
-TEST_F(MediaCodecVideoDecoderTest, ResetAbortsPendingDecodes) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, ResetAbortsPendingDecodes) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
EXPECT_CALL(decode_cb_, Run(DecodeStatus::ABORTED));
mcvd_->Reset(base::DoNothing());
}
-TEST_F(MediaCodecVideoDecoderTest, ResetAbortsPendingEosDecode) {
+// TODO(liberato): Why does this test only work for H264?
+TEST_P(MediaCodecVideoDecoderH264Test, ResetAbortsPendingEosDecode) {
// EOS is treated differently by MCVD. This verifies that it's also aborted.
- auto* codec = InitializeFully_OneDecodePending();
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
base::MockCallback<VideoDecoder::DecodeCB> eos_decode_cb;
mcvd_->Decode(DecoderBuffer::CreateEOSBuffer(), eos_decode_cb.Get());
@@ -558,8 +572,9 @@ TEST_F(MediaCodecVideoDecoderTest, ResetAbortsPendingEosDecode) {
mcvd_->Reset(base::DoNothing());
}
-TEST_F(MediaCodecVideoDecoderTest, ResetDoesNotFlushAnAlreadyFlushedCodec) {
- auto* codec = InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, ResetDoesNotFlushAnAlreadyFlushedCodec) {
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// The codec is still in the flushed state so Reset() doesn't need to flush.
EXPECT_CALL(*codec, Flush()).Times(0);
@@ -568,9 +583,9 @@ TEST_F(MediaCodecVideoDecoderTest, ResetDoesNotFlushAnAlreadyFlushedCodec) {
mcvd_->Reset(reset_cb.Get());
}
-TEST_F(MediaCodecVideoDecoderTest, ResetDrainsVP8CodecsBeforeFlushing) {
+TEST_P(MediaCodecVideoDecoderVp8Test, ResetDrainsVP8CodecsBeforeFlushing) {
auto* codec =
- InitializeFully_OneDecodePending(TestVideoConfig::Large(kCodecVP8));
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// Accept the first decode to transition out of the flushed state.
codec->AcceptOneInput();
PumpCodec();
@@ -594,8 +609,9 @@ TEST_F(MediaCodecVideoDecoderTest, ResetDrainsVP8CodecsBeforeFlushing) {
testing::Mock::VerifyAndClearExpectations(&reset_cb);
}
-TEST_F(MediaCodecVideoDecoderTest, ResetDoesNotDrainNonVp8Codecs) {
- auto* codec = InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderVp8Test, ResetDoesNotDrainNonVp8Codecs) {
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// Accept the first decode to transition out of the flushed state.
codec->AcceptOneInput();
PumpCodec();
@@ -608,9 +624,9 @@ TEST_F(MediaCodecVideoDecoderTest, ResetDoesNotDrainNonVp8Codecs) {
mcvd_->Reset(reset_cb.Get());
}
-TEST_F(MediaCodecVideoDecoderTest, TeardownCompletesPendingReset) {
+TEST_P(MediaCodecVideoDecoderVp8Test, TeardownCompletesPendingReset) {
auto* codec =
- InitializeFully_OneDecodePending(TestVideoConfig::Large(kCodecVP8));
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// Accept the first decode to transition out of the flushed state.
codec->AcceptOneInput();
@@ -627,8 +643,9 @@ TEST_F(MediaCodecVideoDecoderTest, TeardownCompletesPendingReset) {
PumpCodec();
}
-TEST_F(MediaCodecVideoDecoderTest, CodecFlushIsDeferredAfterDraining) {
- auto* codec = InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, CodecFlushIsDeferredAfterDraining) {
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
mcvd_->Decode(DecoderBuffer::CreateEOSBuffer(), decode_cb_.Get());
// Produce one output that VFF will hold onto.
@@ -653,8 +670,9 @@ TEST_F(MediaCodecVideoDecoderTest, CodecFlushIsDeferredAfterDraining) {
PumpCodec();
}
-TEST_F(MediaCodecVideoDecoderTest, EosDecodeCbIsRunAfterEosIsDequeued) {
- auto* codec = InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, EosDecodeCbIsRunAfterEosIsDequeued) {
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
codec->AcceptOneInput();
PumpCodec();
@@ -674,13 +692,13 @@ TEST_F(MediaCodecVideoDecoderTest, EosDecodeCbIsRunAfterEosIsDequeued) {
std::move(video_frame_factory_->last_closure_).Run();
}
-TEST_F(MediaCodecVideoDecoderTest, TeardownBeforeInitWorks) {
+TEST_P(MediaCodecVideoDecoderTest, TeardownBeforeInitWorks) {
// Since we assert that MCVD is destructed by default, this test verifies that
// MCVD is destructed safely before Initialize().
}
-TEST_F(MediaCodecVideoDecoderTest, TeardownInvalidatesCodecCreationWeakPtr) {
- InitializeWithSurfaceTexture_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, TeardownInvalidatesCodecCreationWeakPtr) {
+ InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
destruction_observer_->DoNotAllowDestruction();
mcvd_.reset();
// DeleteSoon() is now pending. Ensure it's safe if the codec creation
@@ -689,14 +707,15 @@ TEST_F(MediaCodecVideoDecoderTest, TeardownInvalidatesCodecCreationWeakPtr) {
destruction_observer_->ExpectDestruction();
}
-TEST_F(MediaCodecVideoDecoderTest, TeardownDoesNotDrainFlushedCodecs) {
- InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderTest, TeardownDoesNotDrainFlushedCodecs) {
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// Since we assert that MCVD is destructed by default, this test verifies that
// MCVD is destructed without requiring the codec to output an EOS buffer.
}
-TEST_F(MediaCodecVideoDecoderTest, TeardownDoesNotDrainNonVp8Codecs) {
- auto* codec = InitializeFully_OneDecodePending();
+TEST_P(MediaCodecVideoDecoderVp8Test, TeardownDoesNotDrainNonVp8Codecs) {
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// Accept the first decode to transition out of the flushed state.
codec->AcceptOneInput();
PumpCodec();
@@ -704,9 +723,10 @@ TEST_F(MediaCodecVideoDecoderTest, TeardownDoesNotDrainNonVp8Codecs) {
// MCVD is destructed without requiring the codec to output an EOS buffer.
}
-TEST_F(MediaCodecVideoDecoderTest, TeardownDrainsVp8CodecsBeforeDestruction) {
+TEST_P(MediaCodecVideoDecoderVp8Test,
+ TeardownDrainsVp8CodecsBeforeDestruction) {
auto* codec =
- InitializeFully_OneDecodePending(TestVideoConfig::Large(kCodecVP8));
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// Accept the first decode to transition out of the flushed state.
codec->AcceptOneInput();
PumpCodec();
@@ -725,12 +745,12 @@ TEST_F(MediaCodecVideoDecoderTest, TeardownDrainsVp8CodecsBeforeDestruction) {
base::RunLoop().RunUntilIdle();
}
-TEST_F(MediaCodecVideoDecoderTest, CdmInitializationWorksForL3) {
+TEST_P(MediaCodecVideoDecoderTest, CdmInitializationWorksForL3) {
// Make sure that MCVD uses the cdm, and sends it along to the codec.
- CreateCdm(false);
+ CreateCdm(true, false);
EXPECT_CALL(*cdm_, RegisterPlayer(_, _));
InitializeWithOverlay_OneDecodePending(
- TestVideoConfig::NormalEncrypted(kCodecH264));
+ TestVideoConfig::NormalEncrypted(codec_));
ASSERT_TRUE(!!cdm_->new_key_cb);
ASSERT_TRUE(!!cdm_->cdm_unset_cb);
ASSERT_TRUE(!!cdm_->media_crypto_ready_cb);
@@ -744,12 +764,12 @@ TEST_F(MediaCodecVideoDecoderTest, CdmInitializationWorksForL3) {
EXPECT_CALL(*cdm_, UnregisterPlayer(MockMediaCryptoContext::kRegistrationId));
}
-TEST_F(MediaCodecVideoDecoderTest, CdmInitializationWorksForL1) {
+TEST_P(MediaCodecVideoDecoderTest, CdmInitializationWorksForL1) {
// Make sure that MCVD uses the cdm, and sends it along to the codec.
- CreateCdm(true);
+ CreateCdm(true, true);
EXPECT_CALL(*cdm_, RegisterPlayer(_, _));
InitializeWithOverlay_OneDecodePending(
- TestVideoConfig::NormalEncrypted(kCodecH264));
+ TestVideoConfig::NormalEncrypted(codec_));
ASSERT_TRUE(!!cdm_->new_key_cb);
ASSERT_TRUE(!!cdm_->cdm_unset_cb);
ASSERT_TRUE(!!cdm_->media_crypto_ready_cb);
@@ -762,37 +782,62 @@ TEST_F(MediaCodecVideoDecoderTest, CdmInitializationWorksForL1) {
EXPECT_CALL(*cdm_, UnregisterPlayer(MockMediaCryptoContext::kRegistrationId));
}
-TEST_F(MediaCodecVideoDecoderTest, CdmIsIgnoredIfNotEncrypted) {
- CreateCdm(true);
- // It should not register or unregister.
- EXPECT_CALL(*cdm_, RegisterPlayer(_, _)).Times(0);
- EXPECT_CALL(*cdm_, UnregisterPlayer(MockMediaCryptoContext::kRegistrationId))
- .Times(0);
- ASSERT_TRUE(Initialize(TestVideoConfig::NormalH264()));
- ASSERT_TRUE(!cdm_->new_key_cb);
- ASSERT_TRUE(!cdm_->cdm_unset_cb);
- ASSERT_TRUE(!cdm_->media_crypto_ready_cb);
+// TODO(liberato): Why does this test only work for H264?
+TEST_P(MediaCodecVideoDecoderH264Test, CdmIsSetEvenForClearStream) {
+ // Make sure that MCVD uses the cdm, and sends it along to the codec.
+ CreateCdm(true, false);
+ EXPECT_CALL(*cdm_, RegisterPlayer(_, _));
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Normal(codec_));
+ ASSERT_TRUE(!!cdm_->new_key_cb);
+ ASSERT_TRUE(!!cdm_->cdm_unset_cb);
+ ASSERT_TRUE(!!cdm_->media_crypto_ready_cb);
+ ASSERT_EQ(surface_chooser_->current_state_.is_secure, true);
+ ASSERT_EQ(surface_chooser_->current_state_.is_required, false);
+ ASSERT_FALSE(codec_allocator_->most_recent_config->requires_secure_codec);
+ // We can't check for equality safely, but verify that something was provided.
+ ASSERT_TRUE(codec_allocator_->most_recent_config->media_crypto->obj());
+
+ // When |mcvd_| is destroyed, expect that it will unregister itself.
+ EXPECT_CALL(*cdm_, UnregisterPlayer(MockMediaCryptoContext::kRegistrationId));
+}
+
+TEST_P(MediaCodecVideoDecoderTest, NoMediaCryptoContext_ClearStream) {
+ // Make sure that MCVD initializes for clear stream when MediaCryptoContext
+ // is not available.
+ CreateCdm(false, false);
+ InitializeWithOverlay_OneDecodePending(TestVideoConfig::Normal(codec_));
+ ASSERT_FALSE(!!cdm_->new_key_cb);
+ ASSERT_FALSE(!!cdm_->cdm_unset_cb);
+ ASSERT_FALSE(!!cdm_->media_crypto_ready_cb);
ASSERT_EQ(surface_chooser_->current_state_.is_secure, false);
ASSERT_EQ(surface_chooser_->current_state_.is_required, false);
+ ASSERT_FALSE(codec_allocator_->most_recent_config->requires_secure_codec);
}
-TEST_F(MediaCodecVideoDecoderTest, MissingMediaCryptoFailsInit) {
+TEST_P(MediaCodecVideoDecoderTest, NoMediaCryptoContext_EncryptedStream) {
+ // Make sure that MCVD fails to initialize for encrypted stream when
+ // MediaCryptoContext is not available.
+ CreateCdm(false, false);
+ ASSERT_FALSE(Initialize(TestVideoConfig::NormalEncrypted(codec_)));
+}
+
+TEST_P(MediaCodecVideoDecoderTest, MissingMediaCryptoFailsInit) {
// Encrypted media that doesn't get a mediacrypto should fail to init.
- CreateCdm(true);
+ CreateCdm(true, true);
media_crypto_ = nullptr;
- EXPECT_CALL(*cdm_, RegisterPlayer(_, _));
- ASSERT_FALSE(Initialize(TestVideoConfig::NormalEncrypted(kCodecH264)));
- EXPECT_CALL(*cdm_, UnregisterPlayer(MockMediaCryptoContext::kRegistrationId));
+ ASSERT_FALSE(Initialize(TestVideoConfig::NormalEncrypted(codec_)));
}
-TEST_F(MediaCodecVideoDecoderTest, MissingCdmFailsInit) {
+TEST_P(MediaCodecVideoDecoderTest, MissingCdmFailsInit) {
// MCVD should fail init if we don't provide a cdm with an encrypted config.
- ASSERT_FALSE(Initialize(TestVideoConfig::NormalEncrypted(kCodecH264)));
+ ASSERT_FALSE(Initialize(TestVideoConfig::NormalEncrypted(codec_)));
}
-TEST_F(MediaCodecVideoDecoderTest, VideoFramesArePowerEfficient) {
+// TODO(liberato): Why does this test only work for H264?
+TEST_P(MediaCodecVideoDecoderH264Test, VideoFramesArePowerEfficient) {
// MCVD should mark video frames as POWER_EFFICIENT.
- auto* codec = InitializeFully_OneDecodePending();
+ auto* codec =
+ InitializeFully_OneDecodePending(TestVideoConfig::Large(codec_));
// Produce one output.
codec->AcceptOneInput();
@@ -808,4 +853,47 @@ TEST_F(MediaCodecVideoDecoderTest, VideoFramesArePowerEfficient) {
EXPECT_TRUE(power_efficient);
}
+static std::vector<VideoCodec> GetTestList() {
+ std::vector<VideoCodec> test_codecs;
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+ if (MediaCodecUtil::IsMediaCodecAvailable())
+ test_codecs.push_back(kCodecH264);
+#endif
+
+ if (MediaCodecUtil::IsVp8DecoderAvailable())
+ test_codecs.push_back(kCodecVP8);
+ if (MediaCodecUtil::IsVp9DecoderAvailable())
+ test_codecs.push_back(kCodecVP9);
+ return test_codecs;
+}
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+static std::vector<VideoCodec> GetH264IfAvailable() {
+ return MediaCodecUtil::IsMediaCodecAvailable()
+ ? std::vector<VideoCodec>()
+ : std::vector<VideoCodec>(1, kCodecH264);
+}
+#endif
+
+static std::vector<VideoCodec> GetVp8IfAvailable() {
+ return MediaCodecUtil::IsVp8DecoderAvailable()
+ ? std::vector<VideoCodec>()
+ : std::vector<VideoCodec>(1, kCodecVP8);
+}
+
+INSTANTIATE_TEST_CASE_P(MediaCodecVideoDecoderTest,
+ MediaCodecVideoDecoderTest,
+ testing::ValuesIn(GetTestList()));
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+INSTANTIATE_TEST_CASE_P(MediaCodecVideoDecoderH264Test,
+ MediaCodecVideoDecoderH264Test,
+ testing::ValuesIn(GetH264IfAvailable()));
+#endif
+
+INSTANTIATE_TEST_CASE_P(MediaCodecVideoDecoderVp8Test,
+ MediaCodecVideoDecoderVp8Test,
+ testing::ValuesIn(GetVp8IfAvailable()));
+
} // namespace media
diff --git a/chromium/media/gpu/android/mock_android_video_surface_chooser.cc b/chromium/media/gpu/android/mock_android_video_surface_chooser.cc
index 21641c838f8..35f701f1ab7 100644
--- a/chromium/media/gpu/android/mock_android_video_surface_chooser.cc
+++ b/chromium/media/gpu/android/mock_android_video_surface_chooser.cc
@@ -11,10 +11,10 @@ MockAndroidVideoSurfaceChooser::~MockAndroidVideoSurfaceChooser() = default;
void MockAndroidVideoSurfaceChooser::SetClientCallbacks(
UseOverlayCB use_overlay_cb,
- UseSurfaceTextureCB use_surface_texture_cb) {
+ UseTextureOwnerCB use_texture_owner_cb) {
MockSetClientCallbacks();
use_overlay_cb_ = std::move(use_overlay_cb);
- use_surface_texture_cb_ = std::move(use_surface_texture_cb);
+ use_texture_owner_cb_ = std::move(use_texture_owner_cb);
}
void MockAndroidVideoSurfaceChooser::UpdateState(
@@ -28,8 +28,8 @@ void MockAndroidVideoSurfaceChooser::UpdateState(
current_state_ = new_state;
}
-void MockAndroidVideoSurfaceChooser::ProvideSurfaceTexture() {
- use_surface_texture_cb_.Run();
+void MockAndroidVideoSurfaceChooser::ProvideTextureOwner() {
+ use_texture_owner_cb_.Run();
}
void MockAndroidVideoSurfaceChooser::ProvideOverlay(
diff --git a/chromium/media/gpu/android/mock_android_video_surface_chooser.h b/chromium/media/gpu/android/mock_android_video_surface_chooser.h
index 9f3fdc64758..24a2322f734 100644
--- a/chromium/media/gpu/android/mock_android_video_surface_chooser.h
+++ b/chromium/media/gpu/android/mock_android_video_surface_chooser.h
@@ -12,7 +12,7 @@
namespace media {
// A mock surface chooser that lets tests choose the surface with
-// ProvideOverlay() and ProvideSurfaceTexture().
+// ProvideOverlay() and ProvideTextureOwner().
class MockAndroidVideoSurfaceChooser : public AndroidVideoSurfaceChooser {
public:
MockAndroidVideoSurfaceChooser();
@@ -27,16 +27,16 @@ class MockAndroidVideoSurfaceChooser : public AndroidVideoSurfaceChooser {
MOCK_METHOD1(MockReplaceOverlayFactory, void(bool));
void SetClientCallbacks(UseOverlayCB use_overlay_cb,
- UseSurfaceTextureCB use_surface_texture_cb) override;
+ UseTextureOwnerCB use_texture_owner_cb) override;
void UpdateState(base::Optional<AndroidOverlayFactoryCB> factory,
const State& new_state) override;
// Calls the corresponding callback to choose the surface.
void ProvideOverlay(std::unique_ptr<AndroidOverlay> overlay);
- void ProvideSurfaceTexture();
+ void ProvideTextureOwner();
UseOverlayCB use_overlay_cb_;
- UseSurfaceTextureCB use_surface_texture_cb_;
+ UseTextureOwnerCB use_texture_owner_cb_;
AndroidOverlayFactoryCB factory_;
State current_state_;
diff --git a/chromium/media/gpu/android/mock_command_buffer_stub_wrapper.cc b/chromium/media/gpu/android/mock_command_buffer_stub_wrapper.cc
deleted file mode 100644
index 319e3f3c17a..00000000000
--- a/chromium/media/gpu/android/mock_command_buffer_stub_wrapper.cc
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/mock_command_buffer_stub_wrapper.h"
-
-namespace media {
-
-MockCommandBufferStubWrapper::MockCommandBufferStubWrapper() = default;
-MockCommandBufferStubWrapper::~MockCommandBufferStubWrapper() = default;
-
-void MockCommandBufferStubWrapper::AddDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer) {
- ASSERT_FALSE(observer_);
- ASSERT_TRUE(observer);
- observer_ = observer;
-}
-
-void MockCommandBufferStubWrapper::RemoveDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer) {
- ASSERT_EQ(observer_, observer);
- observer_ = nullptr;
-}
-
-void MockCommandBufferStubWrapper::NotifyDestruction() {
- if (observer_)
- observer_->OnWillDestroyStub();
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/mock_command_buffer_stub_wrapper.h b/chromium/media/gpu/android/mock_command_buffer_stub_wrapper.h
deleted file mode 100644
index a2eef3b9213..00000000000
--- a/chromium/media/gpu/android/mock_command_buffer_stub_wrapper.h
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_MOCK_COMMAND_BUFFER_STUB_WRAPPER_H_
-#define MEDIA_GPU_ANDROID_MOCK_COMMAND_BUFFER_STUB_WRAPPER_H_
-
-#include "media/gpu/android/command_buffer_stub_wrapper.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-namespace media {
-
-class MockCommandBufferStubWrapper
- : public ::testing::NiceMock<CommandBufferStubWrapper> {
- public:
- MockCommandBufferStubWrapper();
- virtual ~MockCommandBufferStubWrapper();
-
- // CommandBufferStubWrapper
- MOCK_METHOD0(MakeCurrent, bool());
-
- void AddDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer);
- void RemoveDestructionObserver(
- gpu::CommandBufferStub::DestructionObserver* observer);
-
- // Notify the observer that we will be destroyed.
- void NotifyDestruction();
-
- private:
- gpu::CommandBufferStub::DestructionObserver* observer_ = nullptr;
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_MOCK_COMMAND_BUFFER_STUB_WRAPPER_H_
diff --git a/chromium/media/gpu/android/mock_surface_texture_gl_owner.cc b/chromium/media/gpu/android/mock_texture_owner.cc
index 54113058c14..bd1368b2b5e 100644
--- a/chromium/media/gpu/android/mock_surface_texture_gl_owner.cc
+++ b/chromium/media/gpu/android/mock_texture_owner.cc
@@ -2,17 +2,16 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/gpu/android/mock_surface_texture_gl_owner.h"
+#include "media/gpu/android/mock_texture_owner.h"
namespace media {
using testing::Invoke;
using testing::Return;
-MockSurfaceTextureGLOwner::MockSurfaceTextureGLOwner(
- GLuint fake_texture_id,
- gl::GLContext* fake_context,
- gl::GLSurface* fake_surface)
+MockTextureOwner::MockTextureOwner(GLuint fake_texture_id,
+ gl::GLContext* fake_context,
+ gl::GLSurface* fake_surface)
: fake_texture_id(fake_texture_id),
fake_context(fake_context),
fake_surface(fake_surface),
@@ -21,19 +20,17 @@ MockSurfaceTextureGLOwner::MockSurfaceTextureGLOwner(
ON_CALL(*this, GetContext()).WillByDefault(Return(fake_context));
ON_CALL(*this, GetSurface()).WillByDefault(Return(fake_surface));
ON_CALL(*this, SetReleaseTimeToNow())
- .WillByDefault(
- Invoke(this, &MockSurfaceTextureGLOwner::FakeSetReleaseTimeToNow));
+ .WillByDefault(Invoke(this, &MockTextureOwner::FakeSetReleaseTimeToNow));
ON_CALL(*this, IgnorePendingRelease())
- .WillByDefault(
- Invoke(this, &MockSurfaceTextureGLOwner::FakeIgnorePendingRelease));
+ .WillByDefault(Invoke(this, &MockTextureOwner::FakeIgnorePendingRelease));
ON_CALL(*this, IsExpectingFrameAvailable())
- .WillByDefault(Invoke(
- this, &MockSurfaceTextureGLOwner::FakeIsExpectingFrameAvailable));
+ .WillByDefault(
+ Invoke(this, &MockTextureOwner::FakeIsExpectingFrameAvailable));
ON_CALL(*this, WaitForFrameAvailable())
.WillByDefault(
- Invoke(this, &MockSurfaceTextureGLOwner::FakeWaitForFrameAvailable));
+ Invoke(this, &MockTextureOwner::FakeWaitForFrameAvailable));
}
-MockSurfaceTextureGLOwner::~MockSurfaceTextureGLOwner() = default;
+MockTextureOwner::~MockTextureOwner() = default;
} // namespace media
diff --git a/chromium/media/gpu/android/mock_surface_texture_gl_owner.h b/chromium/media/gpu/android/mock_texture_owner.h
index f43efe739f6..8f3d5e036b4 100644
--- a/chromium/media/gpu/android/mock_surface_texture_gl_owner.h
+++ b/chromium/media/gpu/android/mock_texture_owner.h
@@ -2,10 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_GPU_ANDROID_MOCK_SURFACE_TEXTURE_GL_OWNER_H_
-#define MEDIA_GPU_ANDROID_MOCK_SURFACE_TEXTURE_GL_OWNER_H_
+#ifndef MEDIA_GPU_ANDROID_MOCK_TEXTURE_OWNER_H_
+#define MEDIA_GPU_ANDROID_MOCK_TEXTURE_OWNER_H_
-#include "media/gpu/android/surface_texture_gl_owner.h"
+#include "media/gpu/android/texture_owner.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gl/gl_bindings.h"
@@ -15,11 +15,11 @@
namespace media {
// This is a mock with a small amount of fake functionality too.
-class MockSurfaceTextureGLOwner : public SurfaceTextureGLOwner {
+class MockTextureOwner : public TextureOwner {
public:
- MockSurfaceTextureGLOwner(GLuint fake_texture_id,
- gl::GLContext* fake_context,
- gl::GLSurface* fake_surface);
+ MockTextureOwner(GLuint fake_texture_id,
+ gl::GLContext* fake_context,
+ gl::GLSurface* fake_surface);
MOCK_CONST_METHOD0(GetTextureId, GLuint());
MOCK_CONST_METHOD0(GetContext, gl::GLContext*());
@@ -45,9 +45,9 @@ class MockSurfaceTextureGLOwner : public SurfaceTextureGLOwner {
bool expecting_frame_available;
protected:
- ~MockSurfaceTextureGLOwner();
+ ~MockTextureOwner();
};
} // namespace media
-#endif // MEDIA_GPU_ANDROID_MOCK_SURFACE_TEXTURE_GL_OWNER_H_
+#endif // MEDIA_GPU_ANDROID_MOCK_TEXTURE_OWNER_H_
diff --git a/chromium/media/gpu/android/surface_chooser_helper.cc b/chromium/media/gpu/android/surface_chooser_helper.cc
index 1704289671b..504c64d95c4 100644
--- a/chromium/media/gpu/android/surface_chooser_helper.cc
+++ b/chromium/media/gpu/android/surface_chooser_helper.cc
@@ -89,6 +89,10 @@ void SurfaceChooserHelper::SetIsFullscreen(bool is_fullscreen) {
surface_chooser_state_.is_fullscreen = is_fullscreen;
}
+void SurfaceChooserHelper::SetVideoRotation(VideoRotation video_rotation) {
+ surface_chooser_state_.video_rotation = video_rotation;
+}
+
void SurfaceChooserHelper::UpdateChooserState(
base::Optional<AndroidOverlayFactoryCB> new_factory) {
surface_chooser_->UpdateState(std::move(new_factory), surface_chooser_state_);
@@ -143,8 +147,8 @@ SurfaceChooserHelper::ComputeFrameInformation(bool is_using_overlay) {
if (!is_using_overlay) {
// Not an overlay.
return surface_chooser_state_.is_secure
- ? FrameInformation::SURFACETEXTURE_L3
- : FrameInformation::SURFACETEXTURE_INSECURE;
+ ? FrameInformation::NON_OVERLAY_L3
+ : FrameInformation::NON_OVERLAY_INSECURE;
}
// Overlay.
diff --git a/chromium/media/gpu/android/surface_chooser_helper.h b/chromium/media/gpu/android/surface_chooser_helper.h
index 2bfaddf9ee0..b696a4672d8 100644
--- a/chromium/media/gpu/android/surface_chooser_helper.h
+++ b/chromium/media/gpu/android/surface_chooser_helper.h
@@ -9,6 +9,7 @@
#include "base/macros.h"
#include "base/time/time.h"
+#include "media/base/video_rotation.h"
#include "media/gpu/android/android_video_surface_chooser.h"
#include "media/gpu/android/promotion_hint_aggregator.h"
#include "media/gpu/media_gpu_export.h"
@@ -39,11 +40,11 @@ class MEDIA_GPU_EXPORT SurfaceChooserHelper {
enum class SecureSurfaceMode {
// The surface should not be secure. This allows both overlays and
- // SurfaceTexture surfaces.
+ // TextureOwner surfaces.
kInsecure,
// It is preferable to have a secure surface, but insecure
- // (SurfaceTexture) is better than failing.
+ // (TextureOwner) is better than failing.
kRequested,
// The surface must be a secure surface, and should fail otherwise.
@@ -53,8 +54,8 @@ class MEDIA_GPU_EXPORT SurfaceChooserHelper {
// Must match AVDAFrameInformation UMA enum. Please do not remove or re-order
// values, only append new ones.
enum class FrameInformation {
- SURFACETEXTURE_INSECURE = 0,
- SURFACETEXTURE_L3 = 1,
+ NON_OVERLAY_INSECURE = 0,
+ NON_OVERLAY_L3 = 1,
OVERLAY_L3 = 2,
OVERLAY_L1 = 3,
OVERLAY_INSECURE_PLAYER_ELEMENT_FULLSCREEN = 4,
@@ -74,6 +75,9 @@ class MEDIA_GPU_EXPORT SurfaceChooserHelper {
// Notify us about the fullscreen state. Does not update the chooser state.
void SetIsFullscreen(bool is_fullscreen);
+ // Notify us about the default rotation for the video.
+ void SetVideoRotation(VideoRotation video_rotation);
+
// Update the chooser state using the given factory.
void UpdateChooserState(base::Optional<AndroidOverlayFactoryCB> new_factory);
diff --git a/chromium/media/gpu/android/surface_chooser_helper_unittest.cc b/chromium/media/gpu/android/surface_chooser_helper_unittest.cc
index e1d7c926d2b..b07ec1a7f46 100644
--- a/chromium/media/gpu/android/surface_chooser_helper_unittest.cc
+++ b/chromium/media/gpu/android/surface_chooser_helper_unittest.cc
@@ -77,6 +77,13 @@ TEST_F(SurfaceChooserHelperTest, SetIsFullscreen) {
// We don't really care if it sets expecting_relayout, clears it, or not.
}
+TEST_F(SurfaceChooserHelperTest, SetVideoRotation) {
+ // VideoRotation should be forwarded to the chooser.
+ helper_->SetVideoRotation(VIDEO_ROTATION_90);
+ UpdateChooserState();
+ ASSERT_EQ(chooser_->current_state_.video_rotation, VIDEO_ROTATION_90);
+}
+
TEST_F(SurfaceChooserHelperTest, SetIsOverlayRequired) {
// The default helper was created without |is_required|, so verify that.
UpdateChooserState();
@@ -242,7 +249,7 @@ TEST_F(SurfaceChooserHelperTest, FrameInformationIsCorrectForL3) {
ASSERT_EQ(SurfaceChooserHelper::FrameInformation::OVERLAY_L3,
helper_->ComputeFrameInformation(true));
- ASSERT_EQ(SurfaceChooserHelper::FrameInformation::SURFACETEXTURE_L3,
+ ASSERT_EQ(SurfaceChooserHelper::FrameInformation::NON_OVERLAY_L3,
helper_->ComputeFrameInformation(false));
}
@@ -251,8 +258,8 @@ TEST_F(SurfaceChooserHelperTest, FrameInformationIsCorrectForInsecure) {
helper_->SetSecureSurfaceMode(
SurfaceChooserHelper::SecureSurfaceMode::kInsecure);
- // Not using an overlay should be SURFACETEXTURE_INSECURE
- ASSERT_EQ(SurfaceChooserHelper::FrameInformation::SURFACETEXTURE_INSECURE,
+ // Not using an overlay should be NON_OVERLAY_INSECURE
+ ASSERT_EQ(SurfaceChooserHelper::FrameInformation::NON_OVERLAY_INSECURE,
helper_->ComputeFrameInformation(false));
// Fullscreen state should affect the result, so that we can tell the
diff --git a/chromium/media/gpu/android/surface_texture_gl_owner.cc b/chromium/media/gpu/android/surface_texture_gl_owner.cc
index 067fe429c71..ef1de92ed72 100644
--- a/chromium/media/gpu/android/surface_texture_gl_owner.cc
+++ b/chromium/media/gpu/android/surface_texture_gl_owner.cc
@@ -30,14 +30,7 @@ struct FrameAvailableEvent
~FrameAvailableEvent() = default;
};
-SurfaceTextureGLOwner::SurfaceTextureGLOwner()
- : base::RefCountedDeleteOnSequence<SurfaceTextureGLOwner>(
- base::ThreadTaskRunnerHandle::Get()),
- task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
-
-SurfaceTextureGLOwner::~SurfaceTextureGLOwner() = default;
-
-scoped_refptr<SurfaceTextureGLOwner> SurfaceTextureGLOwnerImpl::Create() {
+scoped_refptr<TextureOwner> SurfaceTextureGLOwner::Create() {
GLuint texture_id;
glGenTextures(1, &texture_id);
if (!texture_id)
@@ -52,10 +45,10 @@ scoped_refptr<SurfaceTextureGLOwner> SurfaceTextureGLOwnerImpl::Create() {
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
DCHECK_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
- return new SurfaceTextureGLOwnerImpl(texture_id);
+ return new SurfaceTextureGLOwner(texture_id);
}
-SurfaceTextureGLOwnerImpl::SurfaceTextureGLOwnerImpl(GLuint texture_id)
+SurfaceTextureGLOwner::SurfaceTextureGLOwner(GLuint texture_id)
: surface_texture_(gl::SurfaceTexture::Create(texture_id)),
texture_id_(texture_id),
context_(gl::GLContext::GetCurrent()),
@@ -63,11 +56,11 @@ SurfaceTextureGLOwnerImpl::SurfaceTextureGLOwnerImpl(GLuint texture_id)
frame_available_event_(new FrameAvailableEvent()) {
DCHECK(context_);
DCHECK(surface_);
- surface_texture_->SetFrameAvailableCallbackOnAnyThread(
- base::Bind(&FrameAvailableEvent::Signal, frame_available_event_));
+ surface_texture_->SetFrameAvailableCallbackOnAnyThread(base::BindRepeating(
+ &FrameAvailableEvent::Signal, frame_available_event_));
}
-SurfaceTextureGLOwnerImpl::~SurfaceTextureGLOwnerImpl() {
+SurfaceTextureGLOwner::~SurfaceTextureGLOwner() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
// Make sure that the SurfaceTexture isn't using the GL objects.
@@ -89,56 +82,56 @@ SurfaceTextureGLOwnerImpl::~SurfaceTextureGLOwnerImpl() {
DCHECK_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError());
}
-GLuint SurfaceTextureGLOwnerImpl::GetTextureId() const {
+GLuint SurfaceTextureGLOwner::GetTextureId() const {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
return texture_id_;
}
-gl::ScopedJavaSurface SurfaceTextureGLOwnerImpl::CreateJavaSurface() const {
+gl::ScopedJavaSurface SurfaceTextureGLOwner::CreateJavaSurface() const {
return gl::ScopedJavaSurface(surface_texture_.get());
}
-void SurfaceTextureGLOwnerImpl::UpdateTexImage() {
+void SurfaceTextureGLOwner::UpdateTexImage() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
surface_texture_->UpdateTexImage();
}
-void SurfaceTextureGLOwnerImpl::GetTransformMatrix(float mtx[]) {
+void SurfaceTextureGLOwner::GetTransformMatrix(float mtx[]) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
surface_texture_->GetTransformMatrix(mtx);
}
-void SurfaceTextureGLOwnerImpl::ReleaseBackBuffers() {
+void SurfaceTextureGLOwner::ReleaseBackBuffers() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
surface_texture_->ReleaseBackBuffers();
}
-gl::GLContext* SurfaceTextureGLOwnerImpl::GetContext() const {
+gl::GLContext* SurfaceTextureGLOwner::GetContext() const {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
return context_.get();
}
-gl::GLSurface* SurfaceTextureGLOwnerImpl::GetSurface() const {
+gl::GLSurface* SurfaceTextureGLOwner::GetSurface() const {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
return surface_.get();
}
-void SurfaceTextureGLOwnerImpl::SetReleaseTimeToNow() {
+void SurfaceTextureGLOwner::SetReleaseTimeToNow() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
release_time_ = base::TimeTicks::Now();
}
-void SurfaceTextureGLOwnerImpl::IgnorePendingRelease() {
+void SurfaceTextureGLOwner::IgnorePendingRelease() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
release_time_ = base::TimeTicks();
}
-bool SurfaceTextureGLOwnerImpl::IsExpectingFrameAvailable() {
+bool SurfaceTextureGLOwner::IsExpectingFrameAvailable() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
return !release_time_.is_null();
}
-void SurfaceTextureGLOwnerImpl::WaitForFrameAvailable() {
+void SurfaceTextureGLOwner::WaitForFrameAvailable() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(!release_time_.is_null());
diff --git a/chromium/media/gpu/android/surface_texture_gl_owner.h b/chromium/media/gpu/android/surface_texture_gl_owner.h
index 904dc34d291..5277ef90461 100644
--- a/chromium/media/gpu/android/surface_texture_gl_owner.h
+++ b/chromium/media/gpu/android/surface_texture_gl_owner.h
@@ -5,90 +5,22 @@
#ifndef MEDIA_GPU_ANDROID_SURFACE_TEXTURE_GL_OWNER_H_
#define MEDIA_GPU_ANDROID_SURFACE_TEXTURE_GL_OWNER_H_
+#include "media/gpu/android/texture_owner.h"
+
#include "base/memory/ref_counted.h"
-#include "base/memory/ref_counted_delete_on_sequence.h"
-#include "base/sequenced_task_runner_helpers.h"
-#include "base/single_thread_task_runner.h"
-#include "base/synchronization/waitable_event.h"
#include "base/threading/thread_checker.h"
#include "media/gpu/media_gpu_export.h"
-#include "ui/gl/android/scoped_java_surface.h"
#include "ui/gl/android/surface_texture.h"
-#include "ui/gl/gl_bindings.h"
-#include "ui/gl/gl_context.h"
-#include "ui/gl/gl_surface.h"
namespace media {
struct FrameAvailableEvent;
-// A SurfaceTexture wrapper that creates and maintains ownership of the
-// attached GL texture. The texture is destroyed with the object but it's
-// possible to call ReleaseSurfaceTexture() without destroying the GL texture.
-// It should only be accessed on the thread it was created on, with the
-// exception of CreateJavaSurface(), which can be called on any thread.
-// It's safe to keep and drop refptrs to it on any thread; it will be
-// automatically destructed on the thread it was constructed on.
-// Virtual for testing; see SurfaceTextureGLOwnerImpl.
-class MEDIA_GPU_EXPORT SurfaceTextureGLOwner
- : public base::RefCountedDeleteOnSequence<SurfaceTextureGLOwner> {
- public:
- SurfaceTextureGLOwner();
-
- scoped_refptr<base::SingleThreadTaskRunner> task_runner() {
- return task_runner_;
- }
-
- // Returns the GL texture id that the SurfaceTexture is attached to.
- virtual GLuint GetTextureId() const = 0;
- virtual gl::GLContext* GetContext() const = 0;
- virtual gl::GLSurface* GetSurface() const = 0;
-
- // Create a java surface for the SurfaceTexture.
- virtual gl::ScopedJavaSurface CreateJavaSurface() const = 0;
-
- // See gl::SurfaceTexture for the following.
- virtual void UpdateTexImage() = 0;
- virtual void GetTransformMatrix(float mtx[16]) = 0;
- virtual void ReleaseBackBuffers() = 0;
-
- // Sets the expectation of onFrameAVailable for a new frame because a buffer
- // was just released to this surface.
- virtual void SetReleaseTimeToNow() = 0;
-
- // Ignores a pending release that was previously indicated with
- // SetReleaseTimeToNow().
- // TODO(watk): This doesn't seem necessary. It actually may be detrimental
- // because the next time we release a buffer we may confuse its
- // onFrameAvailable with the one we're ignoring.
- virtual void IgnorePendingRelease() = 0;
-
- // Whether we're expecting onFrameAvailable. True when SetReleaseTimeToNow()
- // was called but neither IgnorePendingRelease() nor WaitForFrameAvailable()
- // have been called since.
- virtual bool IsExpectingFrameAvailable() = 0;
-
- // Waits for onFrameAvailable until it's been 5ms since the buffer was
- // released. This must only be called if IsExpectingFrameAvailable().
- virtual void WaitForFrameAvailable() = 0;
-
- protected:
- friend class base::RefCountedDeleteOnSequence<SurfaceTextureGLOwner>;
- friend class base::DeleteHelper<SurfaceTextureGLOwner>;
- virtual ~SurfaceTextureGLOwner();
-
- private:
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(SurfaceTextureGLOwner);
-};
-
-class MEDIA_GPU_EXPORT SurfaceTextureGLOwnerImpl
- : public SurfaceTextureGLOwner {
+class MEDIA_GPU_EXPORT SurfaceTextureGLOwner : public TextureOwner {
public:
// Creates a GL texture using the current platform GL context and returns a
- // new SurfaceTextureGLOwnerImpl attached to it. Returns null on failure.
- static scoped_refptr<SurfaceTextureGLOwner> Create();
+ // new SurfaceTextureGLOwner attached to it. Returns null on failure.
+ static scoped_refptr<TextureOwner> Create();
GLuint GetTextureId() const override;
gl::GLContext* GetContext() const override;
@@ -103,8 +35,8 @@ class MEDIA_GPU_EXPORT SurfaceTextureGLOwnerImpl
void WaitForFrameAvailable() override;
private:
- SurfaceTextureGLOwnerImpl(GLuint texture_id);
- ~SurfaceTextureGLOwnerImpl() override;
+ SurfaceTextureGLOwner(GLuint texture_id);
+ ~SurfaceTextureGLOwner() override;
scoped_refptr<gl::SurfaceTexture> surface_texture_;
GLuint texture_id_;
@@ -121,7 +53,7 @@ class MEDIA_GPU_EXPORT SurfaceTextureGLOwnerImpl
THREAD_CHECKER(thread_checker_);
- DISALLOW_COPY_AND_ASSIGN(SurfaceTextureGLOwnerImpl);
+ DISALLOW_COPY_AND_ASSIGN(SurfaceTextureGLOwner);
};
} // namespace media
diff --git a/chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc b/chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc
index 1d395cb78a8..39b2da82883 100644
--- a/chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc
+++ b/chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc
@@ -43,7 +43,7 @@ class SurfaceTextureGLOwnerTest : public testing::Test {
context_->Initialize(surface_.get(), gl::GLContextAttribs());
ASSERT_TRUE(context_->MakeCurrent(surface_.get()));
- surface_texture_ = SurfaceTextureGLOwnerImpl::Create();
+ surface_texture_ = SurfaceTextureGLOwner::Create();
texture_id_ = surface_texture_->GetTextureId();
// Bind and un-bind the texture, since that's required for glIsTexture to
// return true.
@@ -60,7 +60,7 @@ class SurfaceTextureGLOwnerTest : public testing::Test {
gl::init::ShutdownGL(false);
}
- scoped_refptr<SurfaceTextureGLOwner> surface_texture_;
+ scoped_refptr<TextureOwner> surface_texture_;
GLuint texture_id_ = 0;
scoped_refptr<gl::GLContext> context_;
diff --git a/chromium/media/gpu/android/texture_owner.cc b/chromium/media/gpu/android/texture_owner.cc
new file mode 100644
index 00000000000..8b3801f1838
--- /dev/null
+++ b/chromium/media/gpu/android/texture_owner.cc
@@ -0,0 +1,18 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/android/texture_owner.h"
+
+#include "base/threading/thread_task_runner_handle.h"
+
+namespace media {
+
+TextureOwner::TextureOwner()
+ : base::RefCountedDeleteOnSequence<TextureOwner>(
+ base::ThreadTaskRunnerHandle::Get()),
+ task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
+
+TextureOwner::~TextureOwner() = default;
+
+} // namespace media
diff --git a/chromium/media/gpu/android/texture_owner.h b/chromium/media/gpu/android/texture_owner.h
new file mode 100644
index 00000000000..8660d783752
--- /dev/null
+++ b/chromium/media/gpu/android/texture_owner.h
@@ -0,0 +1,80 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_ANDROID_TEXTURE_OWNER_H_
+#define MEDIA_GPU_ANDROID_TEXTURE_OWNER_H_
+
+#include "base/memory/ref_counted.h"
+#include "base/memory/ref_counted_delete_on_sequence.h"
+#include "base/single_thread_task_runner.h"
+#include "media/gpu/media_gpu_export.h"
+#include "ui/gl/android/scoped_java_surface.h"
+#include "ui/gl/gl_bindings.h"
+#include "ui/gl/gl_context.h"
+#include "ui/gl/gl_surface.h"
+
+namespace media {
+
+// A Texture wrapper interface that creates and maintains ownership of the
+// attached GL or Vulkan texture. The texture is destroyed with the object.
+// It should only be accessed on the thread it was created on, with the
+// exception of CreateJavaSurface(), which can be called on any thread. It's
+// safe to keep and drop refptrs to it on any thread; it will be automatically
+// destructed on the thread it was constructed on.
+class MEDIA_GPU_EXPORT TextureOwner
+ : public base::RefCountedDeleteOnSequence<TextureOwner> {
+ public:
+ TextureOwner();
+
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner() {
+ return task_runner_;
+ }
+
+ // Returns the GL texture id that the TextureOwner is attached to.
+ virtual GLuint GetTextureId() const = 0;
+ virtual gl::GLContext* GetContext() const = 0;
+ virtual gl::GLSurface* GetSurface() const = 0;
+
+ // Create a java surface for the TextureOwner.
+ virtual gl::ScopedJavaSurface CreateJavaSurface() const = 0;
+
+ // Update the texture image using the latest available image data.
+ virtual void UpdateTexImage() = 0;
+ // Transformation matrix if any associated with the texture image.
+ virtual void GetTransformMatrix(float mtx[16]) = 0;
+ virtual void ReleaseBackBuffers() = 0;
+
+ // Sets the expectation of onFrameAVailable for a new frame because a buffer
+ // was just released to this surface.
+ virtual void SetReleaseTimeToNow() = 0;
+
+ // Ignores a pending release that was previously indicated with
+ // SetReleaseTimeToNow(). TODO(watk): This doesn't seem necessary. It
+ // actually may be detrimental because the next time we release a buffer we
+ // may confuse its onFrameAvailable with the one we're ignoring.
+ virtual void IgnorePendingRelease() = 0;
+
+ // Whether we're expecting onFrameAvailable. True when SetReleaseTimeToNow()
+ // was called but neither IgnorePendingRelease() nor WaitForFrameAvailable()
+ // have been called since.
+ virtual bool IsExpectingFrameAvailable() = 0;
+
+ // Waits for onFrameAvailable until it's been 5ms since the buffer was
+ // released. This must only be called if IsExpectingFrameAvailable().
+ virtual void WaitForFrameAvailable() = 0;
+
+ protected:
+ friend class base::RefCountedDeleteOnSequence<TextureOwner>;
+ friend class base::DeleteHelper<TextureOwner>;
+ virtual ~TextureOwner();
+
+ private:
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+
+ DISALLOW_COPY_AND_ASSIGN(TextureOwner);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_ANDROID_TEXTURE_OWNER_H_
diff --git a/chromium/media/gpu/android/texture_pool.cc b/chromium/media/gpu/android/texture_pool.cc
index ba124baedbe..efdb4552824 100644
--- a/chromium/media/gpu/android/texture_pool.cc
+++ b/chromium/media/gpu/android/texture_pool.cc
@@ -5,22 +5,38 @@
#include "media/gpu/android/texture_pool.h"
#include "gpu/command_buffer/service/texture_manager.h"
-#include "media/gpu/android/command_buffer_stub_wrapper.h"
#include "media/gpu/android/texture_wrapper.h"
+#include "media/gpu/command_buffer_helper.h"
+#include "ui/gl/gl_context.h"
+#include "ui/gl/scoped_make_current.h"
namespace media {
-TexturePool::TexturePool(std::unique_ptr<CommandBufferStubWrapper> stub)
- : stub_(std::move(stub)) {
- if (stub_)
- stub_->AddDestructionObserver(this);
+TexturePool::TexturePool(scoped_refptr<CommandBufferHelper> helper)
+ : helper_(std::move(helper)), weak_factory_(this) {
+ if (helper_) {
+ helper_->SetWillDestroyStubCB(base::BindOnce(
+ &TexturePool::OnWillDestroyStub, weak_factory_.GetWeakPtr()));
+ }
}
TexturePool::~TexturePool() {
- DestroyAllPlatformTextures();
-
- if (stub_)
- stub_->RemoveDestructionObserver(this);
+ // Note that the size of |pool_| doesn't, in general, tell us if there are any
+ // textures. If the stub has been destroyed, then we will drop the
+ // TextureRefs but leave null entries in the map. So, we check |stub_| too.
+ if (pool_.size() && helper_) {
+ // TODO(liberato): consider using ScopedMakeCurrent here, though if we are
+ // ever called as part of decoder teardown, then using ScopedMakeCurrent
+ // isn't safe. For now, we preserve the old behavior (MakeCurrent).
+ //
+ // We check IsContextCurrent, even though that only checks for the
+ // underlying shared context if |context| is a virtual context. Assuming
+ // that all TextureRef does is to delete a texture, this is enough. Of
+ // course, we shouldn't assume that this is all it does.
+ bool have_context =
+ helper_->IsContextCurrent() || helper_->MakeContextCurrent();
+ DestroyAllPlatformTextures(have_context);
+ }
}
void TexturePool::AddTexture(std::unique_ptr<TextureWrapper> texture) {
@@ -29,35 +45,51 @@ void TexturePool::AddTexture(std::unique_ptr<TextureWrapper> texture) {
// Don't permit additions after we've lost the stub.
// TODO(liberato): consider making this fail gracefully. However, nobody
// should be doing this, so for now it's a DCHECK.
- DCHECK(stub_);
+ DCHECK(helper_);
TextureWrapper* texture_raw = texture.get();
pool_[texture_raw] = std::move(texture);
}
-void TexturePool::ReleaseTexture(TextureWrapper* texture) {
+void TexturePool::ReleaseTexture(TextureWrapper* texture,
+ const gpu::SyncToken& sync_token) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ // If we don't have a sync token, or if we have no stub, then just finish.
+ if (!sync_token.HasData() || !helper_) {
+ OnSyncTokenReleased(texture);
+ return;
+ }
+
+ // We keep a strong ref to |this| in the callback, so that we are guaranteed
+ // to receive it. It's common for the last ref to us to be our caller, as
+ // a callback. We need to stick around a bit longer than that if there's a
+ // sync token. Plus, we're required to keep |helper_| around while a wait is
+ // still pending.
+ helper_->WaitForSyncToken(
+ sync_token, base::BindOnce(&TexturePool::OnSyncTokenReleased,
+ scoped_refptr<TexturePool>(this), texture));
+}
+
+void TexturePool::OnSyncTokenReleased(TextureWrapper* texture) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
auto iter = pool_.find(texture);
DCHECK(iter != pool_.end());
// If we can't make the context current, then notify the texture. Note that
- // the wrapper might already have been destroyed, which is fine.
- if (iter->second && (!stub_ || !stub_->MakeCurrent()))
+ // the wrapper might already have been destroyed, which is fine. We elide
+ // the MakeContextCurrent if our underlying physical context is current, which
+ // only works if we don't do much besides delete the texture.
+ bool have_context =
+ helper_ && (helper_->IsContextCurrent() || helper_->MakeContextCurrent());
+ if (iter->second && !have_context)
texture->ForceContextLost();
pool_.erase(iter);
}
-void TexturePool::DestroyAllPlatformTextures() {
+void TexturePool::DestroyAllPlatformTextures(bool have_context) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- // Don't bother to make the context current if we have no textures.
- if (!pool_.size())
- return;
-
- // If we can't make the context current, then notify all the textures that
- // they can't delete the underlying platform textures.
- const bool have_context = stub_ && stub_->MakeCurrent();
-
// Destroy the wrapper, but keep the entry around in the map. We do this so
// that ReleaseTexture can still check that at least the texture was, at some
// point, in the map. Hopefully, since nobody should be adding textures to
@@ -68,6 +100,8 @@ void TexturePool::DestroyAllPlatformTextures() {
if (!texture)
continue;
+ // If we can't make the context current, then notify all the textures that
+ // they can't delete the underlying platform textures.
if (!have_context)
texture->ForceContextLost();
@@ -75,12 +109,11 @@ void TexturePool::DestroyAllPlatformTextures() {
}
}
-void TexturePool::OnWillDestroyStub() {
+void TexturePool::OnWillDestroyStub(bool have_context) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- DCHECK(stub_);
- // Since the stub is going away, clean up while we can.
- DestroyAllPlatformTextures();
- stub_ = nullptr;
+ DCHECK(helper_);
+ DestroyAllPlatformTextures(have_context);
+ helper_ = nullptr;
}
} // namespace media
diff --git a/chromium/media/gpu/android/texture_pool.h b/chromium/media/gpu/android/texture_pool.h
index cc42f51605d..8db87c3e159 100644
--- a/chromium/media/gpu/android/texture_pool.h
+++ b/chromium/media/gpu/android/texture_pool.h
@@ -9,13 +9,14 @@
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_refptr.h"
+#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
#include "gpu/ipc/service/command_buffer_stub.h"
#include "media/gpu/media_gpu_export.h"
namespace media {
-class CommandBufferStubWrapper;
+class CommandBufferHelper;
class TextureWrapper;
// Owns Textures that are used to hold decoded video frames. Allows them to
@@ -23,11 +24,9 @@ class TextureWrapper;
// pipeline is suspended, but decoded frames can be on-screen indefinitely.
// TODO(tmathmeyer): Convert this into a pool. Right now, we just constantly
// add new textures and remove them.
-class MEDIA_GPU_EXPORT TexturePool
- : public base::RefCounted<TexturePool>,
- public gpu::CommandBufferStub::DestructionObserver {
+class MEDIA_GPU_EXPORT TexturePool : public base::RefCounted<TexturePool> {
public:
- TexturePool(std::unique_ptr<CommandBufferStubWrapper> stub);
+ TexturePool(scoped_refptr<CommandBufferHelper> helper);
// Add a new texture into the pool. This may only be done before |stub_| is
// destroyed. When |stub_| is destroyed, we will destroy any textures that
@@ -41,29 +40,36 @@ class MEDIA_GPU_EXPORT TexturePool
// Release a texture back into the pool. |texture| must have been added to
// the pool previously, and not released. Otherwise, this is undefined.
// Note: since we don't actually pool things, this just forgets |texture|.
- // It's okay if this is called after we've lost |stub_|.
- void ReleaseTexture(TextureWrapper* texture);
+ // It's okay if this is called after we've lost |stub_|. If |sync_token| is
+ // not null, then we'll wait for that token before taking any action.
+ void ReleaseTexture(TextureWrapper* texture,
+ const gpu::SyncToken& sync_token);
protected:
- ~TexturePool() override;
+ virtual ~TexturePool();
- // DestructionObserver
- void OnWillDestroyStub() override;
+ // Called after a sync token has been released, to free |texture|.
+ void OnSyncTokenReleased(TextureWrapper* texture);
+
+ // Called when |stub_| notifies us that the underlying stub will be destroyed.
+ void OnWillDestroyStub(bool have_context);
// When called, we will destroy any platform textures if we have a context,
// or mark them as "lost context" if we don't. This will not actually remove
// entries in |pool_|, but will instead clear the unique_ptr to delete the
// texture. Assuming that nobody adds textures after our stub is destroyed,
// this is still alias-free.
- void DestroyAllPlatformTextures();
+ void DestroyAllPlatformTextures(bool have_context);
private:
friend class base::RefCounted<TexturePool>;
THREAD_CHECKER(thread_checker_);
- std::unique_ptr<CommandBufferStubWrapper> stub_;
+ scoped_refptr<CommandBufferHelper> helper_;
std::map<TextureWrapper*, std::unique_ptr<TextureWrapper>> pool_;
+
+ base::WeakPtrFactory<TexturePool> weak_factory_;
};
} // namespace media
diff --git a/chromium/media/gpu/android/texture_pool_unittest.cc b/chromium/media/gpu/android/texture_pool_unittest.cc
index d1c9ed7ac7e..d0d20d35e72 100644
--- a/chromium/media/gpu/android/texture_pool_unittest.cc
+++ b/chromium/media/gpu/android/texture_pool_unittest.cc
@@ -7,14 +7,21 @@
#include <memory>
#include "base/memory/weak_ptr.h"
+#include "base/run_loop.h"
+#include "base/test/scoped_task_environment.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "gpu/command_buffer/common/command_buffer_id.h"
+#include "gpu/command_buffer/common/constants.h"
#include "gpu/command_buffer/service/sequence_id.h"
#include "gpu/ipc/common/gpu_messages.h"
-#include "media/gpu/android/mock_command_buffer_stub_wrapper.h"
#include "media/gpu/android/texture_wrapper.h"
+#include "media/gpu/fake_command_buffer_helper.h"
+#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
+using testing::_;
using testing::NiceMock;
using testing::Return;
@@ -30,23 +37,23 @@ class MockTextureWrapper : public NiceMock<TextureWrapper>,
class TexturePoolTest : public testing::Test {
public:
- TexturePoolTest() = default;
-
void SetUp() override {
- std::unique_ptr<MockCommandBufferStubWrapper> stub =
- std::make_unique<MockCommandBufferStubWrapper>();
- stub_ = stub.get();
- SetContextCanBeCurrent(true);
- texture_pool_ = new TexturePool(std::move(stub));
+ task_runner_ = base::ThreadTaskRunnerHandle::Get();
+ helper_ = base::MakeRefCounted<FakeCommandBufferHelper>(task_runner_);
+ texture_pool_ = new TexturePool(helper_);
+ // Random sync token that HasData().
+ sync_token_ = gpu::SyncToken(gpu::CommandBufferNamespace::GPU_IO,
+ gpu::CommandBufferId::FromUnsafeValue(1), 1);
+ ASSERT_TRUE(sync_token_.HasData());
}
- using WeakTexture = base::WeakPtr<MockTextureWrapper>;
-
- // Set whether or not |stub_| will report that MakeCurrent worked.
- void SetContextCanBeCurrent(bool allow) {
- ON_CALL(*stub_, MakeCurrent()).WillByDefault(Return(allow));
+ ~TexturePoolTest() override {
+ helper_->StubLost();
+ base::RunLoop().RunUntilIdle();
}
+ using WeakTexture = base::WeakPtr<MockTextureWrapper>;
+
WeakTexture CreateAndAddTexture() {
std::unique_ptr<MockTextureWrapper> texture =
std::make_unique<MockTextureWrapper>();
@@ -57,8 +64,14 @@ class TexturePoolTest : public testing::Test {
return texture_weak;
}
+ base::test::ScopedTaskEnvironment scoped_task_environment_;
+
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+
+ gpu::SyncToken sync_token_;
+
+ scoped_refptr<FakeCommandBufferHelper> helper_;
scoped_refptr<TexturePool> texture_pool_;
- MockCommandBufferStubWrapper* stub_ = nullptr;
};
TEST_F(TexturePoolTest, AddAndReleaseTexturesWithContext) {
@@ -66,19 +79,29 @@ TEST_F(TexturePoolTest, AddAndReleaseTexturesWithContext) {
WeakTexture texture = CreateAndAddTexture();
// The texture should not be notified that the context was lost.
EXPECT_CALL(*texture.get(), ForceContextLost()).Times(0);
- EXPECT_CALL(*stub_, MakeCurrent()).Times(1);
- texture_pool_->ReleaseTexture(texture.get());
+ texture_pool_->ReleaseTexture(texture.get(), sync_token_);
+
+ // The texture should still exist until the sync token is cleared.
+ ASSERT_TRUE(texture);
+
+ // Once the sync token is released, then the context should be made current
+ // and the texture should be destroyed.
+ helper_->ReleaseSyncToken(sync_token_);
+ base::RunLoop().RunUntilIdle();
ASSERT_FALSE(texture);
}
TEST_F(TexturePoolTest, AddAndReleaseTexturesWithoutContext) {
// Test that adding then deleting a texture destroys it, and marks that the
- // context is lost.
+ // context is lost, if the context can't be made current.
WeakTexture texture = CreateAndAddTexture();
- SetContextCanBeCurrent(false);
+ helper_->ContextLost();
EXPECT_CALL(*texture, ForceContextLost()).Times(1);
- EXPECT_CALL(*stub_, MakeCurrent()).Times(1);
- texture_pool_->ReleaseTexture(texture.get());
+ texture_pool_->ReleaseTexture(texture.get(), sync_token_);
+ ASSERT_TRUE(texture);
+
+ helper_->ReleaseSyncToken(sync_token_);
+ base::RunLoop().RunUntilIdle();
ASSERT_FALSE(texture);
}
@@ -95,9 +118,7 @@ TEST_F(TexturePoolTest, TexturesAreReleasedOnStubDestructionWithContext) {
EXPECT_CALL(*textures.back(), ForceContextLost()).Times(0);
}
- EXPECT_CALL(*stub_, MakeCurrent()).Times(1);
-
- stub_->NotifyDestruction();
+ helper_->StubLost();
// TextureWrappers should be destroyed.
for (auto& texture : textures)
@@ -106,7 +127,7 @@ TEST_F(TexturePoolTest, TexturesAreReleasedOnStubDestructionWithContext) {
// It should be okay to release the textures after they're destroyed, and
// nothing should crash.
for (auto* raw_texture : raw_textures)
- texture_pool_->ReleaseTexture(raw_texture);
+ texture_pool_->ReleaseTexture(raw_texture, sync_token_);
}
TEST_F(TexturePoolTest, TexturesAreReleasedOnStubDestructionWithoutContext) {
@@ -119,10 +140,8 @@ TEST_F(TexturePoolTest, TexturesAreReleasedOnStubDestructionWithoutContext) {
EXPECT_CALL(*textures.back(), ForceContextLost()).Times(1);
}
- SetContextCanBeCurrent(false);
- EXPECT_CALL(*stub_, MakeCurrent()).Times(1);
-
- stub_->NotifyDestruction();
+ helper_->ContextLost();
+ helper_->StubLost();
for (auto& texture : textures)
ASSERT_FALSE(texture);
@@ -130,7 +149,7 @@ TEST_F(TexturePoolTest, TexturesAreReleasedOnStubDestructionWithoutContext) {
// It should be okay to release the textures after they're destroyed, and
// nothing should crash.
for (auto* raw_texture : raw_textures)
- texture_pool_->ReleaseTexture(raw_texture);
+ texture_pool_->ReleaseTexture(raw_texture, sync_token_);
}
TEST_F(TexturePoolTest, NonEmptyPoolAfterStubDestructionDoesntCrash) {
@@ -138,7 +157,7 @@ TEST_F(TexturePoolTest, NonEmptyPoolAfterStubDestructionDoesntCrash) {
// works (doesn't crash) even though the pool is not empty.
CreateAndAddTexture();
- stub_->NotifyDestruction();
+ helper_->StubLost();
}
TEST_F(TexturePoolTest,
@@ -147,8 +166,35 @@ TEST_F(TexturePoolTest,
// works (doesn't crash) even though the pool is not empty.
CreateAndAddTexture();
- SetContextCanBeCurrent(false);
- stub_->NotifyDestruction();
+ helper_->ContextLost();
+ helper_->StubLost();
+}
+
+TEST_F(TexturePoolTest, TexturePoolRetainsReferenceWhileWaiting) {
+ // Dropping our reference to |texture_pool_| while it's waiting for a sync
+ // token shouldn't prevent the wait from completing.
+ WeakTexture texture = CreateAndAddTexture();
+ texture_pool_->ReleaseTexture(texture.get(), sync_token_);
+
+ // The texture should still exist until the sync token is cleared.
+ ASSERT_TRUE(texture);
+
+ // Drop the texture pool while it's waiting. Nothing should happen.
+ texture_pool_ = nullptr;
+ ASSERT_TRUE(texture);
+
+ // The texture should be destroyed after the sync token completes.
+ helper_->ReleaseSyncToken(sync_token_);
+ base::RunLoop().RunUntilIdle();
+ ASSERT_FALSE(texture);
+}
+
+TEST_F(TexturePoolTest, TexturePoolReleasesImmediatelyWithoutSyncToken) {
+ // If we don't provide a sync token, then it should release the texture.
+ WeakTexture texture = CreateAndAddTexture();
+ texture_pool_->ReleaseTexture(texture.get(), gpu::SyncToken());
+ base::RunLoop().RunUntilIdle();
+ ASSERT_FALSE(texture);
}
} // namespace media
diff --git a/chromium/media/gpu/android/video_frame_factory.h b/chromium/media/gpu/android/video_frame_factory.h
index ee65a1d84ee..f4507e8d1b8 100644
--- a/chromium/media/gpu/android/video_frame_factory.h
+++ b/chromium/media/gpu/android/video_frame_factory.h
@@ -23,7 +23,7 @@ namespace media {
struct AVDASurfaceBundle;
class CodecOutputBuffer;
-class SurfaceTextureGLOwner;
+class TextureOwner;
class VideoFrame;
// VideoFrameFactory creates CodecOutputBuffer backed VideoFrames. Not thread
@@ -31,13 +31,13 @@ class VideoFrame;
class MEDIA_GPU_EXPORT VideoFrameFactory {
public:
using GetStubCb = base::Callback<gpu::CommandBufferStub*()>;
- using InitCb = base::Callback<void(scoped_refptr<SurfaceTextureGLOwner>)>;
+ using InitCb = base::RepeatingCallback<void(scoped_refptr<TextureOwner>)>;
VideoFrameFactory() = default;
virtual ~VideoFrameFactory() = default;
// Initializes the factory and runs |init_cb| on the current thread when it's
- // complete. If initialization fails, the returned surface texture will be
+ // complete. If initialization fails, the returned texture owner will be
// null. |wants_promotion_hint| tells us whether to mark VideoFrames for
// compositor overlay promotion hints or not.
virtual void Initialize(bool wants_promotion_hint, InitCb init_cb) = 0;
@@ -47,8 +47,8 @@ class MEDIA_GPU_EXPORT VideoFrameFactory {
virtual void SetSurfaceBundle(
scoped_refptr<AVDASurfaceBundle> surface_bundle) = 0;
- // Creates a new VideoFrame backed by |output_buffer| and |surface_texture|.
- // |surface_texture| may be null if the buffer is backed by an overlay
+ // Creates a new VideoFrame backed by |output_buffer| and |texture_owner|.
+ // |texture_owner| may be null if the buffer is backed by an overlay
// instead. Runs |output_cb| on the calling sequence to return the frame.
// TODO(liberato): update the comment.
virtual void CreateVideoFrame(
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.cc b/chromium/media/gpu/android/video_frame_factory_impl.cc
index 2bf885f3bf7..e83f0d65ebd 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl.cc
@@ -21,9 +21,9 @@
#include "media/gpu/android/codec_image.h"
#include "media/gpu/android/codec_image_group.h"
#include "media/gpu/android/codec_wrapper.h"
-#include "media/gpu/android/command_buffer_stub_wrapper_impl.h"
#include "media/gpu/android/texture_pool.h"
#include "media/gpu/android/texture_wrapper.h"
+#include "media/gpu/command_buffer_helper.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
#include "ui/gl/android/surface_texture.h"
#include "ui/gl/gl_bindings.h"
@@ -67,11 +67,11 @@ void VideoFrameFactoryImpl::SetSurfaceBundle(
scoped_refptr<CodecImageGroup> image_group;
if (!surface_bundle) {
// Clear everything, just so we're not holding a reference.
- surface_texture_ = nullptr;
+ texture_owner_ = nullptr;
} else {
- // If |surface_bundle| is using a SurfaceTexture, then get it.
- surface_texture_ =
- surface_bundle->overlay ? nullptr : surface_bundle->surface_texture;
+ // If |surface_bundle| is using a TextureOwner, then get it.
+ texture_owner_ =
+ surface_bundle->overlay ? nullptr : surface_bundle->texture_owner_;
// Start a new image group. Note that there's no reason that we can't have
// more than one group per surface bundle; it's okay if we're called
@@ -103,7 +103,7 @@ void VideoFrameFactoryImpl::CreateVideoFrame(
FROM_HERE,
base::Bind(&GpuVideoFrameFactory::CreateVideoFrame,
base::Unretained(gpu_video_frame_factory_.get()),
- base::Passed(&output_buffer), surface_texture_, timestamp,
+ base::Passed(&output_buffer), texture_owner_, timestamp,
natural_size, std::move(promotion_hint_cb),
std::move(output_cb), base::ThreadTaskRunnerHandle::Get()));
}
@@ -126,7 +126,7 @@ GpuVideoFrameFactory::~GpuVideoFrameFactory() {
stub_->RemoveDestructionObserver(this);
}
-scoped_refptr<SurfaceTextureGLOwner> GpuVideoFrameFactory::Initialize(
+scoped_refptr<TextureOwner> GpuVideoFrameFactory::Initialize(
bool wants_promotion_hint,
VideoFrameFactoryImpl::GetStubCb get_stub_cb) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
@@ -136,16 +136,15 @@ scoped_refptr<SurfaceTextureGLOwner> GpuVideoFrameFactory::Initialize(
return nullptr;
stub_->AddDestructionObserver(this);
- texture_pool_ =
- new TexturePool(std::make_unique<CommandBufferStubWrapperImpl>(stub_));
+ texture_pool_ = new TexturePool(CommandBufferHelper::Create(stub_));
decoder_helper_ = GLES2DecoderHelper::Create(stub_->decoder_context());
- return SurfaceTextureGLOwnerImpl::Create();
+ return SurfaceTextureGLOwner::Create();
}
void GpuVideoFrameFactory::CreateVideoFrame(
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<SurfaceTextureGLOwner> surface_texture,
+ scoped_refptr<TextureOwner> texture_owner_,
base::TimeDelta timestamp,
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
@@ -154,7 +153,7 @@ void GpuVideoFrameFactory::CreateVideoFrame(
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
scoped_refptr<VideoFrame> frame;
scoped_refptr<gpu::gles2::TextureRef> texture_ref;
- CreateVideoFrameInternal(std::move(output_buffer), std::move(surface_texture),
+ CreateVideoFrameInternal(std::move(output_buffer), std::move(texture_owner_),
timestamp, natural_size,
std::move(promotion_hint_cb), &frame, &texture_ref);
if (!frame || !texture_ref)
@@ -165,10 +164,11 @@ void GpuVideoFrameFactory::CreateVideoFrame(
std::unique_ptr<TextureWrapper> texture_wrapper =
std::make_unique<TextureWrapperImpl>(std::move(texture_ref));
+ // Note that this keeps the pool around while any texture is.
auto drop_texture_ref = base::BindOnce(
[](scoped_refptr<TexturePool> texture_pool,
TextureWrapper* texture_wrapper, const gpu::SyncToken& sync_token) {
- texture_pool->ReleaseTexture(texture_wrapper);
+ texture_pool->ReleaseTexture(texture_wrapper, sync_token);
},
texture_pool_, base::Unretained(texture_wrapper.get()));
texture_pool_->AddTexture(std::move(texture_wrapper));
@@ -183,7 +183,7 @@ void GpuVideoFrameFactory::CreateVideoFrame(
void GpuVideoFrameFactory::CreateVideoFrameInternal(
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<SurfaceTextureGLOwner> surface_texture,
+ scoped_refptr<TextureOwner> texture_owner_,
base::TimeDelta timestamp,
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
@@ -219,27 +219,27 @@ void GpuVideoFrameFactory::CreateVideoFrameInternal(
size.width(), size.height(), GL_RGBA,
GL_UNSIGNED_BYTE);
auto image = base::MakeRefCounted<CodecImage>(
- std::move(output_buffer), surface_texture, std::move(promotion_hint_cb));
+ std::move(output_buffer), texture_owner_, std::move(promotion_hint_cb));
images_.push_back(image.get());
// Add |image| to our current image group. This makes suer that any overlay
- // lasts as long as the images. For SurfaceTexture, it doesn't do much.
+ // lasts as long as the images. For TextureOwner, it doesn't do much.
image_group_->AddCodecImage(image.get());
// Attach the image to the texture.
- // If we're attaching a SurfaceTexture backed image, we set the state to
+ // If we're attaching a TextureOwner backed image, we set the state to
// UNBOUND. This ensures that the implementation will call CopyTexImage()
- // which lets us update the surface texture at the right time.
+ // which lets us update the texture owner at the right time.
// For overlays we set the state to BOUND because it's required for
// ScheduleOverlayPlane() to be called. If something tries to sample from an
// overlay texture it won't work, but there's no way to make that work.
- auto image_state = surface_texture ? gpu::gles2::Texture::UNBOUND
- : gpu::gles2::Texture::BOUND;
- GLuint surface_texture_service_id =
- surface_texture ? surface_texture->GetTextureId() : 0;
+ auto image_state = texture_owner_ ? gpu::gles2::Texture::UNBOUND
+ : gpu::gles2::Texture::BOUND;
+ GLuint texture_owner_service_id =
+ texture_owner_ ? texture_owner_->GetTextureId() : 0;
texture_manager->SetLevelStreamTextureImage(
texture_ref.get(), GL_TEXTURE_EXTERNAL_OES, 0, image.get(), image_state,
- surface_texture_service_id);
+ texture_owner_service_id);
texture_manager->SetLevelCleared(texture_ref.get(), GL_TEXTURE_EXTERNAL_OES,
0, true);
@@ -258,22 +258,22 @@ void GpuVideoFrameFactory::CreateVideoFrameInternal(
frame->metadata()->SetBoolean(VideoFrameMetadata::COPY_REQUIRED, true);
// We unconditionally mark the picture as overlayable, even if
- // |!surface_texture|, if we want to get hints. It's required, else we won't
+ // |!texture_owner_|, if we want to get hints. It's required, else we won't
// get hints.
- const bool allow_overlay = !surface_texture || wants_promotion_hint_;
+ const bool allow_overlay = !texture_owner_ || wants_promotion_hint_;
frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY,
allow_overlay);
frame->metadata()->SetBoolean(VideoFrameMetadata::WANTS_PROMOTION_HINT,
wants_promotion_hint_);
- frame->metadata()->SetBoolean(VideoFrameMetadata::SURFACE_TEXTURE,
- !!surface_texture);
+ frame->metadata()->SetBoolean(VideoFrameMetadata::TEXTURE_OWNER,
+ !!texture_owner_);
*video_frame_out = std::move(frame);
*texture_ref_out = std::move(texture_ref);
}
-void GpuVideoFrameFactory::OnWillDestroyStub() {
+void GpuVideoFrameFactory::OnWillDestroyStub(bool have_context) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(stub_);
stub_ = nullptr;
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.h b/chromium/media/gpu/android/video_frame_factory_impl.h
index 0e78a032ee5..db6f659784c 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.h
+++ b/chromium/media/gpu/android/video_frame_factory_impl.h
@@ -6,6 +6,7 @@
#define MEDIA_GPU_ANDROID_VIDEO_FRAME_FACTORY_IMPL_
#include "base/optional.h"
+#include "base/single_thread_task_runner.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/command_buffer/service/texture_manager.h"
#include "gpu/ipc/service/command_buffer_stub.h"
@@ -53,8 +54,8 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
GetStubCb get_stub_cb_;
- // The surface texture that video frames should use, or nullptr.
- scoped_refptr<SurfaceTextureGLOwner> surface_texture_;
+ // The texture owner that video frames should use, or nullptr.
+ scoped_refptr<TextureOwner> texture_owner_;
SEQUENCE_CHECKER(sequence_checker_);
DISALLOW_COPY_AND_ASSIGN(VideoFrameFactoryImpl);
@@ -68,14 +69,14 @@ class GpuVideoFrameFactory
GpuVideoFrameFactory();
~GpuVideoFrameFactory() override;
- scoped_refptr<SurfaceTextureGLOwner> Initialize(
+ scoped_refptr<TextureOwner> Initialize(
bool wants_promotion_hint,
VideoFrameFactory::GetStubCb get_stub_cb);
// Creates and returns a VideoFrame with its ReleaseMailboxCB.
void CreateVideoFrame(
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<SurfaceTextureGLOwner> surface_texture,
+ scoped_refptr<TextureOwner> texture_owner,
base::TimeDelta timestamp,
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
@@ -90,14 +91,14 @@ class GpuVideoFrameFactory
// Creates a TextureRef and VideoFrame.
void CreateVideoFrameInternal(
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<SurfaceTextureGLOwner> surface_texture,
+ scoped_refptr<TextureOwner> texture_owner,
base::TimeDelta timestamp,
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
scoped_refptr<VideoFrame>* video_frame_out,
scoped_refptr<gpu::gles2::TextureRef>* texture_ref_out);
- void OnWillDestroyStub() override;
+ void OnWillDestroyStub(bool have_context) override;
// Removes |image| from |images_|.
void OnImageDestructed(CodecImage* image);
@@ -166,8 +167,8 @@ void MEDIA_GPU_EXPORT MaybeRenderEarly(std::vector<Image*>* image_vector_ptr) {
// Try to render the image following the front buffer to the back buffer.
size_t back_buffer_index = *front_buffer_index + 1;
if (back_buffer_index < images.size() &&
- images[back_buffer_index]->is_surface_texture_backed()) {
- images[back_buffer_index]->RenderToSurfaceTextureBackBuffer();
+ images[back_buffer_index]->is_texture_owner_backed()) {
+ images[back_buffer_index]->RenderToTextureOwnerBackBuffer();
}
}
diff --git a/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc b/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
index 9125ae4cc69..11fb25a3419 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
@@ -13,15 +13,15 @@ using testing::NiceMock;
using testing::Return;
// The dimensions for specifying MockImage behavior.
-enum ImageKind { kSurfaceTexture, kOverlay };
+enum ImageKind { kTextureOwner, kOverlay };
enum Phase { kInCodec, kInFrontBuffer, kInvalidated };
enum Expectation { kRenderToFrontBuffer, kRenderToBackBuffer, kNone };
// A mock image with the same interface as CodecImage.
struct MockImage {
MockImage(ImageKind kind, Phase phase, Expectation expectation) {
- ON_CALL(*this, is_surface_texture_backed())
- .WillByDefault(Return(kind == kSurfaceTexture));
+ ON_CALL(*this, is_texture_owner_backed())
+ .WillByDefault(Return(kind == kTextureOwner));
ON_CALL(*this, was_rendered_to_front_buffer())
.WillByDefault(Return(phase == kInFrontBuffer));
@@ -34,17 +34,17 @@ struct MockImage {
}
if (expectation == kRenderToBackBuffer) {
- EXPECT_CALL(*this, RenderToSurfaceTextureBackBuffer())
+ EXPECT_CALL(*this, RenderToTextureOwnerBackBuffer())
.WillOnce(Return(phase != kInvalidated));
} else {
- EXPECT_CALL(*this, RenderToSurfaceTextureBackBuffer()).Times(0);
+ EXPECT_CALL(*this, RenderToTextureOwnerBackBuffer()).Times(0);
}
}
MOCK_METHOD0(was_rendered_to_front_buffer, bool());
- MOCK_METHOD0(is_surface_texture_backed, bool());
+ MOCK_METHOD0(is_texture_owner_backed, bool());
MOCK_METHOD0(RenderToFrontBuffer, bool());
- MOCK_METHOD0(RenderToSurfaceTextureBackBuffer, bool());
+ MOCK_METHOD0(RenderToTextureOwnerBackBuffer, bool());
};
class MaybeRenderEarlyTest : public testing::Test {
@@ -67,7 +67,7 @@ TEST_F(MaybeRenderEarlyTest, EmptyVector) {
}
TEST_F(MaybeRenderEarlyTest, SingleUnrenderedSTImageIsRendered) {
- AddImage(kSurfaceTexture, kInCodec, Expectation::kRenderToFrontBuffer);
+ AddImage(kTextureOwner, kInCodec, Expectation::kRenderToFrontBuffer);
internal::MaybeRenderEarly(&images_);
}
@@ -77,9 +77,9 @@ TEST_F(MaybeRenderEarlyTest, SingleUnrenderedOverlayImageIsRendered) {
}
TEST_F(MaybeRenderEarlyTest, InvalidatedImagesAreSkippedOver) {
- AddImage(kSurfaceTexture, kInvalidated, Expectation::kRenderToFrontBuffer);
- AddImage(kSurfaceTexture, kInvalidated, Expectation::kRenderToFrontBuffer);
- AddImage(kSurfaceTexture, kInCodec, Expectation::kRenderToFrontBuffer);
+ AddImage(kTextureOwner, kInvalidated, Expectation::kRenderToFrontBuffer);
+ AddImage(kTextureOwner, kInvalidated, Expectation::kRenderToFrontBuffer);
+ AddImage(kTextureOwner, kInCodec, Expectation::kRenderToFrontBuffer);
internal::MaybeRenderEarly(&images_);
}
@@ -92,10 +92,10 @@ TEST_F(MaybeRenderEarlyTest, NoFrontBufferRenderingIfAlreadyPopulated) {
TEST_F(MaybeRenderEarlyTest,
ImageFollowingLatestFrontBufferIsBackBufferRendered) {
- AddImage(kSurfaceTexture, kInCodec, Expectation::kNone);
- AddImage(kSurfaceTexture, kInFrontBuffer, Expectation::kNone);
- AddImage(kSurfaceTexture, kInCodec, Expectation::kRenderToBackBuffer);
- AddImage(kSurfaceTexture, kInCodec, Expectation::kNone);
+ AddImage(kTextureOwner, kInCodec, Expectation::kNone);
+ AddImage(kTextureOwner, kInFrontBuffer, Expectation::kNone);
+ AddImage(kTextureOwner, kInCodec, Expectation::kRenderToBackBuffer);
+ AddImage(kTextureOwner, kInCodec, Expectation::kNone);
internal::MaybeRenderEarly(&images_);
}
diff --git a/chromium/media/gpu/codec_picture.cc b/chromium/media/gpu/codec_picture.cc
new file mode 100644
index 00000000000..e42bd5096df
--- /dev/null
+++ b/chromium/media/gpu/codec_picture.cc
@@ -0,0 +1,12 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/codec_picture.h"
+
+namespace media {
+
+CodecPicture::CodecPicture() {}
+CodecPicture::~CodecPicture() {}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/gpu/codec_picture.h b/chromium/media/gpu/codec_picture.h
index 14ed9b6c6cf..15020236433 100644
--- a/chromium/media/gpu/codec_picture.h
+++ b/chromium/media/gpu/codec_picture.h
@@ -9,6 +9,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
+#include "media/base/decrypt_config.h"
#include "media/gpu/media_gpu_export.h"
#include "ui/gfx/geometry/rect.h"
@@ -23,7 +24,7 @@ namespace media {
class MEDIA_GPU_EXPORT CodecPicture
: public base::RefCountedThreadSafe<CodecPicture> {
public:
- CodecPicture() = default;
+ CodecPicture();
int32_t bitstream_id() const { return bitstream_id_; }
void set_bitstream_id(int32_t bitstream_id) { bitstream_id_ = bitstream_id; }
@@ -31,13 +32,22 @@ class MEDIA_GPU_EXPORT CodecPicture
const gfx::Rect visible_rect() const { return visible_rect_; }
void set_visible_rect(const gfx::Rect& rect) { visible_rect_ = rect; }
+ // DecryptConfig returned by this method describes the decryption
+ // configuration of the input stream for this picture. Returns null if it is
+ // not encrypted.
+ const DecryptConfig* decrypt_config() const { return decrypt_config_.get(); }
+ void set_decrypt_config(std::unique_ptr<DecryptConfig> config) {
+ decrypt_config_ = std::move(config);
+ }
+
protected:
friend class base::RefCountedThreadSafe<CodecPicture>;
- virtual ~CodecPicture() = default;
+ virtual ~CodecPicture();
private:
int32_t bitstream_id_ = -1;
gfx::Rect visible_rect_;
+ std::unique_ptr<DecryptConfig> decrypt_config_;
DISALLOW_COPY_AND_ASSIGN(CodecPicture);
};
diff --git a/chromium/media/gpu/command_buffer_helper.cc b/chromium/media/gpu/command_buffer_helper.cc
new file mode 100644
index 00000000000..106cbab8a79
--- /dev/null
+++ b/chromium/media/gpu/command_buffer_helper.cc
@@ -0,0 +1,241 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/command_buffer_helper.h"
+
+#include <utility>
+#include <vector>
+
+#include "base/logging.h"
+#include "base/single_thread_task_runner.h"
+#include "base/threading/thread_checker.h"
+#include "gpu/command_buffer/common/scheduling_priority.h"
+#include "gpu/command_buffer/service/decoder_context.h"
+#include "gpu/command_buffer/service/scheduler.h"
+#include "gpu/command_buffer/service/sync_point_manager.h"
+#include "gpu/command_buffer/service/texture_manager.h"
+#include "gpu/ipc/service/command_buffer_stub.h"
+#include "gpu/ipc/service/gpu_channel.h"
+#include "media/gpu/gles2_decoder_helper.h"
+#include "ui/gl/gl_context.h"
+
+namespace media {
+
+namespace {
+
+class CommandBufferHelperImpl
+ : public CommandBufferHelper,
+ public gpu::CommandBufferStub::DestructionObserver {
+ public:
+ explicit CommandBufferHelperImpl(gpu::CommandBufferStub* stub) : stub_(stub) {
+ DVLOG(1) << __func__;
+ DCHECK(stub_->channel()->task_runner()->BelongsToCurrentThread());
+
+ stub_->AddDestructionObserver(this);
+ wait_sequence_id_ = stub_->channel()->scheduler()->CreateSequence(
+ gpu::SchedulingPriority::kNormal);
+ decoder_helper_ = GLES2DecoderHelper::Create(stub_->decoder_context());
+ }
+
+ gl::GLContext* GetGLContext() override {
+ DVLOG(2) << __func__;
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (!decoder_helper_)
+ return nullptr;
+
+ return decoder_helper_->GetGLContext();
+ }
+
+ bool MakeContextCurrent() override {
+ DVLOG(2) << __func__;
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ return decoder_helper_ && decoder_helper_->MakeContextCurrent();
+ }
+
+ bool IsContextCurrent() const override {
+ DVLOG(2) << __func__;
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (!stub_)
+ return false;
+
+ gl::GLContext* context = stub_->decoder_context()->GetGLContext();
+ if (!context)
+ return false;
+
+ return context->IsCurrent(nullptr);
+ }
+
+ GLuint CreateTexture(GLenum target,
+ GLenum internal_format,
+ GLsizei width,
+ GLsizei height,
+ GLenum format,
+ GLenum type) override {
+ DVLOG(2) << __func__;
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(stub_->decoder_context()->GetGLContext()->IsCurrent(nullptr));
+
+ scoped_refptr<gpu::gles2::TextureRef> texture_ref =
+ decoder_helper_->CreateTexture(target, internal_format, width, height,
+ format, type);
+ GLuint service_id = texture_ref->service_id();
+ texture_refs_[service_id] = std::move(texture_ref);
+ return service_id;
+ }
+
+ void DestroyTexture(GLuint service_id) override {
+ DVLOG(2) << __func__ << "(" << service_id << ")";
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(stub_->decoder_context()->GetGLContext()->IsCurrent(nullptr));
+ DCHECK(texture_refs_.count(service_id));
+
+ texture_refs_.erase(service_id);
+ }
+
+ void SetCleared(GLuint service_id) override {
+ DVLOG(2) << __func__ << "(" << service_id << ")";
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (!decoder_helper_)
+ return;
+
+ DCHECK(texture_refs_.count(service_id));
+ decoder_helper_->SetCleared(texture_refs_[service_id].get());
+ }
+
+ bool BindImage(GLuint service_id,
+ gl::GLImage* image,
+ bool can_bind_to_sampler) override {
+ DVLOG(2) << __func__ << "(" << service_id << ")";
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (!decoder_helper_)
+ return false;
+
+ DCHECK(texture_refs_.count(service_id));
+ decoder_helper_->BindImage(texture_refs_[service_id].get(), image,
+ can_bind_to_sampler);
+ return true;
+ }
+
+ gpu::Mailbox CreateMailbox(GLuint service_id) override {
+ DVLOG(2) << __func__ << "(" << service_id << ")";
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (!decoder_helper_)
+ return gpu::Mailbox();
+
+ DCHECK(texture_refs_.count(service_id));
+ return decoder_helper_->CreateMailbox(texture_refs_[service_id].get());
+ }
+
+ void WaitForSyncToken(gpu::SyncToken sync_token,
+ base::OnceClosure done_cb) override {
+ DVLOG(2) << __func__;
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (!stub_)
+ return;
+
+ // TODO(sandersd): Do we need to keep a ref to |this| while there are
+ // pending waits? If we destruct while they are pending, they will never
+ // run.
+ stub_->channel()->scheduler()->ScheduleTask(
+ gpu::Scheduler::Task(wait_sequence_id_, std::move(done_cb),
+ std::vector<gpu::SyncToken>({sync_token})));
+ }
+
+ void SetWillDestroyStubCB(WillDestroyStubCB will_destroy_stub_cb) override {
+ DCHECK(!will_destroy_stub_cb_);
+ will_destroy_stub_cb_ = std::move(will_destroy_stub_cb);
+ }
+
+ private:
+ ~CommandBufferHelperImpl() override {
+ DVLOG(1) << __func__;
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (!stub_)
+ return;
+
+ // Try to drop TextureRefs with the context current, so that the platform
+ // textures can be deleted.
+ //
+ // Note: Since we don't know what stack we are on, it might not be safe to
+ // change the context. In practice we can be reasonably sure that our last
+ // owner isn't doing work in a different context.
+ //
+ // TODO(sandersd): We should restore the previous context.
+ if (!texture_refs_.empty() && MakeContextCurrent())
+ texture_refs_.clear();
+
+ DestroyStub();
+ }
+
+ void OnWillDestroyStub(bool have_context) override {
+ DVLOG(1) << __func__;
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ // If we don't have a context, then tell the textures.
+ if (!have_context) {
+ for (auto iter : texture_refs_)
+ iter.second->ForceContextLost();
+ }
+
+ // In case |will_destroy_stub_cb_| drops the last reference to |this|, make
+ // sure that we're around a bit longer.
+ scoped_refptr<CommandBufferHelper> thiz(this);
+
+ if (will_destroy_stub_cb_)
+ std::move(will_destroy_stub_cb_).Run(have_context);
+
+ // OnWillDestroyStub() is called with the context current if possible. Drop
+ // the TextureRefs now while the platform textures can still be deleted.
+ texture_refs_.clear();
+
+ DestroyStub();
+ }
+
+ void DestroyStub() {
+ DVLOG(3) << __func__;
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ decoder_helper_ = nullptr;
+
+ // If the last reference to |this| is in a |done_cb|, destroying the wait
+ // sequence can delete |this|. Clearing |stub_| first prevents DestroyStub()
+ // being called twice.
+ gpu::CommandBufferStub* stub = stub_;
+ stub_ = nullptr;
+
+ stub->RemoveDestructionObserver(this);
+ stub->channel()->scheduler()->DestroySequence(wait_sequence_id_);
+ }
+
+ gpu::CommandBufferStub* stub_;
+ // Wait tasks are scheduled on our own sequence so that we can't inadvertently
+ // block the command buffer.
+ gpu::SequenceId wait_sequence_id_;
+ // TODO(sandersd): Merge GLES2DecoderHelper implementation into this class.
+ std::unique_ptr<GLES2DecoderHelper> decoder_helper_;
+ std::map<GLuint, scoped_refptr<gpu::gles2::TextureRef>> texture_refs_;
+
+ WillDestroyStubCB will_destroy_stub_cb_;
+
+ THREAD_CHECKER(thread_checker_);
+ DISALLOW_COPY_AND_ASSIGN(CommandBufferHelperImpl);
+};
+
+} // namespace
+
+// static
+scoped_refptr<CommandBufferHelper> CommandBufferHelper::Create(
+ gpu::CommandBufferStub* stub) {
+ return base::MakeRefCounted<CommandBufferHelperImpl>(stub);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/command_buffer_helper.h b/chromium/media/gpu/command_buffer_helper.h
new file mode 100644
index 00000000000..8adfee188b5
--- /dev/null
+++ b/chromium/media/gpu/command_buffer_helper.h
@@ -0,0 +1,135 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_COMMAND_BUFFER_HELPER_H_
+#define MEDIA_GPU_COMMAND_BUFFER_HELPER_H_
+
+#include "base/callback_forward.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_refptr.h"
+#include "gpu/command_buffer/common/mailbox.h"
+#include "gpu/command_buffer/common/sync_token.h"
+#include "media/gpu/media_gpu_export.h"
+#include "ui/gl/gl_bindings.h"
+
+namespace gpu {
+class CommandBufferStub;
+} // namespace gpu
+
+namespace gl {
+class GLContext;
+class GLImage;
+} // namespace gl
+
+namespace media {
+
+// TODO(sandersd): CommandBufferHelper does not inherently need to be ref
+// counted, but some clients want that (VdaVideoDecoder and PictureBufferManager
+// both hold a ref to the same CommandBufferHelper). Consider making an owned
+// variant.
+class MEDIA_GPU_EXPORT CommandBufferHelper
+ : public base::RefCountedThreadSafe<CommandBufferHelper> {
+ public:
+ REQUIRE_ADOPTION_FOR_REFCOUNTED_TYPE();
+
+ using WillDestroyStubCB = base::OnceCallback<void(bool have_context)>;
+
+ // TODO(sandersd): Consider adding an Initialize(stub) method so that
+ // CommandBufferHelpers can be created before a stub is available.
+ static scoped_refptr<CommandBufferHelper> Create(
+ gpu::CommandBufferStub* stub);
+
+ // Gets the associated GLContext.
+ //
+ // Used by DXVAVDA to test for D3D11 support, and by V4L2VDA to create
+ // EGLImages. New clients should use more specialized accessors instead.
+ virtual gl::GLContext* GetGLContext() = 0;
+
+ // Makes the GL context current.
+ virtual bool MakeContextCurrent() = 0;
+
+ // Returns whether or not the the context is current in the
+ // GLContext::IsCurrent(nullptr) sense. Note that this is not necessarily the
+ // same for virtual contexts as "Did somebody run MakeContextCurrent?".
+ virtual bool IsContextCurrent() const = 0;
+
+ // Creates a texture and returns its |service_id|.
+ //
+ // See glTexImage2D() for argument definitions.
+ //
+ // The texture will be configured as a video frame: linear filtering, clamp to
+ // edge. If |target| is GL_TEXTURE_2D, storage will be allocated but not
+ // initialized.
+ //
+ // The context must be current.
+ //
+ // TODO(sandersd): Is really necessary to allocate storage? GpuVideoDecoder
+ // does this, but it's not clear that any clients require it.
+ virtual GLuint CreateTexture(GLenum target,
+ GLenum internal_format,
+ GLsizei width,
+ GLsizei height,
+ GLenum format,
+ GLenum type) = 0;
+
+ // Destroys a texture.
+ //
+ // The context must be current.
+ virtual void DestroyTexture(GLuint service_id) = 0;
+
+ // Sets the cleared flag on level 0 of the texture.
+ virtual void SetCleared(GLuint service_id) = 0;
+
+ // Binds level 0 of the texture to an image.
+ //
+ // If the sampler binding already exists, set |can_bind_to_sampler| to true.
+ // Otherwise set it to false, and BindTexImage()/CopyTexImage() will be called
+ // when the texture is used.
+ //
+ // TODO(sandersd): Should we expose ImageState directly, rather than
+ // |can_bind_to_sampler|?
+ virtual bool BindImage(GLuint service_id,
+ gl::GLImage* image,
+ bool can_bind_to_sampler) = 0;
+
+ // Creates a mailbox for a texture.
+ //
+ // TODO(sandersd): Specify the behavior when the stub has been destroyed. The
+ // current implementation returns an empty (zero) mailbox. One solution would
+ // be to add a HasStub() method, and not define behavior when it is false.
+ virtual gpu::Mailbox CreateMailbox(GLuint service_id) = 0;
+
+ // Waits for a SyncToken, then runs |done_cb|.
+ //
+ // |done_cb| may be destructed without running if the stub is destroyed.
+ //
+ // TODO(sandersd): Currently it is possible to lose the stub while
+ // PictureBufferManager is waiting for all picture buffers, which results in a
+ // decoding softlock. Notification of wait failure (or just context/stub lost)
+ // is probably necessary.
+ virtual void WaitForSyncToken(gpu::SyncToken sync_token,
+ base::OnceClosure done_cb) = 0;
+
+ // Set the callback to be called when our stub is destroyed. This callback
+ // may not change the current context.
+ virtual void SetWillDestroyStubCB(WillDestroyStubCB will_destroy_stub_cb) = 0;
+
+ protected:
+ CommandBufferHelper() = default;
+
+ // TODO(sandersd): Deleting remaining textures upon destruction requires
+ // making the context current, which may be undesireable. Consider adding an
+ // explicit DestroyWithContext() API.
+ virtual ~CommandBufferHelper() = default;
+
+ private:
+ friend class base::RefCountedThreadSafe<CommandBufferHelper>;
+
+ DISALLOW_COPY_AND_ASSIGN(CommandBufferHelper);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_COMMAND_BUFFER_HELPER_H_
diff --git a/chromium/media/gpu/fake_command_buffer_helper.cc b/chromium/media/gpu/fake_command_buffer_helper.cc
new file mode 100644
index 00000000000..1851d810c59
--- /dev/null
+++ b/chromium/media/gpu/fake_command_buffer_helper.cc
@@ -0,0 +1,141 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/fake_command_buffer_helper.h"
+
+#include "base/logging.h"
+
+namespace media {
+
+FakeCommandBufferHelper::FakeCommandBufferHelper(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner)
+ : task_runner_(std::move(task_runner)) {
+ DVLOG(1) << __func__;
+}
+
+FakeCommandBufferHelper::~FakeCommandBufferHelper() {
+ DVLOG(1) << __func__;
+}
+
+void FakeCommandBufferHelper::StubLost() {
+ DVLOG(1) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ // Keep a reference to |this| in case the destruction cb drops the last one.
+ scoped_refptr<CommandBufferHelper> thiz(this);
+ if (will_destroy_stub_cb_)
+ std::move(will_destroy_stub_cb_).Run(!is_context_lost_);
+ has_stub_ = false;
+ is_context_lost_ = true;
+ is_context_current_ = false;
+ service_ids_.clear();
+ waits_.clear();
+}
+
+void FakeCommandBufferHelper::ContextLost() {
+ DVLOG(1) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ is_context_lost_ = true;
+ is_context_current_ = false;
+}
+
+void FakeCommandBufferHelper::CurrentContextLost() {
+ DVLOG(2) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ is_context_current_ = false;
+}
+
+bool FakeCommandBufferHelper::HasTexture(GLuint service_id) {
+ DVLOG(4) << __func__ << "(" << service_id << ")";
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ return service_ids_.count(service_id);
+}
+
+void FakeCommandBufferHelper::ReleaseSyncToken(gpu::SyncToken sync_token) {
+ DVLOG(3) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(waits_.count(sync_token));
+ task_runner_->PostTask(FROM_HERE, std::move(waits_[sync_token]));
+ waits_.erase(sync_token);
+}
+
+gl::GLContext* FakeCommandBufferHelper::GetGLContext() {
+ DVLOG(4) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ return nullptr;
+}
+
+bool FakeCommandBufferHelper::MakeContextCurrent() {
+ DVLOG(3) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ is_context_current_ = !is_context_lost_;
+ return is_context_current_;
+}
+
+bool FakeCommandBufferHelper::IsContextCurrent() const {
+ return is_context_current_;
+}
+
+GLuint FakeCommandBufferHelper::CreateTexture(GLenum target,
+ GLenum internal_format,
+ GLsizei width,
+ GLsizei height,
+ GLenum format,
+ GLenum type) {
+ DVLOG(2) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(is_context_current_);
+ GLuint service_id = next_service_id_++;
+ service_ids_.insert(service_id);
+ return service_id;
+}
+
+void FakeCommandBufferHelper::DestroyTexture(GLuint service_id) {
+ DVLOG(2) << __func__ << "(" << service_id << ")";
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(is_context_current_);
+ DCHECK(service_ids_.count(service_id));
+ service_ids_.erase(service_id);
+}
+
+void FakeCommandBufferHelper::SetCleared(GLuint service_id) {
+ DVLOG(2) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(service_ids_.count(service_id));
+}
+
+bool FakeCommandBufferHelper::BindImage(GLuint service_id,
+ gl::GLImage* image,
+ bool can_bind_to_sampler) {
+ DVLOG(2) << __func__ << "(" << service_id << ")";
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(service_ids_.count(service_id));
+ DCHECK(image);
+ return has_stub_;
+}
+
+gpu::Mailbox FakeCommandBufferHelper::CreateMailbox(GLuint service_id) {
+ DVLOG(2) << __func__ << "(" << service_id << ")";
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(service_ids_.count(service_id));
+ if (!has_stub_)
+ return gpu::Mailbox();
+ return gpu::Mailbox::Generate();
+}
+
+void FakeCommandBufferHelper::WaitForSyncToken(gpu::SyncToken sync_token,
+ base::OnceClosure done_cb) {
+ DVLOG(2) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(!waits_.count(sync_token));
+ if (has_stub_)
+ waits_.emplace(sync_token, std::move(done_cb));
+}
+
+void FakeCommandBufferHelper::SetWillDestroyStubCB(
+ WillDestroyStubCB will_destroy_stub_cb) {
+ DCHECK(!will_destroy_stub_cb_);
+ will_destroy_stub_cb_ = std::move(will_destroy_stub_cb);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/fake_command_buffer_helper.h b/chromium/media/gpu/fake_command_buffer_helper.h
new file mode 100644
index 00000000000..d3c0169d003
--- /dev/null
+++ b/chromium/media/gpu/fake_command_buffer_helper.h
@@ -0,0 +1,80 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_FAKE_COMMAND_BUFFER_HELPER_H_
+#define MEDIA_GPU_FAKE_COMMAND_BUFFER_HELPER_H_
+
+#include <map>
+#include <set>
+
+#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/single_thread_task_runner.h"
+#include "media/gpu/command_buffer_helper.h"
+
+namespace media {
+
+class FakeCommandBufferHelper : public CommandBufferHelper {
+ public:
+ explicit FakeCommandBufferHelper(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+
+ // Signal stub destruction. All textures will be deleted. Listeners will
+ // be notified that we have a current context unless one calls ContextLost
+ // before this.
+ void StubLost();
+
+ // Signal context loss. MakeContextCurrent() fails after this.
+ void ContextLost();
+
+ // Signal that the context is no longer current.
+ void CurrentContextLost();
+
+ // Complete a pending SyncToken wait.
+ void ReleaseSyncToken(gpu::SyncToken sync_token);
+
+ // Test whether a texture exists (has not been destroyed).
+ bool HasTexture(GLuint service_id);
+
+ // CommandBufferHelper implementation.
+ gl::GLContext* GetGLContext() override;
+ bool MakeContextCurrent() override;
+ bool IsContextCurrent() const override;
+ GLuint CreateTexture(GLenum target,
+ GLenum internal_format,
+ GLsizei width,
+ GLsizei height,
+ GLenum format,
+ GLenum type) override;
+ void DestroyTexture(GLuint service_id) override;
+ void SetCleared(GLuint service_id) override;
+ bool BindImage(GLuint service_id,
+ gl::GLImage* image,
+ bool can_bind_to_sampler) override;
+ gpu::Mailbox CreateMailbox(GLuint service_id) override;
+ void WaitForSyncToken(gpu::SyncToken sync_token,
+ base::OnceClosure done_cb) override;
+ void SetWillDestroyStubCB(WillDestroyStubCB will_destroy_stub_cb) override;
+
+ private:
+ ~FakeCommandBufferHelper() override;
+
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+
+ bool has_stub_ = true;
+ bool is_context_lost_ = false;
+ bool is_context_current_ = false;
+
+ GLuint next_service_id_ = 1;
+ std::set<GLuint> service_ids_;
+ std::map<gpu::SyncToken, base::OnceClosure> waits_;
+
+ WillDestroyStubCB will_destroy_stub_cb_;
+
+ DISALLOW_COPY_AND_ASSIGN(FakeCommandBufferHelper);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_FAKE_COMMAND_BUFFER_HELPER_H_
diff --git a/chromium/media/gpu/gles2_decoder_helper.cc b/chromium/media/gpu/gles2_decoder_helper.cc
index 6c376ff22fa..551d335f181 100644
--- a/chromium/media/gpu/gles2_decoder_helper.cc
+++ b/chromium/media/gpu/gles2_decoder_helper.cc
@@ -6,8 +6,10 @@
#include <memory>
+#include "base/logging.h"
#include "base/macros.h"
#include "base/threading/thread_checker.h"
+#include "gpu/command_buffer/common/gles2_cmd_utils.h"
#include "gpu/command_buffer/common/mailbox.h"
#include "gpu/command_buffer/service/context_group.h"
#include "gpu/command_buffer/service/decoder_context.h"
@@ -20,7 +22,15 @@ namespace media {
class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
public:
explicit GLES2DecoderHelperImpl(gpu::DecoderContext* decoder)
- : decoder_(decoder) {}
+ : decoder_(decoder) {
+ DCHECK(decoder_);
+ gpu::gles2::ContextGroup* group = decoder_->GetContextGroup();
+ texture_manager_ = group->texture_manager();
+ mailbox_manager_ = group->mailbox_manager();
+ // TODO(sandersd): Support GLES2DecoderPassthroughImpl.
+ DCHECK(texture_manager_);
+ DCHECK(mailbox_manager_);
+ }
bool MakeContextCurrent() override {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
@@ -35,10 +45,6 @@ class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
GLenum type) override {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(decoder_->GetGLContext()->IsCurrent(nullptr));
- gpu::gles2::ContextGroup* group = decoder_->GetContextGroup();
- gpu::gles2::TextureManager* texture_manager = group->texture_manager();
- // TODO(sandersd): Support GLES2DecoderPassthroughImpl.
- DCHECK(texture_manager);
// We can't use texture_manager->CreateTexture(), since it requires a unique
// |client_id|. Instead we create the texture directly, and create our own
@@ -47,39 +53,41 @@ class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
glGenTextures(1, &texture_id);
glBindTexture(target, texture_id);
+ // Mark external textures as clear, since nobody is going to take any action
+ // that would "clear" them.
+ // TODO(liberato): should we make the client do this when it binds an image?
+ gfx::Rect cleared_rect = (target == GL_TEXTURE_EXTERNAL_OES)
+ ? gfx::Rect(width, height)
+ : gfx::Rect();
+
scoped_refptr<gpu::gles2::TextureRef> texture_ref =
- gpu::gles2::TextureRef::Create(texture_manager, 0, texture_id);
- texture_manager->SetTarget(texture_ref.get(), target);
- texture_manager->SetLevelInfo(texture_ref.get(), // ref
- target, // target
- 0, // level
- internal_format, // internal_format
- width, // width
- height, // height
- 1, // depth
- 0, // border
- format, // format
- type, // type
- gfx::Rect()); // cleared_rect
-
- texture_manager->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_MAG_FILTER,
- GL_LINEAR);
- texture_manager->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_MIN_FILTER,
- GL_LINEAR);
-
- texture_manager->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_WRAP_S,
- GL_CLAMP_TO_EDGE);
- texture_manager->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_WRAP_T,
- GL_CLAMP_TO_EDGE);
-
- texture_manager->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_BASE_LEVEL, 0);
- texture_manager->SetParameteri(__func__, decoder_->GetErrorState(),
- texture_ref.get(), GL_TEXTURE_MAX_LEVEL, 0);
+ gpu::gles2::TextureRef::Create(texture_manager_, 0, texture_id);
+ texture_manager_->SetTarget(texture_ref.get(), target);
+ texture_manager_->SetLevelInfo(texture_ref.get(), // ref
+ target, // target
+ 0, // level
+ internal_format, // internal_format
+ width, // width
+ height, // height
+ 1, // depth
+ 0, // border
+ format, // format
+ type, // type
+ cleared_rect); // cleared_rect
+
+ texture_manager_->SetParameteri(__func__, decoder_->GetErrorState(),
+ texture_ref.get(), GL_TEXTURE_MAG_FILTER,
+ GL_LINEAR);
+ texture_manager_->SetParameteri(__func__, decoder_->GetErrorState(),
+ texture_ref.get(), GL_TEXTURE_MIN_FILTER,
+ GL_LINEAR);
+
+ texture_manager_->SetParameteri(__func__, decoder_->GetErrorState(),
+ texture_ref.get(), GL_TEXTURE_WRAP_S,
+ GL_CLAMP_TO_EDGE);
+ texture_manager_->SetParameteri(__func__, decoder_->GetErrorState(),
+ texture_ref.get(), GL_TEXTURE_WRAP_T,
+ GL_CLAMP_TO_EDGE);
// TODO(sandersd): Do we always want to allocate for GL_TEXTURE_2D?
if (target == GL_TEXTURE_2D) {
@@ -98,17 +106,40 @@ class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
return texture_ref;
}
+ void SetCleared(gpu::gles2::TextureRef* texture_ref) override {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ texture_manager_->SetLevelCleared(
+ texture_ref, texture_ref->texture()->target(), 0, true);
+ }
+
+ void BindImage(gpu::gles2::TextureRef* texture_ref,
+ gl::GLImage* image,
+ bool can_bind_to_sampler) override {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ GLenum target = gpu::gles2::GLES2Util::GLFaceTargetToTextureTarget(
+ texture_ref->texture()->target());
+ gpu::gles2::Texture::ImageState state = can_bind_to_sampler
+ ? gpu::gles2::Texture::BOUND
+ : gpu::gles2::Texture::UNBOUND;
+ texture_manager_->SetLevelImage(texture_ref, target, 0, image, state);
+ }
+
+ gl::GLContext* GetGLContext() override {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ return decoder_->GetGLContext();
+ }
+
gpu::Mailbox CreateMailbox(gpu::gles2::TextureRef* texture_ref) override {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- gpu::gles2::ContextGroup* group = decoder_->GetContextGroup();
- gpu::MailboxManager* mailbox_manager = group->mailbox_manager();
gpu::Mailbox mailbox = gpu::Mailbox::Generate();
- mailbox_manager->ProduceTexture(mailbox, texture_ref->texture());
+ mailbox_manager_->ProduceTexture(mailbox, texture_ref->texture());
return mailbox;
}
private:
gpu::DecoderContext* decoder_;
+ gpu::gles2::TextureManager* texture_manager_;
+ gpu::MailboxManager* mailbox_manager_;
THREAD_CHECKER(thread_checker_);
DISALLOW_COPY_AND_ASSIGN(GLES2DecoderHelperImpl);
@@ -117,8 +148,6 @@ class GLES2DecoderHelperImpl : public GLES2DecoderHelper {
// static
std::unique_ptr<GLES2DecoderHelper> GLES2DecoderHelper::Create(
gpu::DecoderContext* decoder) {
- if (!decoder)
- return nullptr;
return std::make_unique<GLES2DecoderHelperImpl>(decoder);
}
diff --git a/chromium/media/gpu/gles2_decoder_helper.h b/chromium/media/gpu/gles2_decoder_helper.h
index 5f6befc0186..ee816217673 100644
--- a/chromium/media/gpu/gles2_decoder_helper.h
+++ b/chromium/media/gpu/gles2_decoder_helper.h
@@ -22,6 +22,11 @@ class TextureRef;
} // namespace gles2
} // namespace gpu
+namespace gl {
+class GLContext;
+class GLImage;
+} // namespace gl
+
namespace media {
// Utility methods to simplify working with a gpu::DecoderContext from
@@ -37,7 +42,7 @@ class MEDIA_GPU_EXPORT GLES2DecoderHelper {
virtual bool MakeContextCurrent() = 0;
// Creates a texture and configures it as a video frame (linear filtering,
- // clamp to edge, no mipmaps). The context must be current.
+ // clamp to edge). The context must be current.
//
// See glTexImage2D() for parameter definitions.
//
@@ -50,6 +55,17 @@ class MEDIA_GPU_EXPORT GLES2DecoderHelper {
GLenum format,
GLenum type) = 0;
+ // Sets the cleared flag on level 0 of the texture.
+ virtual void SetCleared(gpu::gles2::TextureRef* texture_ref) = 0;
+
+ // Binds level 0 of the texture to an image.
+ virtual void BindImage(gpu::gles2::TextureRef* texture_ref,
+ gl::GLImage* image,
+ bool can_bind_to_sampler) = 0;
+
+ // Gets the associated GLContext.
+ virtual gl::GLContext* GetGLContext() = 0;
+
// Creates a mailbox for a texture.
virtual gpu::Mailbox CreateMailbox(gpu::gles2::TextureRef* texture_ref) = 0;
};
diff --git a/chromium/media/gpu/gpu_jpeg_decode_accelerator_factory.cc b/chromium/media/gpu/gpu_jpeg_decode_accelerator_factory.cc
index b3fe5398f26..c0dfb52cc77 100644
--- a/chromium/media/gpu/gpu_jpeg_decode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_jpeg_decode_accelerator_factory.cc
@@ -6,6 +6,7 @@
#include "base/bind.h"
#include "base/command_line.h"
+#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/base/media_switches.h"
diff --git a/chromium/media/gpu/gpu_jpeg_encode_accelerator_factory.cc b/chromium/media/gpu/gpu_jpeg_encode_accelerator_factory.cc
index c4e6114403d..9e50d099913 100644
--- a/chromium/media/gpu/gpu_jpeg_encode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_jpeg_encode_accelerator_factory.cc
@@ -5,6 +5,7 @@
#include "media/gpu/gpu_jpeg_encode_accelerator_factory.h"
#include "base/bind.h"
+#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/base/media_switches.h"
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
index 1f488bbdc25..4c8f67c20b2 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
@@ -123,7 +123,8 @@ GpuVideoDecodeAcceleratorFactory::CreateVDA(
VideoDecodeAccelerator::Client* client,
const VideoDecodeAccelerator::Config& config,
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) {
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) {
DCHECK(thread_checker_.CalledOnValidThread());
if (gpu_preferences.disable_accelerated_video_decode)
@@ -135,7 +136,8 @@ GpuVideoDecodeAcceleratorFactory::CreateVDA(
// in GetDecoderCapabilities() above.
using CreateVDAFp = std::unique_ptr<VideoDecodeAccelerator> (
GpuVideoDecodeAcceleratorFactory::*)(const gpu::GpuDriverBugWorkarounds&,
- const gpu::GpuPreferences&) const;
+ const gpu::GpuPreferences&,
+ MediaLog* media_log) const;
const CreateVDAFp create_vda_fps[] = {
#if defined(OS_WIN)
&GpuVideoDecodeAcceleratorFactory::CreateDXVAVDA,
@@ -158,7 +160,7 @@ GpuVideoDecodeAcceleratorFactory::CreateVDA(
std::unique_ptr<VideoDecodeAccelerator> vda;
for (const auto& create_vda_function : create_vda_fps) {
- vda = (this->*create_vda_function)(workarounds, gpu_preferences);
+ vda = (this->*create_vda_function)(workarounds, gpu_preferences, media_log);
if (vda && vda->Initialize(config, client))
return vda;
}
@@ -170,12 +172,13 @@ GpuVideoDecodeAcceleratorFactory::CreateVDA(
std::unique_ptr<VideoDecodeAccelerator>
GpuVideoDecodeAcceleratorFactory::CreateDXVAVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const {
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const {
std::unique_ptr<VideoDecodeAccelerator> decoder;
DVLOG(0) << "Initializing DXVA HW decoder for windows.";
decoder.reset(new DXVAVideoDecodeAccelerator(
get_gl_context_cb_, make_context_current_cb_, bind_image_cb_, workarounds,
- gpu_preferences));
+ gpu_preferences, media_log));
return decoder;
}
#endif
@@ -184,7 +187,8 @@ GpuVideoDecodeAcceleratorFactory::CreateDXVAVDA(
std::unique_ptr<VideoDecodeAccelerator>
GpuVideoDecodeAcceleratorFactory::CreateV4L2VDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const {
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const {
std::unique_ptr<VideoDecodeAccelerator> decoder;
scoped_refptr<V4L2Device> device = V4L2Device::Create();
if (device.get()) {
@@ -198,7 +202,8 @@ GpuVideoDecodeAcceleratorFactory::CreateV4L2VDA(
std::unique_ptr<VideoDecodeAccelerator>
GpuVideoDecodeAcceleratorFactory::CreateV4L2SVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const {
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const {
std::unique_ptr<VideoDecodeAccelerator> decoder;
scoped_refptr<V4L2Device> device = V4L2Device::Create();
if (device.get()) {
@@ -214,7 +219,8 @@ GpuVideoDecodeAcceleratorFactory::CreateV4L2SVDA(
std::unique_ptr<VideoDecodeAccelerator>
GpuVideoDecodeAcceleratorFactory::CreateVaapiVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const {
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const {
std::unique_ptr<VideoDecodeAccelerator> decoder;
decoder.reset(new VaapiVideoDecodeAccelerator(make_context_current_cb_,
bind_image_cb_));
@@ -226,9 +232,10 @@ GpuVideoDecodeAcceleratorFactory::CreateVaapiVDA(
std::unique_ptr<VideoDecodeAccelerator>
GpuVideoDecodeAcceleratorFactory::CreateVTVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const {
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const {
std::unique_ptr<VideoDecodeAccelerator> decoder;
- decoder.reset(new VTVideoDecodeAccelerator(bind_image_cb_));
+ decoder.reset(new VTVideoDecodeAccelerator(bind_image_cb_, media_log));
return decoder;
}
#endif
@@ -237,7 +244,8 @@ GpuVideoDecodeAcceleratorFactory::CreateVTVDA(
std::unique_ptr<VideoDecodeAccelerator>
GpuVideoDecodeAcceleratorFactory::CreateAndroidVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const {
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const {
std::unique_ptr<VideoDecodeAccelerator> decoder;
decoder.reset(new AndroidVideoDecodeAccelerator(
AVDACodecAllocator::GetInstance(base::ThreadTaskRunnerHandle::Get()),
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_factory.h b/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
index 3244daa4b0e..70eac2b126f 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_factory.h
@@ -32,6 +32,8 @@ class ContextGroup;
namespace media {
+class MediaLog;
+
class MEDIA_GPU_EXPORT GpuVideoDecodeAcceleratorFactory {
public:
~GpuVideoDecodeAcceleratorFactory();
@@ -80,7 +82,8 @@ class MEDIA_GPU_EXPORT GpuVideoDecodeAcceleratorFactory {
VideoDecodeAccelerator::Client* client,
const VideoDecodeAccelerator::Config& config,
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences);
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log = nullptr);
private:
GpuVideoDecodeAcceleratorFactory(
@@ -93,33 +96,40 @@ class MEDIA_GPU_EXPORT GpuVideoDecodeAcceleratorFactory {
#if defined(OS_WIN)
std::unique_ptr<VideoDecodeAccelerator> CreateD3D11VDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const;
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const;
std::unique_ptr<VideoDecodeAccelerator> CreateDXVAVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const;
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const;
#endif
#if BUILDFLAG(USE_V4L2_CODEC)
std::unique_ptr<VideoDecodeAccelerator> CreateV4L2VDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const;
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const;
std::unique_ptr<VideoDecodeAccelerator> CreateV4L2SVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const;
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const;
#endif
#if BUILDFLAG(USE_VAAPI)
std::unique_ptr<VideoDecodeAccelerator> CreateVaapiVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const;
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const;
#endif
#if defined(OS_MACOSX)
std::unique_ptr<VideoDecodeAccelerator> CreateVTVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const;
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const;
#endif
#if defined(OS_ANDROID)
std::unique_ptr<VideoDecodeAccelerator> CreateAndroidVDA(
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences) const;
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log) const;
#endif
const GetGLContextCallback get_gl_context_cb_;
diff --git a/chromium/media/gpu/gpu_video_encode_accelerator_factory.cc b/chromium/media/gpu/gpu_video_encode_accelerator_factory.cc
index d770f10353f..94989a52758 100644
--- a/chromium/media/gpu/gpu_video_encode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_video_encode_accelerator_factory.cc
@@ -13,7 +13,7 @@
#if BUILDFLAG(USE_V4L2_CODEC)
#include "media/gpu/v4l2/v4l2_video_encode_accelerator.h"
#endif
-#if defined(OS_ANDROID) && BUILDFLAG(ENABLE_WEBRTC)
+#if defined(OS_ANDROID)
#include "media/gpu/android/android_video_encode_accelerator.h"
#endif
#if defined(OS_MACOSX)
@@ -48,7 +48,7 @@ std::unique_ptr<VideoEncodeAccelerator> CreateVaapiVEA() {
}
#endif
-#if defined(OS_ANDROID) && BUILDFLAG(ENABLE_WEBRTC)
+#if defined(OS_ANDROID)
std::unique_ptr<VideoEncodeAccelerator> CreateAndroidVEA() {
return base::WrapUnique<VideoEncodeAccelerator>(
new AndroidVideoEncodeAccelerator());
@@ -92,7 +92,7 @@ std::vector<VEAFactoryFunction> GetVEAFactoryFunctions(
#if BUILDFLAG(USE_VAAPI)
vea_factory_functions.push_back(base::BindRepeating(&CreateVaapiVEA));
#endif
-#if defined(OS_ANDROID) && BUILDFLAG(ENABLE_WEBRTC)
+#if defined(OS_ANDROID)
vea_factory_functions.push_back(base::BindRepeating(&CreateAndroidVEA));
#endif
#if defined(OS_MACOSX)
diff --git a/chromium/media/gpu/h264_decoder.cc b/chromium/media/gpu/h264_decoder.cc
index ebc7e2b385c..05e0608362e 100644
--- a/chromium/media/gpu/h264_decoder.cc
+++ b/chromium/media/gpu/h264_decoder.cc
@@ -601,8 +601,8 @@ bool H264Decoder::ModifyReferencePicList(const H264SliceHeader* slice_hdr,
default:
// May be recoverable.
DVLOG(1) << "Invalid modification_of_pic_nums_idc="
- << list_mod->modification_of_pic_nums_idc
- << " in position " << i;
+ << list_mod->modification_of_pic_nums_idc << " in position "
+ << i;
break;
}
@@ -627,8 +627,8 @@ void H264Decoder::OutputPic(scoped_refptr<H264Picture> pic) {
}
DVLOG_IF(1, pic->pic_order_cnt < last_output_poc_)
- << "Outputting out of order, likely a broken stream: "
- << last_output_poc_ << " -> " << pic->pic_order_cnt;
+ << "Outputting out of order, likely a broken stream: " << last_output_poc_
+ << " -> " << pic->pic_order_cnt;
last_output_poc_ = pic->pic_order_cnt;
DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt;
@@ -1224,9 +1224,17 @@ bool H264Decoder::ProcessCurrentSlice() {
return H264Decoder::kDecodeError; \
} while (0)
-void H264Decoder::SetStream(int32_t id, const uint8_t* ptr, size_t size) {
+void H264Decoder::SetStream(int32_t id,
+ const uint8_t* ptr,
+ size_t size,
+ const DecryptConfig* decrypt_config) {
DCHECK(ptr);
DCHECK(size);
+ if (decrypt_config) {
+ NOTIMPLEMENTED();
+ state_ = kError;
+ return;
+ }
DVLOG(4) << "New input stream id: " << id << " at: " << (void*)ptr
<< " size: " << size;
diff --git a/chromium/media/gpu/h264_decoder.h b/chromium/media/gpu/h264_decoder.h
index 40d274ca070..dff6d936e60 100644
--- a/chromium/media/gpu/h264_decoder.h
+++ b/chromium/media/gpu/h264_decoder.h
@@ -105,7 +105,10 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
~H264Decoder() override;
// AcceleratedVideoDecoder implementation.
- void SetStream(int32_t id, const uint8_t* ptr, size_t size) override;
+ void SetStream(int32_t id,
+ const uint8_t* ptr,
+ size_t size,
+ const DecryptConfig* decrypt_config = nullptr) override;
bool Flush() override WARN_UNUSED_RESULT;
void Reset() override;
DecodeResult Decode() override WARN_UNUSED_RESULT;
diff --git a/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc b/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
index f8c85d5fc18..c8de1270865 100644
--- a/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
+++ b/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
@@ -6,7 +6,6 @@
#include "base/bind.h"
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "gpu/ipc/client/gpu_channel_host.h"
@@ -264,7 +263,7 @@ void GpuVideoDecodeAcceleratorHost::OnPictureReady(
params.visible_rect, params.color_space,
params.allow_overlay);
picture.set_size_changed(params.size_changed);
- picture.set_surface_texture(params.surface_texture);
+ picture.set_texture_owner(params.surface_texture);
picture.set_wants_promotion_hint(params.wants_promotion_hint);
client_->PictureReady(picture);
}
diff --git a/chromium/media/gpu/ipc/service/BUILD.gn b/chromium/media/gpu/ipc/service/BUILD.gn
index a8ab2462725..77a9f50b782 100644
--- a/chromium/media/gpu/ipc/service/BUILD.gn
+++ b/chromium/media/gpu/ipc/service/BUILD.gn
@@ -22,6 +22,10 @@ target(link_target_type, "service") {
"media_gpu_channel.h",
"media_gpu_channel_manager.cc",
"media_gpu_channel_manager.h",
+ "picture_buffer_manager.cc",
+ "picture_buffer_manager.h",
+ "vda_video_decoder.cc",
+ "vda_video_decoder.h",
]
include_dirs = [ "//third_party/mesa/src/include" ]
@@ -37,6 +41,7 @@ target(link_target_type, "service") {
"//gpu/command_buffer/service:gles2",
"//gpu/ipc/service",
"//media:media_buildflags",
+ "//media/gpu",
"//media/gpu:buildflags",
"//media/gpu/ipc/common",
"//third_party/mesa:mesa_headers",
@@ -51,3 +56,19 @@ target(link_target_type, "service") {
deps += [ "//third_party/webrtc/common_video:common_video" ]
}
}
+
+source_set("unit_tests") {
+ testonly = true
+ sources = [
+ "picture_buffer_manager_unittest.cc",
+ "vda_video_decoder_unittest.cc",
+ ]
+ deps = [
+ ":service",
+ "//base",
+ "//base/test:test_support",
+ "//media:test_support",
+ "//testing/gmock",
+ "//testing/gtest",
+ ]
+}
diff --git a/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc b/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
index 33c4c89936d..e06a485f388 100644
--- a/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
+++ b/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
@@ -268,7 +268,7 @@ void GpuVideoDecodeAccelerator::PictureReady(const Picture& picture) {
params.color_space = picture.color_space();
params.allow_overlay = picture.allow_overlay();
params.size_changed = picture.size_changed();
- params.surface_texture = picture.surface_texture();
+ params.surface_texture = picture.texture_owner();
params.wants_promotion_hint = picture.wants_promotion_hint();
if (!Send(new AcceleratedVideoDecoderHostMsg_PictureReady(host_route_id_,
params))) {
@@ -305,7 +305,7 @@ void GpuVideoDecodeAccelerator::NotifyError(
}
}
-void GpuVideoDecodeAccelerator::OnWillDestroyStub() {
+void GpuVideoDecodeAccelerator::OnWillDestroyStub(bool have_context) {
// The stub is going away, so we have to stop and destroy VDA here, before
// returning, because the VDA may need the GL context to run and/or do its
// cleanup. We cannot destroy the VDA before the IO thread message filter is
@@ -513,7 +513,7 @@ void GpuVideoDecodeAccelerator::OnSetOverlayInfo(
void GpuVideoDecodeAccelerator::OnDestroy() {
DCHECK(video_decode_accelerator_);
- OnWillDestroyStub();
+ OnWillDestroyStub(false);
}
void GpuVideoDecodeAccelerator::OnFilterRemoved() {
diff --git a/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.h b/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.h
index eb998ce68d9..c8ebc6f1fb5 100644
--- a/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.h
+++ b/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.h
@@ -73,7 +73,7 @@ class GpuVideoDecodeAccelerator
void NotifyError(VideoDecodeAccelerator::Error error) override;
// CommandBufferStub::DestructionObserver implementation.
- void OnWillDestroyStub() override;
+ void OnWillDestroyStub(bool have_context) override;
// Function to delegate sending to actual sender.
bool Send(IPC::Message* message) override;
diff --git a/chromium/media/gpu/ipc/service/picture_buffer_manager.cc b/chromium/media/gpu/ipc/service/picture_buffer_manager.cc
new file mode 100644
index 00000000000..06d743df20a
--- /dev/null
+++ b/chromium/media/gpu/ipc/service/picture_buffer_manager.cc
@@ -0,0 +1,329 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/ipc/service/picture_buffer_manager.h"
+
+#include <map>
+#include <set>
+#include <utility>
+
+#include "base/bind.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "base/synchronization/lock.h"
+#include "gpu/command_buffer/common/mailbox_holder.h"
+
+namespace media {
+
+namespace {
+
+// Generates nonnegative picture buffer IDs, which are assumed to be unique.
+int32_t NextID(int32_t* counter) {
+ int32_t value = *counter;
+ *counter = (*counter + 1) & 0x3FFFFFFF;
+ return value;
+}
+
+class PictureBufferManagerImpl : public PictureBufferManager {
+ public:
+ explicit PictureBufferManagerImpl(
+ ReusePictureBufferCB reuse_picture_buffer_cb)
+ : reuse_picture_buffer_cb_(std::move(reuse_picture_buffer_cb)) {
+ DVLOG(1) << __func__;
+ }
+
+ void Initialize(
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ scoped_refptr<CommandBufferHelper> command_buffer_helper) override {
+ DVLOG(1) << __func__;
+ DCHECK(!gpu_task_runner_);
+
+ gpu_task_runner_ = std::move(gpu_task_runner);
+ command_buffer_helper_ = std::move(command_buffer_helper);
+ }
+
+ bool CanReadWithoutStalling() override {
+ DVLOG(3) << __func__;
+
+ base::AutoLock lock(picture_buffers_lock_);
+
+ // If there are no assigned picture buffers, predict that the VDA will
+ // request some.
+ if (picture_buffers_.empty())
+ return true;
+
+ // Predict that the VDA can output a picture if at least one picture buffer
+ // is not in use as an output.
+ for (const auto& it : picture_buffers_) {
+ if (it.second.state != PictureBufferState::OUTPUT)
+ return true;
+ }
+
+ return false;
+ }
+
+ std::vector<PictureBuffer> CreatePictureBuffers(
+ uint32_t count,
+ VideoPixelFormat pixel_format,
+ uint32_t planes,
+ gfx::Size texture_size,
+ uint32_t texture_target) override {
+ DVLOG(2) << __func__;
+ DCHECK(gpu_task_runner_);
+ DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+ DCHECK(count);
+ DCHECK(planes);
+ DCHECK_LE(planes, static_cast<uint32_t>(VideoFrame::kMaxPlanes));
+
+ // TODO(sandersd): Consider requiring that CreatePictureBuffers() is called
+ // with the context current.
+ if (!command_buffer_helper_->MakeContextCurrent()) {
+ DVLOG(1) << "Failed to make context current";
+ return std::vector<PictureBuffer>();
+ }
+
+ std::vector<PictureBuffer> picture_buffers;
+ for (uint32_t i = 0; i < count; i++) {
+ PictureBuffer::TextureIds service_ids;
+ PictureBufferData picture_data = {PictureBufferState::AVAILABLE,
+ pixel_format, texture_size};
+
+ for (uint32_t j = 0; j < planes; j++) {
+ // Create a texture for this plane.
+ GLuint service_id = command_buffer_helper_->CreateTexture(
+ texture_target, GL_RGBA, texture_size.width(),
+ texture_size.height(), GL_RGBA, GL_UNSIGNED_BYTE);
+ DCHECK(service_id);
+ service_ids.push_back(service_id);
+
+ // The texture is not cleared yet, but it will be before the VDA outputs
+ // it. Rather than requiring output to happen on the GPU thread, mark
+ // the texture as cleared immediately.
+ command_buffer_helper_->SetCleared(service_id);
+
+ // Generate a mailbox while we are still on the GPU thread.
+ picture_data.mailbox_holders[j] = gpu::MailboxHolder(
+ command_buffer_helper_->CreateMailbox(service_id), gpu::SyncToken(),
+ texture_target);
+ }
+
+ // Generate a picture buffer ID and record the picture buffer.
+ int32_t picture_buffer_id = NextID(&picture_buffer_id_);
+ {
+ base::AutoLock lock(picture_buffers_lock_);
+ DCHECK(!picture_buffers_.count(picture_buffer_id));
+ picture_buffers_[picture_buffer_id] = picture_data;
+ }
+
+ // Since our textures have no client IDs, we reuse the service IDs as
+ // convenient unique identifiers.
+ //
+ // TODO(sandersd): Refactor the bind image callback to use service IDs so
+ // that we can get rid of the client IDs altogether.
+ picture_buffers.emplace_back(picture_buffer_id, texture_size, service_ids,
+ service_ids, texture_target, pixel_format);
+
+ // Record the textures used by the picture buffer.
+ picture_buffer_textures_[picture_buffer_id] = std::move(service_ids);
+ }
+ return picture_buffers;
+ }
+
+ bool DismissPictureBuffer(int32_t picture_buffer_id) override {
+ DVLOG(2) << __func__ << "(" << picture_buffer_id << ")";
+ DCHECK(gpu_task_runner_);
+ DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+
+ base::AutoLock lock(picture_buffers_lock_);
+
+ // Check the state of the picture buffer.
+ const auto& it = picture_buffers_.find(picture_buffer_id);
+ if (it == picture_buffers_.end()) {
+ DVLOG(1) << "Unknown picture buffer " << picture_buffer_id;
+ return false;
+ }
+
+ bool is_available = it->second.state == PictureBufferState::AVAILABLE;
+
+ // Destroy the picture buffer data.
+ picture_buffers_.erase(it);
+
+ // If the picture was available, we can destroy its textures immediately.
+ if (is_available) {
+ gpu_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &PictureBufferManagerImpl::DestroyPictureBufferTextures, this,
+ picture_buffer_id));
+ }
+
+ return true;
+ }
+
+ scoped_refptr<VideoFrame> CreateVideoFrame(Picture picture,
+ base::TimeDelta timestamp,
+ gfx::Rect visible_rect,
+ gfx::Size natural_size) override {
+ DVLOG(2) << __func__ << "(" << picture.picture_buffer_id() << ")";
+ DCHECK(!picture.size_changed());
+ DCHECK(!picture.texture_owner());
+ DCHECK(!picture.wants_promotion_hint());
+
+ base::AutoLock lock(picture_buffers_lock_);
+
+ int32_t picture_buffer_id = picture.picture_buffer_id();
+
+ // Verify that the picture buffer is available.
+ const auto& it = picture_buffers_.find(picture_buffer_id);
+ if (it == picture_buffers_.end()) {
+ DVLOG(1) << "Unknown picture buffer " << picture_buffer_id;
+ return nullptr;
+ }
+
+ PictureBufferData& picture_buffer_data = it->second;
+ if (picture_buffer_data.state != PictureBufferState::AVAILABLE) {
+ DLOG(ERROR) << "Picture buffer " << picture_buffer_id
+ << " is not available";
+ return nullptr;
+ }
+
+ // Verify that the picture buffer is large enough.
+ if (!gfx::Rect(picture_buffer_data.texture_size).Contains(visible_rect)) {
+ DLOG(ERROR) << "visible_rect " << visible_rect.ToString()
+ << " exceeds coded_size "
+ << picture_buffer_data.texture_size.ToString();
+ return nullptr;
+ }
+
+ // Mark the picture as an output.
+ picture_buffer_data.state = PictureBufferState::OUTPUT;
+
+ // Create and return a VideoFrame for the picture buffer.
+ scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTextures(
+ picture_buffer_data.pixel_format, picture_buffer_data.mailbox_holders,
+ base::BindRepeating(&PictureBufferManagerImpl::OnVideoFrameDestroyed,
+ this, picture_buffer_id),
+ picture_buffer_data.texture_size, visible_rect, natural_size,
+ timestamp);
+
+ frame->set_color_space(picture.color_space());
+
+ if (picture.allow_overlay())
+ frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true);
+
+ // TODO(sandersd): Provide an API for VDAs to control this.
+ frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT, true);
+
+ return frame;
+ }
+
+ private:
+ ~PictureBufferManagerImpl() override { DVLOG(1) << __func__; }
+
+ void OnVideoFrameDestroyed(int32_t picture_buffer_id,
+ const gpu::SyncToken& sync_token) {
+ DVLOG(3) << __func__ << "(" << picture_buffer_id << ")";
+
+ base::AutoLock lock(picture_buffers_lock_);
+
+    // If the picture buffer is still assigned, mark it as waiting for its
+ const auto& it = picture_buffers_.find(picture_buffer_id);
+ if (it != picture_buffers_.end()) {
+ DCHECK_EQ(it->second.state, PictureBufferState::OUTPUT);
+ it->second.state = PictureBufferState::WAITING_FOR_SYNCTOKEN;
+ }
+
+ // Wait for the SyncToken release.
+ gpu_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CommandBufferHelper::WaitForSyncToken, command_buffer_helper_,
+ sync_token,
+ base::BindOnce(&PictureBufferManagerImpl::OnSyncTokenReleased, this,
+ picture_buffer_id)));
+ }
+
+ void OnSyncTokenReleased(int32_t picture_buffer_id) {
+ DVLOG(3) << __func__ << "(" << picture_buffer_id << ")";
+ DCHECK(gpu_task_runner_);
+ DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+
+ // If the picture buffer is still assigned, mark it as available.
+ bool is_assigned = false;
+ {
+ base::AutoLock lock(picture_buffers_lock_);
+ const auto& it = picture_buffers_.find(picture_buffer_id);
+ if (it != picture_buffers_.end()) {
+ DCHECK_EQ(it->second.state, PictureBufferState::WAITING_FOR_SYNCTOKEN);
+ it->second.state = PictureBufferState::AVAILABLE;
+ is_assigned = true;
+ }
+ }
+
+ // If the picture buffer is still assigned, it is ready to be reused.
+ // Otherwise it has been dismissed and we can now delete its textures.
+ // Neither of these operations should be done while holding the lock.
+ if (is_assigned) {
+ reuse_picture_buffer_cb_.Run(picture_buffer_id);
+ } else {
+ DestroyPictureBufferTextures(picture_buffer_id);
+ }
+ }
+
+ void DestroyPictureBufferTextures(int32_t picture_buffer_id) {
+ DVLOG(3) << __func__ << "(" << picture_buffer_id << ")";
+ DCHECK(gpu_task_runner_);
+ DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+
+ if (!command_buffer_helper_->MakeContextCurrent())
+ return;
+
+ const auto& it = picture_buffer_textures_.find(picture_buffer_id);
+ DCHECK(it != picture_buffer_textures_.end());
+ for (GLuint service_id : it->second)
+ command_buffer_helper_->DestroyTexture(service_id);
+ picture_buffer_textures_.erase(it);
+ }
+
+ ReusePictureBufferCB reuse_picture_buffer_cb_;
+
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
+ scoped_refptr<CommandBufferHelper> command_buffer_helper_;
+
+ int32_t picture_buffer_id_ = 0;
+  // Includes picture buffers that have been dismissed if their textures have
+ // not been deleted yet.
+ std::map<int32_t, std::vector<GLuint>> picture_buffer_textures_;
+
+ base::Lock picture_buffers_lock_;
+ enum class PictureBufferState {
+ // Available for use by the VDA.
+ AVAILABLE,
+ // Output by the VDA, still bound to a VideoFrame.
+ OUTPUT,
+ // Waiting on a SyncToken before being reused.
+ WAITING_FOR_SYNCTOKEN,
+ };
+ struct PictureBufferData {
+ PictureBufferState state;
+ VideoPixelFormat pixel_format;
+ gfx::Size texture_size;
+ gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes];
+ };
+  // Picture buffers that are assigned to the VDA.
+ std::map<int32_t, PictureBufferData> picture_buffers_;
+
+ DISALLOW_COPY_AND_ASSIGN(PictureBufferManagerImpl);
+};
+
+} // namespace
+
+// static
+scoped_refptr<PictureBufferManager> PictureBufferManager::Create(
+ ReusePictureBufferCB reuse_picture_buffer_cb) {
+ return base::MakeRefCounted<PictureBufferManagerImpl>(
+ std::move(reuse_picture_buffer_cb));
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/ipc/service/picture_buffer_manager.h b/chromium/media/gpu/ipc/service/picture_buffer_manager.h
new file mode 100644
index 00000000000..8bc93a5aace
--- /dev/null
+++ b/chromium/media/gpu/ipc/service/picture_buffer_manager.h
@@ -0,0 +1,120 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_IPC_SERVICE_PICTURE_BUFFER_MANAGER_H_
+#define MEDIA_GPU_IPC_SERVICE_PICTURE_BUFFER_MANAGER_H_
+
+#include <stdint.h>
+
+#include <vector>
+
+#include "base/callback_forward.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/time/time.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_types.h"
+#include "media/gpu/command_buffer_helper.h"
+#include "media/video/picture.h"
+#include "ui/gfx/geometry/rect.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+class PictureBufferManager
+ : public base::RefCountedThreadSafe<PictureBufferManager> {
+ public:
+ REQUIRE_ADOPTION_FOR_REFCOUNTED_TYPE();
+
+ using ReusePictureBufferCB = base::RepeatingCallback<void(int32_t)>;
+
+ // Creates a PictureBufferManager.
+ //
+ // |reuse_picture_buffer_cb|: Called when a picture is returned to the pool
+ // after its VideoFrame has been destructed.
+ static scoped_refptr<PictureBufferManager> Create(
+ ReusePictureBufferCB reuse_picture_buffer_cb);
+
+ // Provides access to a CommandBufferHelper. This must be done before calling
+ // CreatePictureBuffers().
+ //
+ // TODO(sandersd): It would be convenient to set this up at creation time.
+ // Consider changes to CommandBufferHelper that would enable that.
+ virtual void Initialize(
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ scoped_refptr<CommandBufferHelper> command_buffer_helper) = 0;
+
+ // Predicts whether the VDA can output a picture without reusing one first.
+ //
+ // Implementations should be pessimistic; it is better to incorrectly skip
+ // preroll than to hang waiting for an output that can never come.
+ virtual bool CanReadWithoutStalling() = 0;
+
+ // Creates and returns a vector of picture buffers, or an empty vector on
+ // failure.
+ //
+ // |count|: Number of picture buffers to create.
+ // |pixel_format|: Describes the arrangement of image data in the picture's
+ // textures and is surfaced by VideoFrames.
+ // |planes|: Number of image planes (textures) in the picture.
+ // |texture_size|: Size of textures to create.
+ // |texture_target|: Type of textures to create.
+ //
+ // Must be called on the GPU thread.
+ //
+ // TODO(sandersd): For many subsampled pixel formats, it doesn't make sense to
+ // allocate all planes with the same size.
+ // TODO(sandersd): Surface control over allocation for GL_TEXTURE_2D. Right
+ // now such textures are allocated as RGBA textures. (Other texture targets
+ // are not automatically allocated.)
+ // TODO(sandersd): The current implementation makes the context current.
+ // Consider requiring that the context is already current.
+ virtual std::vector<PictureBuffer> CreatePictureBuffers(
+ uint32_t count,
+ VideoPixelFormat pixel_format,
+ uint32_t planes,
+ gfx::Size texture_size,
+ uint32_t texture_target) = 0;
+
+ // Dismisses a picture buffer from the pool.
+ //
+ // A picture buffer may be dismissed even if it is bound to a VideoFrame; its
+ // backing textures will be maintained until the VideoFrame is destroyed.
+ //
+ // Must be called on the GPU thread.
+ virtual bool DismissPictureBuffer(int32_t picture_buffer_id) = 0;
+
+ // Creates and returns a VideoFrame bound to a picture buffer, or nullptr on
+ // failure.
+ //
+ // |picture|: Identifies the picture buffer and provides some metadata about
+ // the desired binding. Not all Picture features are supported.
+ // |timestamp|: Presentation timestamp of the VideoFrame.
+ // |visible_rect|: Visible region of the VideoFrame.
+ // |natural_size|: Natural size of the VideoFrame.
+ //
+ // TODO(sandersd): Specify which Picture features are supported.
+ virtual scoped_refptr<VideoFrame> CreateVideoFrame(
+ Picture picture,
+ base::TimeDelta timestamp,
+ gfx::Rect visible_rect,
+ gfx::Size natural_size) = 0;
+
+ protected:
+ PictureBufferManager() = default;
+
+ // Must be called on the GPU thread if Initialize() was called.
+ virtual ~PictureBufferManager() = default;
+
+ private:
+ friend class base::RefCountedThreadSafe<PictureBufferManager>;
+
+ DISALLOW_COPY_AND_ASSIGN(PictureBufferManager);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_IPC_SERVICE_PICTURE_BUFFER_MANAGER_H_
diff --git a/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc b/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc
new file mode 100644
index 00000000000..1db8cc7fecf
--- /dev/null
+++ b/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc
@@ -0,0 +1,201 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdint.h>
+
+#include "media/gpu/ipc/service/picture_buffer_manager.h"
+
+#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/test/mock_callback.h"
+#include "base/test/scoped_task_environment.h"
+#include "media/gpu/fake_command_buffer_helper.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+namespace {
+
+// TODO(sandersd): Should be part of //media, as it is used by
+// MojoVideoDecoderService (production code) as well.
+class StaticSyncTokenClient : public VideoFrame::SyncTokenClient {
+ public:
+ explicit StaticSyncTokenClient(const gpu::SyncToken& sync_token)
+ : sync_token_(sync_token) {}
+
+ void GenerateSyncToken(gpu::SyncToken* sync_token) final {
+ *sync_token = sync_token_;
+ }
+
+ void WaitSyncToken(const gpu::SyncToken& sync_token) final {}
+
+ private:
+ gpu::SyncToken sync_token_;
+
+ DISALLOW_COPY_AND_ASSIGN(StaticSyncTokenClient);
+};
+
+} // namespace
+
+class PictureBufferManagerImplTest : public testing::Test {
+ public:
+ explicit PictureBufferManagerImplTest() {
+ // TODO(sandersd): Use a separate thread for the GPU task runner.
+ cbh_ = base::MakeRefCounted<FakeCommandBufferHelper>(
+ environment_.GetMainThreadTaskRunner());
+ pbm_ = PictureBufferManager::Create(reuse_cb_.Get());
+ }
+
+ ~PictureBufferManagerImplTest() override {
+ // Drop ownership of anything that may have an async destruction process,
+ // then allow destruction to complete.
+ cbh_->StubLost();
+ cbh_ = nullptr;
+ pbm_ = nullptr;
+ environment_.RunUntilIdle();
+ }
+
+ protected:
+ void Initialize() {
+ pbm_->Initialize(environment_.GetMainThreadTaskRunner(), cbh_);
+ }
+
+ std::vector<PictureBuffer> CreateARGBPictureBuffers(uint32_t count) {
+ return pbm_->CreatePictureBuffers(count, PIXEL_FORMAT_ARGB, 1,
+ gfx::Size(320, 240), GL_TEXTURE_2D);
+ }
+
+ PictureBuffer CreateARGBPictureBuffer() {
+ std::vector<PictureBuffer> picture_buffers = CreateARGBPictureBuffers(1);
+ DCHECK_EQ(picture_buffers.size(), 1U);
+ return picture_buffers[0];
+ }
+
+ scoped_refptr<VideoFrame> CreateVideoFrame(int32_t picture_buffer_id) {
+ return pbm_->CreateVideoFrame(
+ Picture(picture_buffer_id, // picture_buffer_id
+ 0, // bitstream_buffer_id
+ gfx::Rect(), // visible_rect (ignored)
+ gfx::ColorSpace::CreateSRGB(), // color_space
+ false), // allow_overlay
+ base::TimeDelta(), // timestamp
+ gfx::Rect(), // visible_rect
+ gfx::Size()); // natural_size
+ }
+
+ gpu::SyncToken GenerateSyncToken(scoped_refptr<VideoFrame> video_frame) {
+ gpu::SyncToken sync_token(gpu::GPU_IO,
+ gpu::CommandBufferId::FromUnsafeValue(1),
+ next_release_count_++);
+ StaticSyncTokenClient sync_token_client(sync_token);
+ video_frame->UpdateReleaseSyncToken(&sync_token_client);
+ return sync_token;
+ }
+
+ base::test::ScopedTaskEnvironment environment_;
+
+ uint64_t next_release_count_ = 1;
+ testing::StrictMock<
+ base::MockCallback<PictureBufferManager::ReusePictureBufferCB>>
+ reuse_cb_;
+ scoped_refptr<FakeCommandBufferHelper> cbh_;
+ scoped_refptr<PictureBufferManager> pbm_;
+
+ DISALLOW_COPY_AND_ASSIGN(PictureBufferManagerImplTest);
+};
+
+TEST_F(PictureBufferManagerImplTest, CreateAndDestroy) {}
+
+TEST_F(PictureBufferManagerImplTest, Initialize) {
+ Initialize();
+}
+
+TEST_F(PictureBufferManagerImplTest, CreatePictureBuffer) {
+ Initialize();
+ PictureBuffer pb = CreateARGBPictureBuffer();
+ EXPECT_TRUE(cbh_->HasTexture(pb.client_texture_ids()[0]));
+}
+
+TEST_F(PictureBufferManagerImplTest, CreatePictureBuffer_ContextLost) {
+ Initialize();
+ cbh_->ContextLost();
+ std::vector<PictureBuffer> pbs = CreateARGBPictureBuffers(1);
+ EXPECT_TRUE(pbs.empty());
+}
+
+TEST_F(PictureBufferManagerImplTest, ReusePictureBuffer) {
+ Initialize();
+ PictureBuffer pb = CreateARGBPictureBuffer();
+ scoped_refptr<VideoFrame> frame = CreateVideoFrame(pb.id());
+
+ // Dropping the frame does not immediately trigger reuse.
+ gpu::SyncToken sync_token = GenerateSyncToken(frame);
+ frame = nullptr;
+ environment_.RunUntilIdle();
+
+ // Completing the SyncToken wait does.
+ EXPECT_CALL(reuse_cb_, Run(pb.id()));
+ cbh_->ReleaseSyncToken(sync_token);
+ environment_.RunUntilIdle();
+}
+
+TEST_F(PictureBufferManagerImplTest, DismissPictureBuffer_Available) {
+ Initialize();
+ PictureBuffer pb = CreateARGBPictureBuffer();
+ pbm_->DismissPictureBuffer(pb.id());
+
+ // Allocated textures should be deleted soon.
+ environment_.RunUntilIdle();
+ EXPECT_FALSE(cbh_->HasTexture(pb.client_texture_ids()[0]));
+}
+
+TEST_F(PictureBufferManagerImplTest, DismissPictureBuffer_Output) {
+ Initialize();
+ PictureBuffer pb = CreateARGBPictureBuffer();
+ scoped_refptr<VideoFrame> frame = CreateVideoFrame(pb.id());
+ pbm_->DismissPictureBuffer(pb.id());
+
+ // Allocated textures should not be deleted while the VideoFrame exists.
+ environment_.RunUntilIdle();
+ EXPECT_TRUE(cbh_->HasTexture(pb.client_texture_ids()[0]));
+
+ // Or after it has been returned.
+ gpu::SyncToken sync_token = GenerateSyncToken(frame);
+ frame = nullptr;
+ environment_.RunUntilIdle();
+ EXPECT_TRUE(cbh_->HasTexture(pb.client_texture_ids()[0]));
+
+ // Until the SyncToken has been waited for. (Reuse callback should not be
+ // called for a dismissed picture buffer.)
+ cbh_->ReleaseSyncToken(sync_token);
+ environment_.RunUntilIdle();
+ EXPECT_FALSE(cbh_->HasTexture(pb.client_texture_ids()[0]));
+}
+
+TEST_F(PictureBufferManagerImplTest, CanReadWithoutStalling) {
+ // Works before Initialize().
+ EXPECT_TRUE(pbm_->CanReadWithoutStalling());
+
+ // True before any picture buffers are allocated.
+ Initialize();
+ EXPECT_TRUE(pbm_->CanReadWithoutStalling());
+
+ // True when a picture buffer is available.
+ PictureBuffer pb = CreateARGBPictureBuffer();
+ EXPECT_TRUE(pbm_->CanReadWithoutStalling());
+
+ // False when all picture buffers are used.
+ scoped_refptr<VideoFrame> frame = CreateVideoFrame(pb.id());
+ EXPECT_FALSE(pbm_->CanReadWithoutStalling());
+
+ // True once a picture buffer is returned.
+ frame = nullptr;
+ EXPECT_TRUE(pbm_->CanReadWithoutStalling());
+
+ // True after all picture buffers have been dismissed.
+ pbm_->DismissPictureBuffer(pb.id());
+ EXPECT_TRUE(pbm_->CanReadWithoutStalling());
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.cc b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
new file mode 100644
index 00000000000..c91e3d9c5c6
--- /dev/null
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
@@ -0,0 +1,758 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/ipc/service/vda_video_decoder.h"
+
+#include <string.h>
+
+#include <utility>
+
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "gpu/config/gpu_info.h"
+#include "media/base/bitstream_buffer.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/media_log.h"
+#include "media/base/video_codecs.h"
+#include "media/base/video_types.h"
+#include "media/base/video_util.h"
+#include "media/gpu/gpu_video_accelerator_util.h"
+#include "media/gpu/gpu_video_decode_accelerator_factory.h"
+#include "media/video/picture.h"
+#include "ui/gfx/geometry/rect.h"
+#include "ui/gl/gl_image.h"
+
+namespace media {
+
+namespace {
+
+// Generates nonnegative bitstream buffer IDs, which are assumed to be unique.
+int32_t NextID(int32_t* counter) {
+ int32_t value = *counter;
+ *counter = (*counter + 1) & 0x3FFFFFFF;
+ return value;
+}
+
+scoped_refptr<CommandBufferHelper> CreateCommandBufferHelper(
+ VdaVideoDecoder::GetStubCB get_stub_cb) {
+ gpu::CommandBufferStub* stub = std::move(get_stub_cb).Run();
+
+ if (!stub) {
+ DVLOG(1) << "Failed to obtain command buffer stub";
+ return nullptr;
+ }
+
+ return CommandBufferHelper::Create(stub);
+}
+
+bool BindImage(scoped_refptr<CommandBufferHelper> command_buffer_helper,
+ uint32_t client_texture_id,
+ uint32_t texture_target,
+ const scoped_refptr<gl::GLImage>& image,
+ bool can_bind_to_sampler) {
+ return command_buffer_helper->BindImage(client_texture_id, image.get(),
+ can_bind_to_sampler);
+}
+
+std::unique_ptr<VideoDecodeAccelerator> CreateAndInitializeVda(
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
+ scoped_refptr<CommandBufferHelper> command_buffer_helper,
+ VideoDecodeAccelerator::Client* client,
+ MediaLog* media_log,
+ const VideoDecodeAccelerator::Config& config) {
+ std::unique_ptr<GpuVideoDecodeAcceleratorFactory> factory =
+ GpuVideoDecodeAcceleratorFactory::Create(
+ base::BindRepeating(&CommandBufferHelper::GetGLContext,
+ command_buffer_helper),
+ base::BindRepeating(&CommandBufferHelper::MakeContextCurrent,
+ command_buffer_helper),
+ base::BindRepeating(&BindImage, command_buffer_helper));
+ // Note: GpuVideoDecodeAcceleratorFactory may create and initialize more than
+ // one VDA. It is therefore important that VDAs do not call client methods
+ // from Initialize().
+ return factory->CreateVDA(client, config, gpu_workarounds, gpu_preferences,
+ media_log);
+}
+
+bool IsProfileSupported(
+ const VideoDecodeAccelerator::SupportedProfiles& supported_profiles,
+ VideoCodecProfile profile,
+ gfx::Size coded_size) {
+ for (const auto& supported_profile : supported_profiles) {
+ if (supported_profile.profile == profile &&
+ !supported_profile.encrypted_only &&
+ gfx::Rect(supported_profile.max_resolution)
+ .Contains(gfx::Rect(coded_size)) &&
+ gfx::Rect(coded_size)
+ .Contains(gfx::Rect(supported_profile.min_resolution))) {
+ return true;
+ }
+ }
+ return false;
+}
+
+} // namespace
+
+// static
+std::unique_ptr<VdaVideoDecoder, std::default_delete<VideoDecoder>>
+VdaVideoDecoder::Create(
+ scoped_refptr<base::SingleThreadTaskRunner> parent_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ MediaLog* media_log,
+ const gfx::ColorSpace& target_color_space,
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
+ GetStubCB get_stub_cb) {
+ // Constructed in a variable to avoid _CheckUniquePtr() PRESUBMIT.py regular
+ // expressions, which do not understand custom deleters.
+ // TODO(sandersd): Extend base::WrapUnique() to handle this.
+ std::unique_ptr<VdaVideoDecoder, std::default_delete<VideoDecoder>> ptr(
+ new VdaVideoDecoder(
+ std::move(parent_task_runner), std::move(gpu_task_runner), media_log,
+ target_color_space, base::BindOnce(&PictureBufferManager::Create),
+ base::BindOnce(&CreateCommandBufferHelper, std::move(get_stub_cb)),
+ base::BindOnce(&CreateAndInitializeVda, gpu_preferences,
+ gpu_workarounds),
+ GpuVideoAcceleratorUtil::ConvertGpuToMediaDecodeCapabilities(
+ GpuVideoDecodeAcceleratorFactory::GetDecoderCapabilities(
+ gpu_preferences, gpu_workarounds))));
+ return ptr;
+}
+
+VdaVideoDecoder::VdaVideoDecoder(
+ scoped_refptr<base::SingleThreadTaskRunner> parent_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ MediaLog* media_log,
+ const gfx::ColorSpace& target_color_space,
+ CreatePictureBufferManagerCB create_picture_buffer_manager_cb,
+ CreateCommandBufferHelperCB create_command_buffer_helper_cb,
+ CreateAndInitializeVdaCB create_and_initialize_vda_cb,
+ const VideoDecodeAccelerator::Capabilities& vda_capabilities)
+ : parent_task_runner_(std::move(parent_task_runner)),
+ gpu_task_runner_(std::move(gpu_task_runner)),
+ media_log_(media_log),
+ target_color_space_(target_color_space),
+ create_command_buffer_helper_cb_(
+ std::move(create_command_buffer_helper_cb)),
+ create_and_initialize_vda_cb_(std::move(create_and_initialize_vda_cb)),
+ vda_capabilities_(vda_capabilities),
+ timestamps_(128),
+ gpu_weak_this_factory_(this),
+ parent_weak_this_factory_(this) {
+ DVLOG(1) << __func__;
+ DCHECK(parent_task_runner_->BelongsToCurrentThread());
+ DCHECK_EQ(vda_capabilities_.flags, 0U);
+ DCHECK(media_log_);
+
+ gpu_weak_this_ = gpu_weak_this_factory_.GetWeakPtr();
+ parent_weak_this_ = parent_weak_this_factory_.GetWeakPtr();
+
+ picture_buffer_manager_ =
+ std::move(create_picture_buffer_manager_cb)
+ .Run(base::BindRepeating(&VdaVideoDecoder::ReusePictureBuffer,
+ gpu_weak_this_));
+}
+
+void VdaVideoDecoder::Destroy() {
+ DVLOG(1) << __func__;
+ DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+ // TODO(sandersd): The documentation says that Destroy() fires any pending
+ // callbacks.
+
+ // Prevent any more callbacks to this thread.
+ parent_weak_this_factory_.InvalidateWeakPtrs();
+
+ // Pass ownership of the destruction process over to the GPU thread.
+ gpu_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VdaVideoDecoder::DestroyOnGpuThread, gpu_weak_this_));
+}
+
+void VdaVideoDecoder::DestroyOnGpuThread() {
+ DVLOG(2) << __func__;
+ DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+
+ // VDA destruction is likely to result in reentrant calls to
+ // NotifyEndOfBitstreamBuffer(). Invalidating |gpu_weak_vda_| ensures that we
+ // don't call back into |vda_| during its destruction.
+ gpu_weak_vda_factory_ = nullptr;
+ vda_ = nullptr;
+
+ delete this;
+}
+
+VdaVideoDecoder::~VdaVideoDecoder() {
+ DVLOG(1) << __func__;
+ DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+ DCHECK(!gpu_weak_vda_);
+}
+
+std::string VdaVideoDecoder::GetDisplayName() const {
+ DVLOG(3) << __func__;
+ DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+ return "VdaVideoDecoder";
+}
+
+void VdaVideoDecoder::Initialize(
+ const VideoDecoderConfig& config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ const InitCB& init_cb,
+ const OutputCB& output_cb,
+ const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb) {
+ DVLOG(1) << __func__ << "(" << config.AsHumanReadableString() << ")";
+ DCHECK(parent_task_runner_->BelongsToCurrentThread());
+ DCHECK(config.IsValidConfig());
+ DCHECK(init_cb_.is_null());
+ DCHECK(flush_cb_.is_null());
+ DCHECK(reset_cb_.is_null());
+ DCHECK(decode_cbs_.empty());
+
+ if (has_error_) {
+ parent_task_runner_->PostTask(FROM_HERE, base::BindOnce(init_cb, false));
+ return;
+ }
+
+ bool reinitializing = config_.IsValidConfig();
+
+ // Store |init_cb| ASAP so that EnterErrorState() can use it. Leave |config_|
+ // alone for now so that the checks can inspect it.
+ init_cb_ = init_cb;
+ output_cb_ = output_cb;
+
+ // Verify that the configuration is supported.
+ if (reinitializing && config.codec() != config_.codec()) {
+ MEDIA_LOG(ERROR, media_log_) << "Codec cannot be changed";
+ EnterErrorState();
+ return;
+ }
+
+ if (!IsProfileSupported(vda_capabilities_.supported_profiles,
+ config.profile(), config.coded_size())) {
+ MEDIA_LOG(INFO, media_log_) << "Unsupported profile";
+ EnterErrorState();
+ return;
+ }
+
+ // TODO(sandersd): Change this to a capability if any VDA starts supporting
+ // alpha channels. This is believed to be impossible right now because VPx
+ // alpha channel data is passed in side data, which isn't sent to VDAs.
+ if (!IsOpaque(config.format())) {
+ MEDIA_LOG(INFO, media_log_) << "Alpha formats are not supported";
+ EnterErrorState();
+ return;
+ }
+
+  // Encrypted streams are not supported by design. To support encrypted streams,
+ // use a hardware VideoDecoder directly.
+ if (config.is_encrypted()) {
+ MEDIA_LOG(INFO, media_log_) << "Encrypted streams are not supported";
+ EnterErrorState();
+ return;
+ }
+
+ // The configuration is supported.
+ config_ = config;
+
+ if (reinitializing) {
+ parent_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VdaVideoDecoder::InitializeDone,
+ parent_weak_this_, true));
+ return;
+ }
+
+ gpu_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VdaVideoDecoder::InitializeOnGpuThread, gpu_weak_this_));
+}
+
+// Completes initialization on the GPU thread: creates the CommandBufferHelper,
+// initializes |picture_buffer_manager_|, translates |config_| into a
+// VideoDecodeAccelerator::Config, and creates the VDA. The result is reported
+// back to the parent thread via InitializeDone().
+void VdaVideoDecoder::InitializeOnGpuThread() {
+  DVLOG(2) << __func__;
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+  DCHECK(!vda_);
+
+  // Set up |command_buffer_helper_|.
+  scoped_refptr<CommandBufferHelper> command_buffer_helper =
+      std::move(create_command_buffer_helper_cb_).Run();
+  if (!command_buffer_helper) {
+    parent_task_runner_->PostTask(
+        FROM_HERE, base::BindOnce(&VdaVideoDecoder::InitializeDone,
+                                  parent_weak_this_, false));
+    return;
+  }
+  picture_buffer_manager_->Initialize(gpu_task_runner_, command_buffer_helper);
+
+  // Convert the configuration.
+  VideoDecodeAccelerator::Config vda_config;
+  vda_config.profile = config_.profile();
+  // vda_config.cdm_id = [Encrypted streams are not supported]
+  // vda_config.overlay_info = [Only used by AVDA]
+  vda_config.encryption_scheme = config_.encryption_scheme();
+  // Deferred initialization is disabled, so NotifyInitializationComplete()
+  // should never be called; see that method.
+  vda_config.is_deferred_initialization_allowed = false;
+  vda_config.initial_expected_coded_size = config_.coded_size();
+  vda_config.container_color_space = config_.color_space_info();
+  vda_config.target_color_space = target_color_space_;
+  vda_config.hdr_metadata = config_.hdr_metadata();
+  // vda_config.sps = [Only used by AVDA]
+  // vda_config.pps = [Only used by AVDA]
+  // vda_config.output_mode = [Only used by ARC]
+  // vda_config.supported_output_formats = [Only used by PPAPI]
+
+  // Create and initialize the VDA. |this| is passed both as the VDA client and
+  // as the MediaLog (VdaVideoDecoder implements both interfaces).
+  vda_ = std::move(create_and_initialize_vda_cb_)
+             .Run(command_buffer_helper, this, this, vda_config);
+  if (!vda_) {
+    parent_task_runner_->PostTask(
+        FROM_HERE, base::BindOnce(&VdaVideoDecoder::InitializeDone,
+                                  parent_weak_this_, false));
+    return;
+  }
+
+  // TODO(sandersd): TryToSetupDecodeOnSeparateThread().
+  // |gpu_weak_vda_| gates all future VDA calls; it is invalidated on error or
+  // destruction.
+  gpu_weak_vda_factory_.reset(
+      new base::WeakPtrFactory<VideoDecodeAccelerator>(vda_.get()));
+  gpu_weak_vda_ = gpu_weak_vda_factory_->GetWeakPtr();
+  vda_initialized_ = true;
+
+  parent_task_runner_->PostTask(FROM_HERE,
+                                base::BindOnce(&VdaVideoDecoder::InitializeDone,
+                                               parent_weak_this_, true));
+}
+
+// Runs on the parent thread with the result of initialization (posted from
+// either thread's failure paths, or from InitializeOnGpuThread() on success).
+// Completes |init_cb_|, either directly or via the error path.
+void VdaVideoDecoder::InitializeDone(bool status) {
+  DVLOG(1) << __func__ << "(" << status << ")";
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  if (has_error_)
+    return;
+
+  if (!status) {
+    // TODO(sandersd): This adds an unnecessary PostTask().
+    // EnterErrorState() asynchronously destroys callbacks, which runs
+    // |init_cb_| with false.
+    EnterErrorState();
+    return;
+  }
+
+  base::ResetAndReturn(&init_cb_).Run(true);
+}
+
+// media::VideoDecoder implementation. Assigns a bitstream buffer ID, records
+// the decode request (callback and timestamp), and forwards the buffer to the
+// VDA on the GPU thread. An EOS buffer is translated into a VDA Flush().
+void VdaVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
+                             const DecodeCB& decode_cb) {
+  DVLOG(3) << __func__ << "(" << (buffer->end_of_stream() ? "EOS" : "") << ")";
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+  DCHECK(init_cb_.is_null());
+  DCHECK(flush_cb_.is_null());
+  DCHECK(reset_cb_.is_null());
+  // Encrypted buffers are rejected at Initialize(); see the is_encrypted()
+  // check there.
+  DCHECK(buffer->end_of_stream() || !buffer->decrypt_config());
+
+  if (has_error_) {
+    parent_task_runner_->PostTask(
+        FROM_HERE, base::BindOnce(decode_cb, DecodeStatus::DECODE_ERROR));
+    return;
+  }
+
+  // Convert EOS frame to Flush(). |decode_cb| is completed when
+  // NotifyFlushDone() (or an error/reset) occurs.
+  if (buffer->end_of_stream()) {
+    flush_cb_ = decode_cb;
+    gpu_task_runner_->PostTask(
+        FROM_HERE,
+        base::BindOnce(&VideoDecodeAccelerator::Flush, gpu_weak_vda_));
+    return;
+  }
+
+  // Assign a bitstream buffer ID and record the decode request. |timestamps_|
+  // maps the ID back to the presentation timestamp when the picture is output.
+  int32_t bitstream_buffer_id = NextID(&bitstream_buffer_id_);
+  timestamps_.Put(bitstream_buffer_id, buffer->timestamp());
+  decode_cbs_[bitstream_buffer_id] = decode_cb;
+
+  gpu_task_runner_->PostTask(
+      FROM_HERE,
+      base::BindOnce(&VdaVideoDecoder::DecodeOnGpuThread, gpu_weak_this_,
+                     std::move(buffer), bitstream_buffer_id));
+}
+
+// Forwards a decode request to the VDA on the GPU thread. If |gpu_weak_vda_|
+// has been invalidated (VDA error or destruction) the request is dropped; the
+// pending callback is failed via the error path on the parent thread instead.
+void VdaVideoDecoder::DecodeOnGpuThread(scoped_refptr<DecoderBuffer> buffer,
+                                        int32_t bitstream_id) {
+  DVLOG(3) << __func__;
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+
+  if (!gpu_weak_vda_)
+    return;
+
+  vda_->Decode(std::move(buffer), bitstream_id);
+}
+
+// media::VideoDecoder implementation. |reset_cb| is run on the parent thread
+// once the VDA reset completes; see NotifyResetDoneOnParentThread().
+void VdaVideoDecoder::Reset(const base::RepeatingClosure& reset_cb) {
+  DVLOG(2) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+  DCHECK(init_cb_.is_null());
+  // Note: |flush_cb_| may not be null.
+  DCHECK(reset_cb_.is_null());
+
+  if (has_error_) {
+    parent_task_runner_->PostTask(FROM_HERE, reset_cb);
+    return;
+  }
+
+  reset_cb_ = reset_cb;
+  gpu_task_runner_->PostTask(
+      FROM_HERE, base::BindOnce(&VideoDecodeAccelerator::Reset, gpu_weak_vda_));
+}
+
+// media::VideoDecoder implementation. H.264/HEVC streams require bitstream
+// conversion by the caller before decoding.
+bool VdaVideoDecoder::NeedsBitstreamConversion() const {
+  DVLOG(3) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  // TODO(sandersd): Can we move bitstream conversion into VdaVideoDecoder and
+  // always return false?
+  return config_.codec() == kCodecH264 || config_.codec() == kCodecHEVC;
+}
+
+// media::VideoDecoder implementation. Delegates to the picture buffer manager,
+// which tracks output picture buffer availability.
+bool VdaVideoDecoder::CanReadWithoutStalling() const {
+  DVLOG(3) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  return picture_buffer_manager_->CanReadWithoutStalling();
+}
+
+// media::VideoDecoder implementation.
+int VdaVideoDecoder::GetMaxDecodeRequests() const {
+  DVLOG(3) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  // Fixed pipeline depth: up to four Decode() requests may be in flight.
+  return 4;
+}
+
+// VideoDecodeAccelerator::Client implementation. Only called when deferred
+// initialization is used, which InitializeOnGpuThread() disables
+// (|is_deferred_initialization_allowed| = false), so this should never run.
+void VdaVideoDecoder::NotifyInitializationComplete(bool success) {
+  DVLOG(2) << __func__ << "(" << success << ")";
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+  DCHECK(vda_initialized_);
+
+  NOTIMPLEMENTED();
+}
+
+// VideoDecodeAccelerator::Client implementation. Called by the VDA on the GPU
+// thread to request output picture buffers. The work is re-posted to the GPU
+// task runner rather than done inline — presumably so AssignPictureBuffers()
+// is not invoked reentrantly from inside the VDA's own call stack;
+// TODO(review): confirm.
+void VdaVideoDecoder::ProvidePictureBuffers(uint32_t requested_num_of_buffers,
+                                            VideoPixelFormat format,
+                                            uint32_t textures_per_buffer,
+                                            const gfx::Size& dimensions,
+                                            uint32_t texture_target) {
+  DVLOG(2) << __func__ << "(" << requested_num_of_buffers << ", " << format
+           << ", " << textures_per_buffer << ", " << dimensions.ToString()
+           << ", " << texture_target << ")";
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+  DCHECK(vda_initialized_);
+
+  gpu_task_runner_->PostTask(
+      FROM_HERE,
+      base::BindOnce(&VdaVideoDecoder::ProvidePictureBuffersAsync,
+                     gpu_weak_this_, requested_num_of_buffers, format,
+                     textures_per_buffer, dimensions, texture_target));
+}
+
+// Allocates picture buffers via |picture_buffer_manager_| and hands them to
+// the VDA. Allocation failure is fatal and enters the error state.
+void VdaVideoDecoder::ProvidePictureBuffersAsync(uint32_t count,
+                                                VideoPixelFormat pixel_format,
+                                                uint32_t planes,
+                                                gfx::Size texture_size,
+                                                GLenum texture_target) {
+  DVLOG(2) << __func__;
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+  DCHECK_GT(count, 0U);
+
+  // If the VDA errored or was destroyed in the meantime, drop the request.
+  if (!gpu_weak_vda_)
+    return;
+
+  // TODO(sandersd): VDAs should always be explicit.
+  if (pixel_format == PIXEL_FORMAT_UNKNOWN)
+    pixel_format = PIXEL_FORMAT_XRGB;
+
+  std::vector<PictureBuffer> picture_buffers =
+      picture_buffer_manager_->CreatePictureBuffers(
+          count, pixel_format, planes, texture_size, texture_target);
+  if (picture_buffers.empty()) {
+    parent_task_runner_->PostTask(
+        FROM_HERE,
+        base::BindOnce(&VdaVideoDecoder::EnterErrorState, parent_weak_this_));
+    return;
+  }
+
+  DCHECK(gpu_weak_vda_);
+  vda_->AssignPictureBuffers(std::move(picture_buffers));
+}
+
+// VideoDecodeAccelerator::Client implementation. Forwards the dismissal to the
+// picture buffer manager; an unknown buffer ID is fatal and enters the error
+// state (on the parent thread).
+void VdaVideoDecoder::DismissPictureBuffer(int32_t picture_buffer_id) {
+  DVLOG(2) << __func__ << "(" << picture_buffer_id << ")";
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+  DCHECK(vda_initialized_);
+
+  if (!picture_buffer_manager_->DismissPictureBuffer(picture_buffer_id)) {
+    parent_task_runner_->PostTask(
+        FROM_HERE,
+        base::BindOnce(&VdaVideoDecoder::EnterErrorState, parent_weak_this_));
+    return;
+  }
+}
+
+// VideoDecodeAccelerator::Client implementation. May be called on either
+// thread; hops to the parent thread unless already on it.
+void VdaVideoDecoder::PictureReady(const Picture& picture) {
+  DVLOG(3) << __func__ << "(" << picture.picture_buffer_id() << ")";
+  DCHECK(vda_initialized_);
+
+  if (parent_task_runner_->BelongsToCurrentThread()) {
+    // Note: This optimization is only correct if the output callback does not
+    // reentrantly call Decode(). MojoVideoDecoderService is safe, but there is
+    // no guarantee in the media::VideoDecoder interface definition.
+    PictureReadyOnParentThread(picture);
+    return;
+  }
+
+  parent_task_runner_->PostTask(
+      FROM_HERE, base::BindOnce(&VdaVideoDecoder::PictureReadyOnParentThread,
+                                parent_weak_this_, picture));
+}
+
+// Converts a VDA Picture into a VideoFrame (resolving its timestamp from
+// |timestamps_|) and delivers it via |output_cb_|.
+void VdaVideoDecoder::PictureReadyOnParentThread(Picture picture) {
+  DVLOG(3) << __func__ << "(" << picture.picture_buffer_id() << ")";
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  if (has_error_)
+    return;
+
+  // Substitute the container's visible rect if the VDA didn't specify one.
+  gfx::Rect visible_rect = picture.visible_rect();
+  if (visible_rect.IsEmpty())
+    visible_rect = config_.visible_rect();
+
+  // Look up the decode timestamp. A miss means the entry fell out of the MRU
+  // cache (or the ID was never issued), which we treat as fatal.
+  int32_t bitstream_buffer_id = picture.bitstream_buffer_id();
+  const auto timestamp_it = timestamps_.Peek(bitstream_buffer_id);
+  if (timestamp_it == timestamps_.end()) {
+    DLOG(ERROR) << "Unknown bitstream buffer " << bitstream_buffer_id;
+    EnterErrorState();
+    return;
+  }
+
+  // Create a VideoFrame for the picture.
+  scoped_refptr<VideoFrame> frame = picture_buffer_manager_->CreateVideoFrame(
+      picture, timestamp_it->second, visible_rect,
+      GetNaturalSize(visible_rect, config_.GetPixelAspectRatio()));
+  if (!frame) {
+    EnterErrorState();
+    return;
+  }
+
+  output_cb_.Run(std::move(frame));
+}
+
+// VideoDecodeAccelerator::Client implementation. May be called on either
+// thread; hops to the parent thread unless already on it.
+void VdaVideoDecoder::NotifyEndOfBitstreamBuffer(int32_t bitstream_buffer_id) {
+  DVLOG(3) << __func__ << "(" << bitstream_buffer_id << ")";
+  DCHECK(vda_initialized_);
+
+  if (parent_task_runner_->BelongsToCurrentThread()) {
+    // Note: This optimization is only correct if the decode callback does not
+    // reentrantly call Decode(). MojoVideoDecoderService is safe, but there is
+    // no guarantee in the media::VideoDecoder interface definition.
+    NotifyEndOfBitstreamBufferOnParentThread(bitstream_buffer_id);
+    return;
+  }
+
+  parent_task_runner_->PostTask(
+      FROM_HERE,
+      base::BindOnce(&VdaVideoDecoder::NotifyEndOfBitstreamBufferOnParentThread,
+                     parent_weak_this_, bitstream_buffer_id));
+}
+
+// Completes the decode callback matching |bitstream_buffer_id| with OK. An
+// unknown ID is fatal and enters the error state.
+void VdaVideoDecoder::NotifyEndOfBitstreamBufferOnParentThread(
+    int32_t bitstream_buffer_id) {
+  DVLOG(3) << __func__ << "(" << bitstream_buffer_id << ")";
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  if (has_error_)
+    return;
+
+  // Look up the decode callback.
+  const auto decode_cb_it = decode_cbs_.find(bitstream_buffer_id);
+  if (decode_cb_it == decode_cbs_.end()) {
+    DLOG(ERROR) << "Unknown bitstream buffer " << bitstream_buffer_id;
+    EnterErrorState();
+    return;
+  }
+
+  // Run a local copy in case the decode callback modifies |decode_cbs_|.
+  DecodeCB decode_cb = decode_cb_it->second;
+  decode_cbs_.erase(decode_cb_it);
+  decode_cb.Run(DecodeStatus::OK);
+}
+
+// VideoDecodeAccelerator::Client implementation. Hops to the parent thread to
+// complete the pending flush (EOS decode) callback.
+void VdaVideoDecoder::NotifyFlushDone() {
+  DVLOG(2) << __func__;
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+  DCHECK(vda_initialized_);
+
+  parent_task_runner_->PostTask(
+      FROM_HERE, base::BindOnce(&VdaVideoDecoder::NotifyFlushDoneOnParentThread,
+                                parent_weak_this_));
+}
+
+// Completes |flush_cb_| with OK. All individual decode callbacks must already
+// have been delivered by the time the VDA reports the flush done.
+void VdaVideoDecoder::NotifyFlushDoneOnParentThread() {
+  DVLOG(2) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  if (has_error_)
+    return;
+
+  DCHECK(decode_cbs_.empty());
+  base::ResetAndReturn(&flush_cb_).Run(DecodeStatus::OK);
+}
+
+// VideoDecodeAccelerator::Client implementation. Hops to the parent thread to
+// complete the pending reset.
+void VdaVideoDecoder::NotifyResetDone() {
+  DVLOG(2) << __func__;
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+  DCHECK(vda_initialized_);
+
+  parent_task_runner_->PostTask(
+      FROM_HERE, base::BindOnce(&VdaVideoDecoder::NotifyResetDoneOnParentThread,
+                                parent_weak_this_));
+}
+
+// Aborts any outstanding decode and flush callbacks, then runs |reset_cb_|.
+void VdaVideoDecoder::NotifyResetDoneOnParentThread() {
+  DVLOG(2) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  if (has_error_)
+    return;
+
+  // If NotifyFlushDone() has not been called yet, it never will be.
+  //
+  // We use an on-stack WeakPtr to detect Destroy() being called. A correct
+  // client should not call Decode() or Reset() while there is a reset pending,
+  // but we should handle that safely as well.
+  //
+  // TODO(sandersd): This is similar to DestroyCallbacks(); see about merging
+  // them.
+  base::WeakPtr<VdaVideoDecoder> weak_this = parent_weak_this_;
+
+  // Run copies so that a callback mutating |decode_cbs_| cannot invalidate the
+  // iteration.
+  std::map<int32_t, DecodeCB> local_decode_cbs = decode_cbs_;
+  decode_cbs_.clear();
+  for (const auto& it : local_decode_cbs) {
+    it.second.Run(DecodeStatus::ABORTED);
+    if (!weak_this)
+      return;
+  }
+
+  if (weak_this && !flush_cb_.is_null())
+    base::ResetAndReturn(&flush_cb_).Run(DecodeStatus::ABORTED);
+
+  if (weak_this)
+    base::ResetAndReturn(&reset_cb_).Run();
+}
+
+// VideoDecodeAccelerator::Client implementation. Invalidates the VDA weak
+// pointer (stopping all further VDA calls) and reports the error on the parent
+// thread.
+void VdaVideoDecoder::NotifyError(VideoDecodeAccelerator::Error error) {
+  DVLOG(1) << __func__ << "(" << error << ")";
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+  DCHECK(vda_initialized_);
+
+  // Invalidate |gpu_weak_vda_| so that we won't make any more |vda_| calls.
+  gpu_weak_vda_factory_ = nullptr;
+
+  parent_task_runner_->PostTask(
+      FROM_HERE, base::BindOnce(&VdaVideoDecoder::NotifyErrorOnParentThread,
+                                parent_weak_this_, error));
+}
+
+// Logs the VDA error and transitions into the error state.
+void VdaVideoDecoder::NotifyErrorOnParentThread(
+    VideoDecodeAccelerator::Error error) {
+  DVLOG(1) << __func__ << "(" << error << ")";
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  MEDIA_LOG(ERROR, media_log_) << "VDA Error " << error;
+
+  EnterErrorState();
+}
+
+// Returns a picture buffer to the VDA for reuse (runs on the GPU thread).
+// Dropped silently if the VDA has errored or been destroyed.
+void VdaVideoDecoder::ReusePictureBuffer(int32_t picture_buffer_id) {
+  DVLOG(3) << __func__ << "(" << picture_buffer_id << ")";
+  DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+
+  if (!gpu_weak_vda_)
+    return;
+
+  vda_->ReusePictureBuffer(picture_buffer_id);
+}
+
+// media::MediaLog implementation. May be called on either thread (the VDA is
+// handed |this| as its MediaLog); forwards to |media_log_| on the parent
+// thread.
+void VdaVideoDecoder::AddEvent(std::unique_ptr<MediaLogEvent> event) {
+  DVLOG(1) << __func__;
+
+  if (parent_task_runner_->BelongsToCurrentThread()) {
+    AddEventOnParentThread(std::move(event));
+    return;
+  }
+
+  // Hop to the parent thread to be sure we don't call into |media_log_| after
+  // Destroy() returns.
+  parent_task_runner_->PostTask(
+      FROM_HERE, base::BindOnce(&VdaVideoDecoder::AddEventOnParentThread,
+                                parent_weak_this_, std::move(event)));
+}
+
+// Delivers a log event to the client-owned |media_log_|.
+void VdaVideoDecoder::AddEventOnParentThread(
+    std::unique_ptr<MediaLogEvent> event) {
+  DVLOG(1) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+
+  media_log_->AddEvent(std::move(event));
+}
+
+// Transitions into the permanent error state (idempotent). All subsequent
+// client calls fail fast; outstanding callbacks are failed asynchronously by
+// DestroyCallbacks().
+void VdaVideoDecoder::EnterErrorState() {
+  DVLOG(1) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+  DCHECK(parent_weak_this_);
+
+  if (has_error_)
+    return;
+
+  // Start rejecting client calls immediately.
+  has_error_ = true;
+
+  // Destroy callbacks asynchronously to avoid calling them on a client stack.
+  parent_task_runner_->PostTask(
+      FROM_HERE,
+      base::BindOnce(&VdaVideoDecoder::DestroyCallbacks, parent_weak_this_));
+}
+
+// Fails every outstanding callback (decodes, flush, reset, init). Only runs
+// after |has_error_| has been set by EnterErrorState().
+void VdaVideoDecoder::DestroyCallbacks() {
+  DVLOG(3) << __func__;
+  DCHECK(parent_task_runner_->BelongsToCurrentThread());
+  DCHECK(parent_weak_this_);
+  DCHECK(has_error_);
+
+  // We use an on-stack WeakPtr to detect Destroy() being called. Note that any
+  // calls to Initialize(), Decode(), or Reset() are asynchronously rejected
+  // when |has_error_| is set.
+  base::WeakPtr<VdaVideoDecoder> weak_this = parent_weak_this_;
+
+  // Run copies so that a callback mutating |decode_cbs_| cannot invalidate the
+  // iteration.
+  std::map<int32_t, DecodeCB> local_decode_cbs = decode_cbs_;
+  decode_cbs_.clear();
+  for (const auto& it : local_decode_cbs) {
+    it.second.Run(DecodeStatus::DECODE_ERROR);
+    if (!weak_this)
+      return;
+  }
+
+  if (weak_this && !flush_cb_.is_null())
+    base::ResetAndReturn(&flush_cb_).Run(DecodeStatus::DECODE_ERROR);
+
+  // Note: |reset_cb_| cannot return failure, so the client won't actually find
+  // out about the error until another operation is attempted.
+  if (weak_this && !reset_cb_.is_null())
+    base::ResetAndReturn(&reset_cb_).Run();
+
+  if (weak_this && !init_cb_.is_null())
+    base::ResetAndReturn(&init_cb_).Run(false);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.h b/chromium/media/gpu/ipc/service/vda_video_decoder.h
new file mode 100644
index 00000000000..3485836f94c
--- /dev/null
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.h
@@ -0,0 +1,228 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_IPC_SERVICE_VDA_VIDEO_DECODER_H_
+#define MEDIA_GPU_IPC_SERVICE_VDA_VIDEO_DECODER_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <memory>
+
+#include "base/callback_forward.h"
+#include "base/containers/mru_cache.h"
+#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/memory/shared_memory.h"
+#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/time/time.h"
+#include "media/base/media_log.h"
+#include "media/base/video_decoder.h"
+#include "media/gpu/command_buffer_helper.h"
+#include "media/gpu/ipc/service/picture_buffer_manager.h"
+#include "media/video/video_decode_accelerator.h"
+#include "ui/gfx/color_space.h"
+#include "ui/gfx/geometry/size.h"
+#include "ui/gl/gl_bindings.h"
+
+namespace gpu {
+class CommandBufferStub;
+class GpuDriverBugWorkarounds;
+struct GpuPreferences;
+} // namespace gpu
+
+namespace media {
+
+// Implements the VideoDecoder interface backed by a VideoDecodeAccelerator.
+// This class expects to run in the GPU process via MojoVideoDecoder. It
+// operates on two task runners: client-facing VideoDecoder calls run on the
+// parent task runner, while VDA and command buffer access happens on the GPU
+// task runner.
+class VdaVideoDecoder : public VideoDecoder,
+                        public VideoDecodeAccelerator::Client,
+                        public MediaLog {
+ public:
+  using GetStubCB = base::RepeatingCallback<gpu::CommandBufferStub*()>;
+  using CreatePictureBufferManagerCB =
+      base::OnceCallback<scoped_refptr<PictureBufferManager>(
+          PictureBufferManager::ReusePictureBufferCB)>;
+  using CreateCommandBufferHelperCB =
+      base::OnceCallback<scoped_refptr<CommandBufferHelper>()>;
+  using CreateAndInitializeVdaCB =
+      base::OnceCallback<std::unique_ptr<VideoDecodeAccelerator>(
+          scoped_refptr<CommandBufferHelper>,
+          VideoDecodeAccelerator::Client*,
+          MediaLog*,
+          const VideoDecodeAccelerator::Config&)>;
+  using GetVdaCapabilitiesCB =
+      base::OnceCallback<VideoDecodeAccelerator::Capabilities(
+          const gpu::GpuPreferences&,
+          const gpu::GpuDriverBugWorkarounds&)>;
+
+  // Creates a VdaVideoDecoder. The returned unique_ptr can be safely upcast to
+  // unique_ptr<VideoDecoder>.
+  //
+  // |get_stub_cb|: Callback to retrieve the CommandBufferStub that should be
+  //     used for allocating textures and mailboxes. This callback will be
+  //     called on the GPU thread.
+  //
+  // See VdaVideoDecoder() for other arguments.
+  static std::unique_ptr<VdaVideoDecoder, std::default_delete<VideoDecoder>>
+  Create(scoped_refptr<base::SingleThreadTaskRunner> parent_task_runner,
+         scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+         MediaLog* media_log,
+         const gfx::ColorSpace& target_color_space,
+         const gpu::GpuPreferences& gpu_preferences,
+         const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
+         GetStubCB get_stub_cb);
+
+  // |parent_task_runner|: Task runner that |this| should operate on. All
+  //     methods must be called on |parent_task_runner| (should be the Mojo
+  //     MediaService task runner).
+  // |gpu_task_runner|: Task runner that GPU command buffer methods must be
+  //     called on (should be the GPU main thread).
+  // |media_log|: MediaLog object to log to; must live at least until
+  //     Destroy() returns.
+  // |target_color_space|: Color space of the output device.
+  // |create_picture_buffer_manager_cb|: PictureBufferManager factory.
+  // |create_command_buffer_helper_cb|: CommandBufferHelper factory.
+  // |create_and_initialize_vda_cb|: VideoDecodeAccelerator factory.
+  // |vda_capabilities|: Capabilities of the VDA that
+  //     |create_and_initialize_vda_cb| will produce.
+  VdaVideoDecoder(
+      scoped_refptr<base::SingleThreadTaskRunner> parent_task_runner,
+      scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+      MediaLog* media_log,
+      const gfx::ColorSpace& target_color_space,
+      CreatePictureBufferManagerCB create_picture_buffer_manager_cb,
+      CreateCommandBufferHelperCB create_command_buffer_helper_cb,
+      CreateAndInitializeVdaCB create_and_initialize_vda_cb,
+      const VideoDecodeAccelerator::Capabilities& vda_capabilities);
+
+  // media::VideoDecoder implementation.
+  std::string GetDisplayName() const override;
+  void Initialize(
+      const VideoDecoderConfig& config,
+      bool low_delay,
+      CdmContext* cdm_context,
+      const InitCB& init_cb,
+      const OutputCB& output_cb,
+      const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb) override;
+  void Decode(scoped_refptr<DecoderBuffer> buffer,
+              const DecodeCB& decode_cb) override;
+  void Reset(const base::RepeatingClosure& reset_cb) override;
+  bool NeedsBitstreamConversion() const override;
+  bool CanReadWithoutStalling() const override;
+  int GetMaxDecodeRequests() const override;
+
+  // media::MediaLog implementation.
+  void AddEvent(std::unique_ptr<MediaLogEvent> event) override;
+
+ private:
+  void Destroy() override;
+
+ protected:
+  // Owners should call Destroy(). This is automatic via
+  // std::default_delete<media::VideoDecoder> when held by a
+  // std::unique_ptr<media::VideoDecoder>.
+  ~VdaVideoDecoder() override;
+
+ private:
+  // media::VideoDecodeAccelerator::Client implementation.
+  void NotifyInitializationComplete(bool success) override;
+  void ProvidePictureBuffers(uint32_t requested_num_of_buffers,
+                             VideoPixelFormat format,
+                             uint32_t textures_per_buffer,
+                             const gfx::Size& dimensions,
+                             uint32_t texture_target) override;
+  void DismissPictureBuffer(int32_t picture_buffer_id) override;
+  void PictureReady(const Picture& picture) override;
+  void NotifyEndOfBitstreamBuffer(int32_t bitstream_buffer_id) override;
+  void NotifyFlushDone() override;
+  void NotifyResetDone() override;
+  void NotifyError(VideoDecodeAccelerator::Error error) override;
+
+  // Tasks and thread hopping.
+  void DestroyOnGpuThread();
+  void InitializeOnGpuThread();
+  void InitializeDone(bool status);
+  void DecodeOnGpuThread(scoped_refptr<DecoderBuffer> buffer,
+                         int32_t bitstream_id);
+  void PictureReadyOnParentThread(Picture picture);
+  void NotifyEndOfBitstreamBufferOnParentThread(int32_t bitstream_buffer_id);
+  void NotifyFlushDoneOnParentThread();
+  void NotifyResetDoneOnParentThread();
+  void NotifyErrorOnParentThread(VideoDecodeAccelerator::Error error);
+  void ProvidePictureBuffersAsync(uint32_t count,
+                                  VideoPixelFormat pixel_format,
+                                  uint32_t planes,
+                                  gfx::Size texture_size,
+                                  GLenum texture_target);
+  void ReusePictureBuffer(int32_t picture_buffer_id);
+  void AddEventOnParentThread(std::unique_ptr<MediaLogEvent> event);
+
+  // Error handling.
+  void EnterErrorState();
+  void DestroyCallbacks();
+
+  //
+  // Construction parameters.
+  //
+  scoped_refptr<base::SingleThreadTaskRunner> parent_task_runner_;
+  scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
+  MediaLog* media_log_;
+  gfx::ColorSpace target_color_space_;
+  scoped_refptr<PictureBufferManager> picture_buffer_manager_;
+  CreateCommandBufferHelperCB create_command_buffer_helper_cb_;
+  CreateAndInitializeVdaCB create_and_initialize_vda_cb_;
+  const VideoDecodeAccelerator::Capabilities vda_capabilities_;
+
+  //
+  // Parent thread state.
+  //
+  // Set when an unrecoverable error has occurred; once set, client calls fail
+  // fast and outstanding callbacks are destroyed asynchronously.
+  bool has_error_ = false;
+
+  // Pending Initialize() callback.
+  InitCB init_cb_;
+  // Delivers decoded VideoFrames to the client.
+  OutputCB output_cb_;
+  // Pending flush (EOS Decode()) callback.
+  DecodeCB flush_cb_;
+  // Pending Reset() callback.
+  base::RepeatingClosure reset_cb_;
+
+  // Last assigned bitstream buffer ID.
+  int32_t bitstream_buffer_id_ = 0;
+  // Decode callbacks for in-flight decodes, keyed by bitstream buffer ID.
+  std::map<int32_t, DecodeCB> decode_cbs_;
+  // Records timestamps so that they can be mapped to output pictures. Must be
+  // large enough to account for any amount of frame reordering.
+  base::MRUCache<int32_t, base::TimeDelta> timestamps_;
+
+  //
+  // GPU thread state.
+  //
+  // The wrapped VDA; created and called only on the GPU thread.
+  std::unique_ptr<VideoDecodeAccelerator> vda_;
+  // True once |vda_| has been successfully created.
+  bool vda_initialized_ = false;
+
+  //
+  // Shared state.
+  //
+  // Most recently accepted configuration. Written on the parent thread in
+  // Initialize(); read on the GPU thread in InitializeOnGpuThread().
+  VideoDecoderConfig config_;
+
+  //
+  // Weak pointers, prefixed by bound thread.
+  //
+  // |gpu_weak_vda_| is invalidated when the VDA has notified about an error, or
+  // has been destroyed. It is not valid to call VDA methods in those cases.
+  base::WeakPtr<VideoDecodeAccelerator> gpu_weak_vda_;
+  std::unique_ptr<base::WeakPtrFactory<VideoDecodeAccelerator>>
+      gpu_weak_vda_factory_;
+
+  // |gpu_weak_this_| is never explicitly invalidated.
+  // |parent_weak_this_| is invalidated when the client calls Destroy(), and
+  // indicates that we should not make any new client callbacks.
+  base::WeakPtr<VdaVideoDecoder> gpu_weak_this_;
+  base::WeakPtr<VdaVideoDecoder> parent_weak_this_;
+  base::WeakPtrFactory<VdaVideoDecoder> gpu_weak_this_factory_;
+  base::WeakPtrFactory<VdaVideoDecoder> parent_weak_this_factory_;
+
+  DISALLOW_COPY_AND_ASSIGN(VdaVideoDecoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_IPC_SERVICE_VDA_VIDEO_DECODER_H_
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
new file mode 100644
index 00000000000..56a253156c0
--- /dev/null
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
@@ -0,0 +1,405 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/ipc/service/vda_video_decoder.h"
+
+#include <stdint.h>
+
+#include "base/macros.h"
+#include "base/memory/ptr_util.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/test/mock_callback.h"
+#include "base/test/scoped_task_environment.h"
+#include "base/time/time.h"
+#include "gpu/command_buffer/common/sync_token.h"
+#include "media/base/decode_status.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/media_util.h"
+#include "media/base/mock_media_log.h"
+#include "media/base/video_codecs.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_rotation.h"
+#include "media/base/video_types.h"
+#include "media/gpu/fake_command_buffer_helper.h"
+#include "media/gpu/ipc/service/picture_buffer_manager.h"
+#include "media/video/mock_video_decode_accelerator.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/color_space.h"
+#include "ui/gfx/geometry/rect.h"
+#include "ui/gfx/geometry/size.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Invoke;
+using ::testing::Return;
+using ::testing::SaveArg;
+
+namespace media {
+
+namespace {
+
+// Payload for test DecoderBuffers. Note |kDataSize| includes the trailing NUL
+// of the string literal.
+constexpr uint8_t kData[] = "foo";
+constexpr size_t kDataSize = arraysize(kData);
+
+// Creates a small non-EOS DecoderBuffer carrying |timestamp|.
+scoped_refptr<DecoderBuffer> CreateDecoderBuffer(base::TimeDelta timestamp) {
+  scoped_refptr<DecoderBuffer> buffer =
+      DecoderBuffer::CopyFrom(kData, kDataSize);
+  buffer->set_timestamp(timestamp);
+  return buffer;
+}
+
+// SyncTokenClient that hands out a fixed, pre-built sync token.
+// TODO(sandersd): Should be part of //media, as it is used by
+// MojoVideoDecoderService (production code) as well.
+class StaticSyncTokenClient : public VideoFrame::SyncTokenClient {
+ public:
+  explicit StaticSyncTokenClient(const gpu::SyncToken& sync_token)
+      : sync_token_(sync_token) {}
+
+  void GenerateSyncToken(gpu::SyncToken* sync_token) final {
+    *sync_token = sync_token_;
+  }
+
+  void WaitSyncToken(const gpu::SyncToken& sync_token) final {}
+
+ private:
+  gpu::SyncToken sync_token_;
+
+  DISALLOW_COPY_AND_ASSIGN(StaticSyncTokenClient);
+};
+
+// Capabilities advertised to VdaVideoDecoder: VP9 profile 0 only, 640x480 up
+// to 1920x1088, unencrypted.
+VideoDecodeAccelerator::SupportedProfiles GetSupportedProfiles() {
+  VideoDecodeAccelerator::SupportedProfiles profiles;
+  {
+    VideoDecodeAccelerator::SupportedProfile profile;
+    profile.profile = VP9PROFILE_PROFILE0;
+    profile.max_resolution = gfx::Size(1920, 1088);
+    profile.min_resolution = gfx::Size(640, 480);
+    profile.encrypted_only = false;
+    profiles.push_back(std::move(profile));
+  }
+  return profiles;
+}
+
+VideoDecodeAccelerator::Capabilities GetCapabilities() {
+  VideoDecodeAccelerator::Capabilities capabilities;
+  capabilities.supported_profiles = GetSupportedProfiles();
+  capabilities.flags = 0;
+  return capabilities;
+}
+
+}  // namespace
+
+// Test fixture that wires a VdaVideoDecoder to a mock VDA and fake command
+// buffer, with both "parent" and "GPU" roles on the main test task runner.
+class VdaVideoDecoderTest : public testing::Test {
+ public:
+  explicit VdaVideoDecoderTest() {
+    // TODO(sandersd): Use a separate thread for the GPU task runner.
+    scoped_refptr<base::SingleThreadTaskRunner> task_runner =
+        environment_.GetMainThreadTaskRunner();
+    cbh_ = base::MakeRefCounted<FakeCommandBufferHelper>(task_runner);
+
+    // |owned_vda_| exists to delete |vda_| when |this| is destructed. Ownership
+    // is passed to |vdavd_| by CreateVda(), but |vda_| remains to be used for
+    // configuring mock expectations.
+    vda_ = new testing::StrictMock<MockVideoDecodeAccelerator>();
+    owned_vda_.reset(vda_);
+
+    // In either case, vda_->Destroy() should be called once.
+    EXPECT_CALL(*vda_, Destroy());
+
+    vdavd_.reset(new VdaVideoDecoder(
+        task_runner, task_runner, &media_log_, gfx::ColorSpace(),
+        base::BindOnce(&VdaVideoDecoderTest::CreatePictureBufferManager,
+                       base::Unretained(this)),
+        base::BindOnce(&VdaVideoDecoderTest::CreateCommandBufferHelper,
+                       base::Unretained(this)),
+        base::BindOnce(&VdaVideoDecoderTest::CreateAndInitializeVda,
+                       base::Unretained(this)),
+        GetCapabilities()));
+    client_ = vdavd_.get();
+  }
+
+  ~VdaVideoDecoderTest() override {
+    // Drop ownership of anything that may have an async destruction process,
+    // then allow destruction to complete.
+    cbh_->StubLost();
+    cbh_ = nullptr;
+    owned_vda_ = nullptr;
+    pbm_ = nullptr;
+    vdavd_ = nullptr;
+    environment_.RunUntilIdle();
+  }
+
+ protected:
+  // Starts initialization with |config|; callers set expectations and pump the
+  // loop themselves.
+  void InitializeWithConfig(const VideoDecoderConfig& config) {
+    vdavd_->Initialize(config, false, nullptr, init_cb_.Get(), output_cb_.Get(),
+                       waiting_cb_.Get());
+  }
+
+  // Initializes with a supported VP9 config and expects success.
+  void Initialize() {
+    InitializeWithConfig(VideoDecoderConfig(
+        kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420,
+        COLOR_SPACE_HD_REC709, VIDEO_ROTATION_0, gfx::Size(1920, 1088),
+        gfx::Rect(1920, 1080), gfx::Size(1920, 1080), EmptyExtraData(),
+        Unencrypted()));
+
+    EXPECT_CALL(*vda_, Initialize(_, vdavd_.get())).WillOnce(Return(true));
+    EXPECT_CALL(init_cb_, Run(true));
+    environment_.RunUntilIdle();
+  }
+
+  // Drives the ProvidePictureBuffers() path for one buffer and returns the
+  // picture buffer ID assigned to the VDA.
+  int32_t ProvidePictureBuffer() {
+    std::vector<PictureBuffer> picture_buffers;
+    client_->ProvidePictureBuffers(1, PIXEL_FORMAT_XRGB, 1,
+                                   gfx::Size(1920, 1088), GL_TEXTURE_2D);
+    EXPECT_CALL(*vda_, AssignPictureBuffers(_))
+        .WillOnce(SaveArg<0>(&picture_buffers));
+    environment_.RunUntilIdle();
+
+    DCHECK_EQ(picture_buffers.size(), 1U);
+    return picture_buffers[0].id();
+  }
+
+  // Submits one decode and returns the bitstream buffer ID the decoder
+  // assigned to it.
+  int32_t Decode(base::TimeDelta timestamp) {
+    int32_t bitstream_id = 0;
+    vdavd_->Decode(CreateDecoderBuffer(timestamp), decode_cb_.Get());
+    EXPECT_CALL(*vda_, Decode(_, _)).WillOnce(SaveArg<1>(&bitstream_id));
+    environment_.RunUntilIdle();
+    return bitstream_id;
+  }
+
+  void NotifyEndOfBitstreamBuffer(int32_t bitstream_id) {
+    // Expectation must go before the call because NotifyEndOfBitstreamBuffer()
+    // implements the same-thread optimization.
+    EXPECT_CALL(decode_cb_, Run(DecodeStatus::OK));
+    client_->NotifyEndOfBitstreamBuffer(bitstream_id);
+    environment_.RunUntilIdle();
+  }
+
+  scoped_refptr<VideoFrame> PictureReady(int32_t bitstream_buffer_id,
+                                         int32_t picture_buffer_id) {
+    // Expectation must go before the call because PictureReady() implements the
+    // same-thread optimization.
+    scoped_refptr<VideoFrame> frame;
+    EXPECT_CALL(output_cb_, Run(_)).WillOnce(SaveArg<0>(&frame));
+    client_->PictureReady(Picture(picture_buffer_id, bitstream_buffer_id,
+                                  gfx::Rect(1920, 1080),
+                                  gfx::ColorSpace::CreateSRGB(), true));
+    environment_.RunUntilIdle();
+    return frame;
+  }
+
+  // TODO(sandersd): This exact code is also used in
+  // PictureBufferManagerImplTest. Share the implementation.
+  gpu::SyncToken GenerateSyncToken(scoped_refptr<VideoFrame> video_frame) {
+    gpu::SyncToken sync_token(gpu::GPU_IO,
+                              gpu::CommandBufferId::FromUnsafeValue(1),
+                              next_release_count_++);
+    StaticSyncTokenClient sync_token_client(sync_token);
+    video_frame->UpdateReleaseSyncToken(&sync_token_client);
+    return sync_token;
+  }
+
+  // Factory callbacks handed to the VdaVideoDecoder under test.
+  scoped_refptr<CommandBufferHelper> CreateCommandBufferHelper() {
+    return cbh_;
+  }
+
+  scoped_refptr<PictureBufferManager> CreatePictureBufferManager(
+      PictureBufferManager::ReusePictureBufferCB reuse_cb) {
+    DCHECK(!pbm_);
+    pbm_ = PictureBufferManager::Create(std::move(reuse_cb));
+    return pbm_;
+  }
+
+  std::unique_ptr<VideoDecodeAccelerator> CreateAndInitializeVda(
+      scoped_refptr<CommandBufferHelper> command_buffer_helper,
+      VideoDecodeAccelerator::Client* client,
+      MediaLog* media_log,
+      const VideoDecodeAccelerator::Config& config) {
+    DCHECK(owned_vda_);
+    if (!owned_vda_->Initialize(config, client))
+      return nullptr;
+    return std::move(owned_vda_);
+  }
+
+  base::test::ScopedTaskEnvironment environment_;
+
+  testing::NiceMock<MockMediaLog> media_log_;
+  testing::StrictMock<base::MockCallback<VideoDecoder::InitCB>> init_cb_;
+  testing::StrictMock<base::MockCallback<VideoDecoder::OutputCB>> output_cb_;
+  testing::StrictMock<
+      base::MockCallback<VideoDecoder::WaitingForDecryptionKeyCB>>
+      waiting_cb_;
+  testing::StrictMock<base::MockCallback<VideoDecoder::DecodeCB>> decode_cb_;
+  testing::StrictMock<base::MockCallback<base::RepeatingClosure>> reset_cb_;
+
+  scoped_refptr<FakeCommandBufferHelper> cbh_;
+  // Raw observer of |owned_vda_| (and later of the decoder-owned VDA); used
+  // only for setting mock expectations.
+  testing::StrictMock<MockVideoDecodeAccelerator>* vda_;
+  std::unique_ptr<VideoDecodeAccelerator> owned_vda_;
+  scoped_refptr<PictureBufferManager> pbm_;
+  std::unique_ptr<VdaVideoDecoder, std::default_delete<VideoDecoder>> vdavd_;
+
+  // The decoder under test, viewed through its VDA client interface.
+  VideoDecodeAccelerator::Client* client_;
+  uint64_t next_release_count_ = 1;
+
+  DISALLOW_COPY_AND_ASSIGN(VdaVideoDecoderTest);
+};
+
+TEST_F(VdaVideoDecoderTest, CreateAndDestroy) {}
+
+TEST_F(VdaVideoDecoderTest, Initialize) {
+ Initialize();
+}
+
+TEST_F(VdaVideoDecoderTest, Initialize_UnsupportedSize) {
+ InitializeWithConfig(VideoDecoderConfig(
+ kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, COLOR_SPACE_SD_REC601,
+ VIDEO_ROTATION_0, gfx::Size(320, 240), gfx::Rect(320, 240),
+ gfx::Size(320, 240), EmptyExtraData(), Unencrypted()));
+ EXPECT_CALL(init_cb_, Run(false));
+ environment_.RunUntilIdle();
+}
+
+TEST_F(VdaVideoDecoderTest, Initialize_UnsupportedCodec) {
+ InitializeWithConfig(VideoDecoderConfig(
+ kCodecH264, H264PROFILE_BASELINE, PIXEL_FORMAT_I420,
+ COLOR_SPACE_HD_REC709, VIDEO_ROTATION_0, gfx::Size(1920, 1088),
+ gfx::Rect(1920, 1080), gfx::Size(1920, 1080), EmptyExtraData(),
+ Unencrypted()));
+ EXPECT_CALL(init_cb_, Run(false));
+ environment_.RunUntilIdle();
+}
+
+TEST_F(VdaVideoDecoderTest, Initialize_RejectedByVda) {
+ InitializeWithConfig(VideoDecoderConfig(
+ kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, COLOR_SPACE_HD_REC709,
+ VIDEO_ROTATION_0, gfx::Size(1920, 1088), gfx::Rect(1920, 1080),
+ gfx::Size(1920, 1080), EmptyExtraData(), Unencrypted()));
+
+ EXPECT_CALL(*vda_, Initialize(_, vdavd_.get())).WillOnce(Return(false));
+ EXPECT_CALL(init_cb_, Run(false));
+ environment_.RunUntilIdle();
+}
+
+TEST_F(VdaVideoDecoderTest, ProvideAndDismissPictureBuffer) {
+ Initialize();
+ int32_t id = ProvidePictureBuffer();
+ client_->DismissPictureBuffer(id);
+ environment_.RunUntilIdle();
+}
+
+TEST_F(VdaVideoDecoderTest, Decode) {
+ Initialize();
+ int32_t bitstream_id = Decode(base::TimeDelta());
+ NotifyEndOfBitstreamBuffer(bitstream_id);
+}
+
+TEST_F(VdaVideoDecoderTest, Decode_Reset) {
+ Initialize();
+ Decode(base::TimeDelta());
+
+ vdavd_->Reset(reset_cb_.Get());
+ EXPECT_CALL(*vda_, Reset());
+ environment_.RunUntilIdle();
+
+ client_->NotifyResetDone();
+ EXPECT_CALL(decode_cb_, Run(DecodeStatus::ABORTED));
+ EXPECT_CALL(reset_cb_, Run());
+ environment_.RunUntilIdle();
+}
+
+TEST_F(VdaVideoDecoderTest, Decode_NotifyError) {
+ Initialize();
+ Decode(base::TimeDelta());
+
+ client_->NotifyError(VideoDecodeAccelerator::PLATFORM_FAILURE);
+ EXPECT_CALL(decode_cb_, Run(DecodeStatus::DECODE_ERROR));
+ environment_.RunUntilIdle();
+}
+
+TEST_F(VdaVideoDecoderTest, Decode_OutputAndReuse) {
+ Initialize();
+ int32_t bitstream_id = Decode(base::TimeDelta());
+ NotifyEndOfBitstreamBuffer(bitstream_id);
+ int32_t picture_buffer_id = ProvidePictureBuffer();
+ scoped_refptr<VideoFrame> frame =
+ PictureReady(bitstream_id, picture_buffer_id);
+
+ // Dropping the frame triggers reuse, which will wait on the SyncPoint.
+ gpu::SyncToken sync_token = GenerateSyncToken(frame);
+ frame = nullptr;
+ environment_.RunUntilIdle();
+
+ // But the VDA won't be notified until the SyncPoint wait completes.
+ EXPECT_CALL(*vda_, ReusePictureBuffer(picture_buffer_id));
+ cbh_->ReleaseSyncToken(sync_token);
+ environment_.RunUntilIdle();
+}
+
+TEST_F(VdaVideoDecoderTest, Decode_OutputAndDismiss) {
+ Initialize();
+ int32_t bitstream_id = Decode(base::TimeDelta());
+ NotifyEndOfBitstreamBuffer(bitstream_id);
+ int32_t picture_buffer_id = ProvidePictureBuffer();
+ scoped_refptr<VideoFrame> frame =
+ PictureReady(bitstream_id, picture_buffer_id);
+
+ client_->DismissPictureBuffer(picture_buffer_id);
+ environment_.RunUntilIdle();
+
+ // Dropping the frame still requires a SyncPoint to wait on.
+ gpu::SyncToken sync_token = GenerateSyncToken(frame);
+ frame = nullptr;
+ environment_.RunUntilIdle();
+
+ // But the VDA should not be notified when it completes.
+ cbh_->ReleaseSyncToken(sync_token);
+ environment_.RunUntilIdle();
+}
+
+TEST_F(VdaVideoDecoderTest, Decode_Output_MaintainsAspect) {
+ // Initialize with a config that has a 2:1 pixel aspect ratio.
+ InitializeWithConfig(VideoDecoderConfig(
+ kCodecVP9, VP9PROFILE_PROFILE0, PIXEL_FORMAT_I420, COLOR_SPACE_HD_REC709,
+ VIDEO_ROTATION_0, gfx::Size(640, 480), gfx::Rect(640, 480),
+ gfx::Size(1280, 480), EmptyExtraData(), Unencrypted()));
+ EXPECT_CALL(*vda_, Initialize(_, vdavd_.get())).WillOnce(Return(true));
+ EXPECT_CALL(init_cb_, Run(true));
+ environment_.RunUntilIdle();
+
+ // Assign a picture buffer that has size 1920x1088.
+ int32_t picture_buffer_id = ProvidePictureBuffer();
+
+ // Produce a frame that has visible size 320x240.
+ int32_t bitstream_id = Decode(base::TimeDelta());
+ NotifyEndOfBitstreamBuffer(bitstream_id);
+
+ scoped_refptr<VideoFrame> frame;
+ EXPECT_CALL(output_cb_, Run(_)).WillOnce(SaveArg<0>(&frame));
+ client_->PictureReady(Picture(picture_buffer_id, bitstream_id,
+ gfx::Rect(320, 240),
+ gfx::ColorSpace::CreateSRGB(), true));
+ environment_.RunUntilIdle();
+
+ // The frame should have |natural_size| 640x240 (pixel aspect ratio
+ // preserved).
+ ASSERT_TRUE(frame);
+ EXPECT_EQ(frame->natural_size(), gfx::Size(640, 240));
+ EXPECT_EQ(frame->coded_size(), gfx::Size(1920, 1088));
+ EXPECT_EQ(frame->visible_rect(), gfx::Rect(320, 240));
+}
+
+TEST_F(VdaVideoDecoderTest, Flush) {
+ Initialize();
+ vdavd_->Decode(DecoderBuffer::CreateEOSBuffer(), decode_cb_.Get());
+ EXPECT_CALL(*vda_, Flush());
+ environment_.RunUntilIdle();
+
+ client_->NotifyFlushDone();
+ EXPECT_CALL(decode_cb_, Run(DecodeStatus::OK));
+ environment_.RunUntilIdle();
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/jpeg_decode_accelerator_unittest.cc b/chromium/media/gpu/jpeg_decode_accelerator_unittest.cc
index 93750e19612..d5e0a0b649e 100644
--- a/chromium/media/gpu/jpeg_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/jpeg_decode_accelerator_unittest.cc
@@ -28,7 +28,7 @@
#include "media/filters/jpeg_parser.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/gpu_jpeg_decode_accelerator_factory.h"
-#include "media/gpu/video_accelerator_unittest_helpers.h"
+#include "media/gpu/test/video_accelerator_unittest_helpers.h"
#include "media/video/jpeg_decode_accelerator.h"
#include "third_party/libyuv/include/libyuv.h"
#include "ui/gfx/codec/jpeg_codec.h"
diff --git a/chromium/media/gpu/jpeg_encode_accelerator_unittest.cc b/chromium/media/gpu/jpeg_encode_accelerator_unittest.cc
index c0cc3ca0e14..63818b44750 100644
--- a/chromium/media/gpu/jpeg_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/jpeg_encode_accelerator_unittest.cc
@@ -30,8 +30,8 @@
#include "media/base/test_data_util.h"
#include "media/filters/jpeg_parser.h"
#include "media/gpu/buildflags.h"
+#include "media/gpu/test/video_accelerator_unittest_helpers.h"
#include "media/gpu/vaapi/vaapi_jpeg_encode_accelerator.h"
-#include "media/gpu/video_accelerator_unittest_helpers.h"
#include "media/video/jpeg_encode_accelerator.h"
#include "third_party/libyuv/include/libyuv.h"
#include "ui/gfx/codec/jpeg_codec.h"
diff --git a/chromium/media/gpu/rendering_helper.cc b/chromium/media/gpu/rendering_helper.cc
deleted file mode 100644
index 80ed11baee1..00000000000
--- a/chromium/media/gpu/rendering_helper.cc
+++ /dev/null
@@ -1,559 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/rendering_helper.h"
-
-#include <string.h>
-
-#include <algorithm>
-#include <memory>
-#include <numeric>
-#include <vector>
-
-#include "base/bind.h"
-#include "base/callback_helpers.h"
-#include "base/command_line.h"
-#include "base/mac/scoped_nsautorelease_pool.h"
-#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
-#include "base/run_loop.h"
-#include "base/single_thread_task_runner.h"
-#include "base/strings/stringize_macros.h"
-#include "base/synchronization/waitable_event.h"
-#include "base/threading/thread_task_runner_handle.h"
-#include "base/time/time.h"
-#include "build/build_config.h"
-#include "ui/gl/gl_context.h"
-#include "ui/gl/gl_implementation.h"
-#include "ui/gl/gl_surface.h"
-#include "ui/gl/gl_surface_egl.h"
-#include "ui/gl/init/gl_factory.h"
-
-#if defined(USE_OZONE)
-#include "ui/ozone/public/ozone_platform.h"
-#endif // defined(USE_OZONE)
-
-// Helper for Shader creation.
-static void CreateShader(GLuint program,
- GLenum type,
- const char* source,
- int size) {
- GLuint shader = glCreateShader(type);
- glShaderSource(shader, 1, &source, &size);
- glCompileShader(shader);
- int result = GL_FALSE;
- glGetShaderiv(shader, GL_COMPILE_STATUS, &result);
- if (!result) {
- char log[4096];
- glGetShaderInfoLog(shader, arraysize(log), NULL, log);
- LOG(FATAL) << log;
- }
- glAttachShader(program, shader);
- glDeleteShader(shader);
- CHECK_EQ(static_cast<int>(glGetError()), GL_NO_ERROR);
-}
-
-namespace media {
-
-bool RenderingHelper::use_gl_ = false;
-
-RenderingHelperParams::RenderingHelperParams()
- : rendering_fps(0), render_as_thumbnails(false) {}
-
-RenderingHelperParams::RenderingHelperParams(
- const RenderingHelperParams& other) = default;
-
-RenderingHelperParams::~RenderingHelperParams() {}
-
-VideoFrameTexture::VideoFrameTexture(uint32_t texture_target,
- uint32_t texture_id,
- const base::Closure& no_longer_needed_cb)
- : texture_target_(texture_target),
- texture_id_(texture_id),
- no_longer_needed_cb_(no_longer_needed_cb) {
- DCHECK(!no_longer_needed_cb_.is_null());
-}
-
-VideoFrameTexture::~VideoFrameTexture() {
- base::ResetAndReturn(&no_longer_needed_cb_).Run();
-}
-
-RenderingHelper::RenderedVideo::RenderedVideo()
- : is_flushing(false), frames_to_drop(0) {}
-
-RenderingHelper::RenderedVideo::RenderedVideo(const RenderedVideo& other) =
- default;
-
-RenderingHelper::RenderedVideo::~RenderedVideo() {}
-
-// static
-void RenderingHelper::InitializeOneOff(bool use_gl, base::WaitableEvent* done) {
- base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
- cmd_line->AppendSwitchASCII(switches::kUseGL, gl::kGLImplementationEGLName);
-
- use_gl_ = use_gl;
-
-#if defined(USE_OZONE)
- ui::OzonePlatform::InitParams params;
- params.single_process = true;
- ui::OzonePlatform::InitializeForGPU(params);
- ui::OzonePlatform::GetInstance()->AfterSandboxEntry();
-#endif
-
- if (!use_gl_) {
- done->Signal();
- return;
- }
-
- if (!gl::init::InitializeGLOneOff())
- LOG(FATAL) << "Could not initialize GL";
- done->Signal();
-}
-
-RenderingHelper::RenderingHelper() {
- Clear();
-}
-
-RenderingHelper::~RenderingHelper() {
- CHECK_EQ(videos_.size(), 0U) << "Must call UnInitialize before dtor.";
- Clear();
-}
-
-void RenderingHelper::Initialize(const RenderingHelperParams& params,
- base::WaitableEvent* done) {
- // Use videos_.size() != 0 as a proxy for the class having already been
- // Initialize()'d, and UnInitialize() before continuing.
- if (videos_.size()) {
- base::WaitableEvent done(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
- UnInitialize(&done);
- done.Wait();
- }
-
- render_task_.Reset(
- base::Bind(&RenderingHelper::RenderContent, base::Unretained(this)));
-
- frame_duration_ = params.rendering_fps > 0
- ? base::TimeDelta::FromSeconds(1) / params.rendering_fps
- : base::TimeDelta();
-
- render_as_thumbnails_ = params.render_as_thumbnails;
- task_runner_ = base::ThreadTaskRunnerHandle::Get();
-
- videos_.resize(params.num_windows);
-
- // Skip all the GL stuff if we don't use it
- if (!use_gl_) {
- done->Signal();
- return;
- }
-
- gl_surface_ = gl::init::CreateOffscreenGLSurface(gfx::Size());
- gl_context_ = gl::init::CreateGLContext(nullptr, gl_surface_.get(),
- gl::GLContextAttribs());
- CHECK(gl_context_->MakeCurrent(gl_surface_.get()));
-
- if (render_as_thumbnails_) {
- CHECK_EQ(videos_.size(), 1U);
-
- GLint max_texture_size;
- glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);
- CHECK_GE(max_texture_size, params.thumbnails_page_size.width());
- CHECK_GE(max_texture_size, params.thumbnails_page_size.height());
-
- thumbnails_fbo_size_ = params.thumbnails_page_size;
- thumbnail_size_ = params.thumbnail_size;
-
- glGenFramebuffersEXT(1, &thumbnails_fbo_id_);
- glGenTextures(1, &thumbnails_texture_id_);
- glBindTexture(GL_TEXTURE_2D, thumbnails_texture_id_);
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- GL_RGB,
- thumbnails_fbo_size_.width(), thumbnails_fbo_size_.height(),
- 0,
- GL_RGB,
- GL_UNSIGNED_SHORT_5_6_5,
- NULL);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glBindTexture(GL_TEXTURE_2D, 0);
-
- glBindFramebufferEXT(GL_FRAMEBUFFER, thumbnails_fbo_id_);
- glFramebufferTexture2DEXT(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
- GL_TEXTURE_2D, thumbnails_texture_id_, 0);
-
- GLenum fb_status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER);
- CHECK(fb_status == GL_FRAMEBUFFER_COMPLETE) << fb_status;
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
- glBindFramebufferEXT(GL_FRAMEBUFFER,
- gl_surface_->GetBackingFramebufferObject());
- }
-
- // These vertices and texture coords. map (0,0) in the texture to the
- // bottom left of the viewport. Since we get the video frames with the
- // the top left at (0,0) we need to flip the texture y coordinate
- // in the vertex shader for this to be rendered the right way up.
- // In the case of thumbnail rendering we use the same vertex shader
- // to render the FBO the screen, where we do not want this flipping.
- // Vertices are 2 floats for position and 2 floats for texcoord each.
- static const float kVertices[] = {
- -1, 1, 0, 1, // Vertex 0
- -1, -1, 0, 0, // Vertex 1
- 1, 1, 1, 1, // Vertex 2
- 1, -1, 1, 0, // Vertex 3
- };
- static const GLvoid* kVertexPositionOffset = 0;
- static const GLvoid* kVertexTexcoordOffset =
- reinterpret_cast<GLvoid*>(sizeof(float) * 2);
- static const GLsizei kVertexStride = sizeof(float) * 4;
-
- glGenBuffersARB(1, &vertex_buffer_);
- glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer_);
- glBufferData(GL_ARRAY_BUFFER, sizeof(kVertices), kVertices, GL_STATIC_DRAW);
-
- static const char kVertexShader[] =
- STRINGIZE(varying vec2 interp_tc; attribute vec4 in_pos;
- attribute vec2 in_tc; uniform bool tex_flip; void main() {
- if (tex_flip)
- interp_tc = vec2(in_tc.x, 1.0 - in_tc.y);
- else
- interp_tc = in_tc;
- gl_Position = in_pos;
- });
-
-#if !defined(OS_WIN)
- static const char kFragmentShader[] =
- "#extension GL_OES_EGL_image_external : enable\n"
- "precision mediump float;\n"
- "varying vec2 interp_tc;\n"
- "uniform sampler2D tex;\n"
- "#ifdef GL_OES_EGL_image_external\n"
- "uniform samplerExternalOES tex_external;\n"
- "#endif\n"
- "void main() {\n"
- " vec4 color = texture2D(tex, interp_tc);\n"
- "#ifdef GL_OES_EGL_image_external\n"
- " color += texture2D(tex_external, interp_tc);\n"
- "#endif\n"
- " gl_FragColor = color;\n"
- "}\n";
-#else
- static const char kFragmentShader[] =
- "#ifdef GL_ES\n"
- "precision mediump float;\n"
- "#endif\n"
- "varying vec2 interp_tc;\n"
- "uniform sampler2D tex;\n"
- "void main() {\n"
- " gl_FragColor = texture2D(tex, interp_tc);\n"
- "}\n";
-#endif
- program_ = glCreateProgram();
- CreateShader(program_, GL_VERTEX_SHADER, kVertexShader,
- arraysize(kVertexShader));
- CreateShader(program_, GL_FRAGMENT_SHADER, kFragmentShader,
- arraysize(kFragmentShader));
- glLinkProgram(program_);
- int result = GL_FALSE;
- glGetProgramiv(program_, GL_LINK_STATUS, &result);
- if (!result) {
- char log[4096];
- glGetShaderInfoLog(program_, arraysize(log), NULL, log);
- LOG(FATAL) << log;
- }
- glUseProgram(program_);
- glDeleteProgram(program_);
-
- glUniform1i(glGetUniformLocation(program_, "tex_flip"), 0);
- glUniform1i(glGetUniformLocation(program_, "tex"), 0);
- GLint tex_external = glGetUniformLocation(program_, "tex_external");
- if (tex_external != -1) {
- glUniform1i(tex_external, 1);
- }
- int pos_location = glGetAttribLocation(program_, "in_pos");
- glEnableVertexAttribArray(pos_location);
- glVertexAttribPointer(pos_location, 2, GL_FLOAT, GL_FALSE, kVertexStride,
- kVertexPositionOffset);
- int tc_location = glGetAttribLocation(program_, "in_tc");
- glEnableVertexAttribArray(tc_location);
- glVertexAttribPointer(tc_location, 2, GL_FLOAT, GL_FALSE, kVertexStride,
- kVertexTexcoordOffset);
-
- // Unbind the vertex buffer
- glBindBuffer(GL_ARRAY_BUFFER, 0);
- done->Signal();
-}
-
-void RenderingHelper::UnInitialize(base::WaitableEvent* done) {
- // We have never been initialized in the first place...
- if (task_runner_.get() == nullptr) {
- done->Signal();
- return;
- }
-
- CHECK(task_runner_->BelongsToCurrentThread());
-
- render_task_.Cancel();
-
- if (!use_gl_) {
- Clear();
- done->Signal();
- return;
- }
-
- if (render_as_thumbnails_) {
- glDeleteTextures(1, &thumbnails_texture_id_);
- glDeleteFramebuffersEXT(1, &thumbnails_fbo_id_);
- }
-
- glDeleteBuffersARB(1, &vertex_buffer_);
-
- gl_context_->ReleaseCurrent(gl_surface_.get());
- gl_context_ = NULL;
- gl_surface_ = NULL;
-
- Clear();
- done->Signal();
-}
-
-void RenderingHelper::CreateTexture(uint32_t texture_target,
- uint32_t* texture_id,
- const gfx::Size& size,
- base::WaitableEvent* done) {
- if (!task_runner_->BelongsToCurrentThread()) {
- task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&RenderingHelper::CreateTexture, base::Unretained(this),
- texture_target, texture_id, size, done));
- return;
- }
-
- if (!use_gl_) {
- *texture_id = 0;
- done->Signal();
- return;
- }
-
- glGenTextures(1, texture_id);
- glBindTexture(texture_target, *texture_id);
- if (texture_target == GL_TEXTURE_2D) {
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- GL_RGBA,
- size.width(), size.height(),
- 0,
- GL_RGBA,
- GL_UNSIGNED_BYTE,
- NULL);
- }
- glTexParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- // OpenGLES2.0.25 section 3.8.2 requires CLAMP_TO_EDGE for NPOT textures.
- glTexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- CHECK_EQ(static_cast<int>(glGetError()), GL_NO_ERROR);
- done->Signal();
-}
-
-// Helper function to set GL viewport.
-static inline void GLSetViewPort(const gfx::Rect& area) {
- glViewport(area.x(), area.y(), area.width(), area.height());
- glScissor(area.x(), area.y(), area.width(), area.height());
-}
-
-void RenderingHelper::RenderThumbnail(uint32_t texture_target,
- uint32_t texture_id) {
- CHECK(task_runner_->BelongsToCurrentThread());
- CHECK(use_gl_);
-
- const int width = thumbnail_size_.width();
- const int height = thumbnail_size_.height();
- const int thumbnails_in_row = thumbnails_fbo_size_.width() / width;
- const int thumbnails_in_column = thumbnails_fbo_size_.height() / height;
- const int row = (frame_count_ / thumbnails_in_row) % thumbnails_in_column;
- const int col = frame_count_ % thumbnails_in_row;
-
- gfx::Rect area(col * width, row * height, width, height);
-
- glUniform1i(glGetUniformLocation(program_, "tex_flip"), 0);
- glBindFramebufferEXT(GL_FRAMEBUFFER, thumbnails_fbo_id_);
- GLSetViewPort(area);
- RenderTexture(texture_target, texture_id);
- glBindFramebufferEXT(GL_FRAMEBUFFER,
- gl_surface_->GetBackingFramebufferObject());
-
- // Need to flush the GL commands before we return the tnumbnail texture to
- // the decoder.
- glFlush();
- ++frame_count_;
-}
-
-void RenderingHelper::QueueVideoFrame(
- size_t window_id,
- scoped_refptr<VideoFrameTexture> video_frame) {
- CHECK(task_runner_->BelongsToCurrentThread());
- RenderedVideo* video = &videos_[window_id];
- DCHECK(!video->is_flushing);
-
- // If running at zero fps, return immediately. This will give the frame
- // back to the client once it drops its reference to video_frame.
- if (frame_duration_.is_zero())
- return;
-
- video->pending_frames.push(video_frame);
-
- if (video->frames_to_drop > 0 && video->pending_frames.size() > 1) {
- --video->frames_to_drop;
- video->pending_frames.pop();
- }
-
- // Schedules the first RenderContent() if need.
- if (scheduled_render_time_.is_null()) {
- scheduled_render_time_ = base::TimeTicks::Now();
- task_runner_->PostTask(FROM_HERE, render_task_.callback());
- }
-}
-
-void RenderingHelper::RenderTexture(uint32_t texture_target,
- uint32_t texture_id) {
- // The ExternalOES sampler is bound to GL_TEXTURE1 and the Texture2D sampler
- // is bound to GL_TEXTURE0.
- if (texture_target == GL_TEXTURE_2D) {
- glActiveTexture(GL_TEXTURE0 + 0);
- } else if (texture_target == GL_TEXTURE_EXTERNAL_OES) {
- glActiveTexture(GL_TEXTURE0 + 1);
- }
- glBindTexture(texture_target, texture_id);
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- glBindTexture(texture_target, 0);
-
- CHECK_EQ(static_cast<int>(glGetError()), GL_NO_ERROR);
-}
-
-void RenderingHelper::DeleteTexture(uint32_t texture_id) {
- CHECK(task_runner_->BelongsToCurrentThread());
-
- if (!use_gl_)
- return;
-
- glDeleteTextures(1, &texture_id);
- CHECK_EQ(static_cast<int>(glGetError()), GL_NO_ERROR);
-}
-
-gl::GLContext* RenderingHelper::GetGLContext() {
- return gl_context_.get();
-}
-
-void* RenderingHelper::GetGLDisplay() {
- return gl_surface_->GetDisplay();
-}
-
-void RenderingHelper::Clear() {
- videos_.clear();
- task_runner_ = nullptr;
- gl_context_ = NULL;
- gl_surface_ = NULL;
-
- render_as_thumbnails_ = false;
- frame_count_ = 0;
- thumbnails_fbo_id_ = 0;
- thumbnails_texture_id_ = 0;
-}
-
-void RenderingHelper::GetThumbnailsAsRGBA(std::vector<unsigned char>* rgba,
- base::WaitableEvent* done) {
- CHECK(render_as_thumbnails_ && use_gl_);
-
- const size_t num_pixels = thumbnails_fbo_size_.GetArea();
- rgba->resize(num_pixels * 4);
- glBindFramebufferEXT(GL_FRAMEBUFFER, thumbnails_fbo_id_);
- glPixelStorei(GL_PACK_ALIGNMENT, 1);
- // We can only count on GL_RGBA/GL_UNSIGNED_BYTE support.
- glReadPixels(0, 0, thumbnails_fbo_size_.width(),
- thumbnails_fbo_size_.height(), GL_RGBA, GL_UNSIGNED_BYTE,
- &(*rgba)[0]);
- glBindFramebufferEXT(GL_FRAMEBUFFER,
- gl_surface_->GetBackingFramebufferObject());
-
- done->Signal();
-}
-
-void RenderingHelper::Flush(size_t window_id) {
- videos_[window_id].is_flushing = true;
-}
-
-void RenderingHelper::RenderContent() {
- CHECK(task_runner_->BelongsToCurrentThread());
-
- // Frames that will be returned to the client (via the no_longer_needed_cb)
- // after this vector falls out of scope at the end of this method. We need
- // to keep references to them until after SwapBuffers() call below.
- std::vector<scoped_refptr<VideoFrameTexture>> frames_to_be_returned;
- for (RenderedVideo& video : videos_) {
- if (video.pending_frames.empty())
- continue;
- scoped_refptr<VideoFrameTexture> frame = video.pending_frames.front();
- // TODO(owenlin): Render to FBO.
- // RenderTexture(frame->texture_target(), frame->texture_id());
-
- if (video.pending_frames.size() > 1 || video.is_flushing) {
- frames_to_be_returned.push_back(video.pending_frames.front());
- video.pending_frames.pop();
- } else {
- ++video.frames_to_drop;
- }
- }
-
- ScheduleNextRenderContent();
-}
-
-void RenderingHelper::DropOneFrameForAllVideos() {
- for (RenderedVideo& video : videos_) {
- if (video.pending_frames.empty())
- continue;
-
- if (video.pending_frames.size() > 1 || video.is_flushing) {
- video.pending_frames.pop();
- } else {
- ++video.frames_to_drop;
- }
- }
-}
-
-void RenderingHelper::ScheduleNextRenderContent() {
- const auto vsync_interval = base::TimeDelta::FromSeconds(1) / 60;
-
- scheduled_render_time_ += frame_duration_;
- base::TimeTicks now = base::TimeTicks::Now();
- base::TimeTicks target;
-
- if (vsync_timebase_.is_null()) {
- vsync_timebase_ = now;
- }
-
- if (vsync_interval.is_zero()) {
- target = std::max(now, scheduled_render_time_);
- } else {
- // Schedules the next RenderContent() at latest VSYNC before the
- // |scheduled_render_time_|.
- target = std::max(now + vsync_interval, scheduled_render_time_);
-
- int64_t intervals = (target - vsync_timebase_) / vsync_interval;
- target = vsync_timebase_ + intervals * vsync_interval;
- }
-
- // When the rendering falls behind, drops frames.
- while (scheduled_render_time_ < target) {
- scheduled_render_time_ += frame_duration_;
- DropOneFrameForAllVideos();
- }
-
- task_runner_->PostDelayedTask(FROM_HERE, render_task_.callback(),
- target - now);
-}
-} // namespace media
diff --git a/chromium/media/gpu/rendering_helper.h b/chromium/media/gpu/rendering_helper.h
deleted file mode 100644
index ba03eb32c4c..00000000000
--- a/chromium/media/gpu/rendering_helper.h
+++ /dev/null
@@ -1,186 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_RENDERING_HELPER_H_
-#define MEDIA_GPU_RENDERING_HELPER_H_
-
-#include <stddef.h>
-#include <stdint.h>
-
-#include <map>
-#include <memory>
-#include <vector>
-
-#include "base/cancelable_callback.h"
-#include "base/containers/queue.h"
-#include "base/macros.h"
-#include "base/memory/ref_counted.h"
-#include "base/single_thread_task_runner.h"
-#include "base/time/time.h"
-#include "build/build_config.h"
-#include "ui/gfx/geometry/rect.h"
-#include "ui/gfx/geometry/size.h"
-#include "ui/gl/gl_bindings.h"
-#include "ui/gl/gl_context.h"
-#include "ui/gl/gl_surface.h"
-
-namespace base {
-class WaitableEvent;
-}
-
-namespace media {
-
-class VideoFrameTexture : public base::RefCounted<VideoFrameTexture> {
- public:
- uint32_t texture_id() const { return texture_id_; }
- uint32_t texture_target() const { return texture_target_; }
-
- VideoFrameTexture(uint32_t texture_target,
- uint32_t texture_id,
- const base::Closure& no_longer_needed_cb);
-
- private:
- friend class base::RefCounted<VideoFrameTexture>;
-
- uint32_t texture_target_;
- uint32_t texture_id_;
- base::Closure no_longer_needed_cb_;
-
- ~VideoFrameTexture();
-};
-
-struct RenderingHelperParams {
- RenderingHelperParams();
- RenderingHelperParams(const RenderingHelperParams& other);
- ~RenderingHelperParams();
-
- // The target rendering FPS. A value of 0 makes the RenderingHelper return
- // frames immediately.
- int rendering_fps;
-
- // The number of windows. We play each stream in its own window
- // on the screen.
- int num_windows;
-
- // The members below are only used for the thumbnail mode where all frames
- // are rendered in sequence onto one FBO for comparison/verification purposes.
-
- // Whether the frames are rendered as scaled thumbnails within a
- // larger FBO that is in turn rendered to the window.
- bool render_as_thumbnails;
- // The size of the FBO containing all visible thumbnails.
- gfx::Size thumbnails_page_size;
- // The size of each thumbnail within the FBO.
- gfx::Size thumbnail_size;
-};
-
-// Creates and draws textures used by the video decoder.
-// This class is not thread safe and thus all the methods of this class
-// (except for ctor/dtor) ensure they're being run on a single thread.
-class RenderingHelper {
- public:
- RenderingHelper();
- ~RenderingHelper();
-
- // Initialize GL. This method must be called on the rendering thread.
- static void InitializeOneOff(bool use_gl, base::WaitableEvent* done);
-
- // Create the render context and windows by the specified
- // dimensions. This method must be called on the rendering thread.
- void Initialize(const RenderingHelperParams& params,
- base::WaitableEvent* done);
-
- // Undo the effects of Initialize() and signal |*done|. This method
- // must be called on the rendering thread.
- void UnInitialize(base::WaitableEvent* done);
-
- // Return a newly-created GLES2 texture id of the specified size, and
- // signal |*done|.
- void CreateTexture(uint32_t texture_target,
- uint32_t* texture_id,
- const gfx::Size& size,
- base::WaitableEvent* done);
-
- // Render thumbnail in the |texture_id| to the FBO buffer using target
- // |texture_target|.
- void RenderThumbnail(uint32_t texture_target, uint32_t texture_id);
-
- // Queues the |video_frame| for rendering.
- void QueueVideoFrame(size_t window_id,
- scoped_refptr<VideoFrameTexture> video_frame);
-
- // Flushes the pending frames. Notify the rendering_helper there won't be
- // more video frames.
- void Flush(size_t window_id);
-
- // Delete |texture_id|.
- void DeleteTexture(uint32_t texture_id);
-
- // Get the platform specific handle to the OpenGL display.
- void* GetGLDisplay();
-
- // Get the GL context.
- gl::GLContext* GetGLContext();
-
- // Get rendered thumbnails as RGBA.
- void GetThumbnailsAsRGBA(std::vector<unsigned char>* rgba,
- base::WaitableEvent* done);
-
- private:
- struct RenderedVideo {
-
- // True if there won't be any new video frames comming.
- bool is_flushing;
-
- // The number of frames need to be dropped to catch up the rendering. We
- // always keep the last remaining frame in pending_frames even after it
- // has been rendered, so that we have something to display if the client
- // is falling behind on providing us with new frames during timer-driven
- // playback.
- int frames_to_drop;
-
- // The video frames pending for rendering.
- base::queue<scoped_refptr<VideoFrameTexture>> pending_frames;
-
- RenderedVideo();
- RenderedVideo(const RenderedVideo& other);
- ~RenderedVideo();
- };
-
- void Clear();
- void RenderContent();
- void DropOneFrameForAllVideos();
- void ScheduleNextRenderContent();
-
- // Render |texture_id| to the current view port of the screen using target
- // |texture_target|.
- void RenderTexture(uint32_t texture_target, uint32_t texture_id);
-
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
-
- scoped_refptr<gl::GLContext> gl_context_;
- scoped_refptr<gl::GLSurface> gl_surface_;
-
- std::vector<RenderedVideo> videos_;
-
- bool render_as_thumbnails_;
- int frame_count_;
- GLuint thumbnails_fbo_id_;
- GLuint thumbnails_texture_id_;
- gfx::Size thumbnails_fbo_size_;
- gfx::Size thumbnail_size_;
- GLuint vertex_buffer_;
- GLuint program_;
- static bool use_gl_;
- base::TimeDelta frame_duration_;
- base::TimeTicks scheduled_render_time_;
- base::CancelableClosure render_task_;
- base::TimeTicks vsync_timebase_;
-
- DISALLOW_COPY_AND_ASSIGN(RenderingHelper);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_RENDERING_HELPER_H_
diff --git a/chromium/media/gpu/shared_memory_region.cc b/chromium/media/gpu/shared_memory_region.cc
index 38d2038c69a..6db362d69bc 100644
--- a/chromium/media/gpu/shared_memory_region.cc
+++ b/chromium/media/gpu/shared_memory_region.cc
@@ -2,7 +2,6 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "base/sys_info.h"
#include "media/gpu/shared_memory_region.h"
namespace media {
@@ -11,12 +10,7 @@ SharedMemoryRegion::SharedMemoryRegion(const base::SharedMemoryHandle& handle,
off_t offset,
size_t size,
bool read_only)
- : shm_(handle, read_only),
- offset_(offset),
- size_(size),
- alignment_size_(offset % base::SysInfo::VMAllocationGranularity()) {
- DCHECK_GE(offset_, 0) << "Invalid offset: " << offset_;
-}
+ : shm_(handle, read_only), offset_(offset), size_(size) {}
SharedMemoryRegion::SharedMemoryRegion(const BitstreamBuffer& bitstream_buffer,
bool read_only)
@@ -26,16 +20,11 @@ SharedMemoryRegion::SharedMemoryRegion(const BitstreamBuffer& bitstream_buffer,
read_only) {}
bool SharedMemoryRegion::Map() {
- if (offset_ < 0) {
- DVLOG(1) << "Invalid offset: " << offset_;
- return false;
- }
- return shm_.MapAt(offset_ - alignment_size_, size_ + alignment_size_);
+ return shm_.MapAt(offset_, size_);
}
void* SharedMemoryRegion::memory() {
- int8_t* addr = reinterpret_cast<int8_t*>(shm_.memory());
- return addr ? addr + alignment_size_ : nullptr;
+ return shm_.memory();
}
} // namespace media
diff --git a/chromium/media/gpu/shared_memory_region.h b/chromium/media/gpu/shared_memory_region.h
index 09984a70bca..48c8098619e 100644
--- a/chromium/media/gpu/shared_memory_region.h
+++ b/chromium/media/gpu/shared_memory_region.h
@@ -5,8 +5,9 @@
#ifndef MEDIA_GPU_SHARED_MEMORY_REGION_H_
#define MEDIA_GPU_SHARED_MEMORY_REGION_H_
-#include "base/memory/shared_memory.h"
+#include "base/memory/shared_memory_handle.h"
#include "media/base/bitstream_buffer.h"
+#include "media/base/unaligned_shared_memory.h"
namespace media {
@@ -15,6 +16,10 @@ namespace media {
// the value of |SysInfo::VMAllocationGranularity()|, the |offset| of a
// SharedMemoryRegion needs not to be aligned, this class hides the details
// and returns the mapped address of the given offset.
+//
+// TODO(sandersd): This is now a trivial wrapper around
+// media::UnalignedSharedMemory. Switch all users over and delete
+// SharedMemoryRegion.
class SharedMemoryRegion {
public:
// Creates a SharedMemoryRegion.
@@ -43,10 +48,9 @@ class SharedMemoryRegion {
size_t size() const { return size_; }
private:
- base::SharedMemory shm_;
+ UnalignedSharedMemory shm_;
off_t offset_;
size_t size_;
- size_t alignment_size_;
DISALLOW_COPY_AND_ASSIGN(SharedMemoryRegion);
};
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
index 060fea19431..91650ff6e97 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
@@ -64,15 +64,6 @@
} while (0)
namespace media {
-namespace {
-void DropGLImage(scoped_refptr<gl::GLImage> gl_image,
- BindGLImageCallback bind_image_cb,
- GLuint client_texture_id,
- GLuint texture_target) {
- bind_image_cb.Run(client_texture_id, texture_target, nullptr, false);
-}
-
-} // namespace
// static
const uint32_t V4L2SliceVideoDecodeAccelerator::supported_input_fourccs_[] = {
@@ -326,11 +317,8 @@ class V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator
// VP8Decoder::VP8Accelerator implementation.
scoped_refptr<VP8Picture> CreateVP8Picture() override;
- bool SubmitDecode(const scoped_refptr<VP8Picture>& pic,
- const Vp8FrameHeader* frame_hdr,
- const scoped_refptr<VP8Picture>& last_frame,
- const scoped_refptr<VP8Picture>& golden_frame,
- const scoped_refptr<VP8Picture>& alt_frame) override;
+ bool SubmitDecode(scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& reference_frames) override;
bool OutputPicture(const scoped_refptr<VP8Picture>& pic) override;
@@ -1442,6 +1430,13 @@ void V4L2SliceVideoDecodeAccelerator::DecodeBufferTask() {
VLOGF(1) << "Error decoding stream";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
+
+ case AcceleratedVideoDecoder::kNoKey:
+ NOTREACHED() << "Should not reach here unless this class accepts "
+ "encrypted streams.";
+ DVLOGF(4) << "No key for decoding stream.";
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return;
}
}
}
@@ -1521,13 +1516,6 @@ bool V4L2SliceVideoDecodeAccelerator::DestroyOutputs(bool dismiss) {
VLOGF(1) << "eglDestroySyncKHR failed.";
}
- if (output_record.gl_image) {
- child_task_runner_->PostTask(
- FROM_HERE, base::Bind(&DropGLImage, std::move(output_record.gl_image),
- bind_image_cb_, output_record.client_texture_id,
- device_->GetTextureTarget()));
- }
-
picture_buffers_to_dismiss.push_back(output_record.picture_id);
}
@@ -1639,7 +1627,6 @@ void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask(
OutputRecord& output_record = output_buffer_map_[i];
DCHECK(!output_record.at_device);
DCHECK(!output_record.at_client);
- DCHECK(!output_record.gl_image);
DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
DCHECK_EQ(output_record.picture_id, -1);
DCHECK(output_record.dmabuf_fds.empty());
@@ -1723,15 +1710,14 @@ void V4L2SliceVideoDecodeAccelerator::CreateGLImageFor(
true);
decoder_thread_task_runner_->PostTask(
FROM_HERE,
- base::Bind(&V4L2SliceVideoDecodeAccelerator::AssignGLImage,
- base::Unretained(this), buffer_index, picture_buffer_id,
- gl_image, base::Passed(&passed_dmabuf_fds)));
+ base::BindOnce(&V4L2SliceVideoDecodeAccelerator::AssignDmaBufs,
+ base::Unretained(this), buffer_index, picture_buffer_id,
+ base::Passed(&passed_dmabuf_fds)));
}
-void V4L2SliceVideoDecodeAccelerator::AssignGLImage(
+void V4L2SliceVideoDecodeAccelerator::AssignDmaBufs(
size_t buffer_index,
int32_t picture_buffer_id,
- scoped_refptr<gl::GLImage> gl_image,
std::unique_ptr<std::vector<base::ScopedFD>> passed_dmabuf_fds) {
DVLOGF(3) << "index=" << buffer_index;
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
@@ -1750,12 +1736,10 @@ void V4L2SliceVideoDecodeAccelerator::AssignGLImage(
}
OutputRecord& output_record = output_buffer_map_[buffer_index];
- DCHECK(!output_record.gl_image);
DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
DCHECK(!output_record.at_client);
DCHECK(!output_record.at_device);
- output_record.gl_image = gl_image;
if (output_mode_ == Config::OutputMode::IMPORT) {
DCHECK(output_record.dmabuf_fds.empty());
output_record.dmabuf_fds = std::move(*passed_dmabuf_fds);
@@ -1839,7 +1823,6 @@ void V4L2SliceVideoDecodeAccelerator::ImportBufferForPictureTask(
DCHECK(!iter->at_device);
iter->at_client = false;
if (iter->texture_id != 0) {
- iter->gl_image = nullptr;
child_task_runner_->PostTask(
FROM_HERE,
base::Bind(&V4L2SliceVideoDecodeAccelerator::CreateGLImageFor,
@@ -2648,16 +2631,14 @@ static void FillV4L2Vp8EntropyHeader(
}
bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode(
- const scoped_refptr<VP8Picture>& pic,
- const Vp8FrameHeader* frame_hdr,
- const scoped_refptr<VP8Picture>& last_frame,
- const scoped_refptr<VP8Picture>& golden_frame,
- const scoped_refptr<VP8Picture>& alt_frame) {
+ scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& reference_frames) {
struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr;
memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));
+ const auto& frame_hdr = pic->frame_hdr;
+ v4l2_frame_hdr.key_frame = frame_hdr->frame_type;
#define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
- FHDR_TO_V4L2_FHDR(key_frame);
FHDR_TO_V4L2_FHDR(version);
FHDR_TO_V4L2_FHDR(width);
FHDR_TO_V4L2_FHDR(horizontal_scale);
@@ -2713,6 +2694,7 @@ bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode(
VP8PictureToV4L2DecodeSurface(pic);
std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
+ const auto last_frame = reference_frames.GetFrame(Vp8RefType::VP8_FRAME_LAST);
if (last_frame) {
scoped_refptr<V4L2DecodeSurface> last_frame_surface =
VP8PictureToV4L2DecodeSurface(last_frame);
@@ -2722,6 +2704,8 @@ bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode(
v4l2_frame_hdr.last_frame = VIDEO_MAX_FRAME;
}
+ const auto golden_frame =
+ reference_frames.GetFrame(Vp8RefType::VP8_FRAME_GOLDEN);
if (golden_frame) {
scoped_refptr<V4L2DecodeSurface> golden_frame_surface =
VP8PictureToV4L2DecodeSurface(golden_frame);
@@ -2731,6 +2715,8 @@ bool V4L2SliceVideoDecodeAccelerator::V4L2VP8Accelerator::SubmitDecode(
v4l2_frame_hdr.golden_frame = VIDEO_MAX_FRAME;
}
+ const auto alt_frame =
+ reference_frames.GetFrame(Vp8RefType::VP8_FRAME_ALTREF);
if (alt_frame) {
scoped_refptr<V4L2DecodeSurface> alt_frame_surface =
VP8PictureToV4L2DecodeSurface(alt_frame);
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
index 5838beb4120..fcf8e827698 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
@@ -90,7 +90,6 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
int32_t picture_id;
GLuint client_texture_id;
GLuint texture_id;
- scoped_refptr<gl::GLImage> gl_image;
EGLSyncKHR egl_sync;
std::vector<base::ScopedFD> dmabuf_fds;
bool cleared;
@@ -269,13 +268,12 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
const gfx::Size& size,
uint32_t fourcc);
- // Take the GLImage |gl_image|, created for |picture_buffer_id|, and use it
+ // Take the dmabuf |passed_dmabuf_fds|, for |picture_buffer_id|, and use it
// for OutputRecord at |buffer_index|. The buffer is backed by
// |passed_dmabuf_fds|, and the OutputRecord takes ownership of them.
- void AssignGLImage(
+ void AssignDmaBufs(
size_t buffer_index,
int32_t picture_buffer_id,
- scoped_refptr<gl::GLImage> gl_image,
// TODO(posciak): (https://crbug.com/561749) we should normally be able to
// pass the vector by itself via std::move, but it's not possible to do
// this if this method is used as a callback.
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
index eb76c4650b5..3458b4a92d2 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
@@ -16,7 +16,6 @@
#include "base/bind.h"
#include "base/command_line.h"
-#include "base/message_loop/message_loop.h"
#include "base/numerics/safe_conversions.h"
#include "base/posix/eintr_wrapper.h"
#include "base/single_thread_task_runner.h"
@@ -365,7 +364,11 @@ void V4L2VideoDecodeAccelerator::AssignPictureBuffersTask(
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = buffers.size();
reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- reqbufs.memory = V4L2_MEMORY_MMAP;
+ if (!image_processor_device_ && output_mode_ == Config::OutputMode::IMPORT) {
+ reqbufs.memory = V4L2_MEMORY_DMABUF;
+ } else {
+ reqbufs.memory = V4L2_MEMORY_MMAP;
+ }
IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);
if (reqbufs.count != buffers.size()) {
@@ -434,13 +437,6 @@ void V4L2VideoDecodeAccelerator::AssignPictureBuffersTask(
}
if (output_mode_ == Config::OutputMode::ALLOCATE) {
- DCHECK_EQ(kAwaitingPictureBuffers, decoder_state_);
- DVLOGF(3) << "Change state to kDecoding";
- decoder_state_ = kDecoding;
- if (reset_pending_) {
- FinishReset();
- return;
- }
ScheduleDecodeBufferTaskIfNeeded();
}
}
@@ -520,6 +516,10 @@ void V4L2VideoDecodeAccelerator::AssignEGLImage(
buffer_index),
0);
output_record.egl_image = egl_image;
+ if (output_mode_ == Config::OutputMode::IMPORT) {
+ DCHECK(output_record.output_fds.empty());
+ output_record.output_fds.swap(dmabuf_fds);
+ }
free_output_buffers_.push_back(buffer_index);
if (decoder_state_ != kChangingResolution) {
Enqueue();
@@ -610,8 +610,11 @@ void V4L2VideoDecodeAccelerator::ImportBufferForPictureTask(
NOTIFY_ERROR(INVALID_ARGUMENT);
return;
}
- int adjusted_coded_width = stride * 8 / plane_horiz_bits_per_pixel;
+ if (reset_pending_) {
+ FinishReset();
+ }
+ int adjusted_coded_width = stride * 8 / plane_horiz_bits_per_pixel;
if (image_processor_device_ && !image_processor_) {
// This is the first buffer import. Create the image processor and change
// the decoder state. The client may adjust the coded width. We don't have
@@ -624,13 +627,12 @@ void V4L2VideoDecodeAccelerator::ImportBufferForPictureTask(
if (!CreateImageProcessor())
return;
DCHECK_EQ(kAwaitingPictureBuffers, decoder_state_);
- DVLOGF(3) << "Change state to kDecoding";
+ }
+ DCHECK_EQ(egl_image_size_.width(), adjusted_coded_width);
+
+ if (decoder_state_ == kAwaitingPictureBuffers) {
decoder_state_ = kDecoding;
- if (reset_pending_) {
- FinishReset();
- }
- } else {
- DCHECK_EQ(egl_image_size_.width(), adjusted_coded_width);
+ DVLOGF(3) << "Change state to kDecoding";
}
size_t index = iter - output_buffer_map_.begin();
@@ -655,7 +657,7 @@ void V4L2VideoDecodeAccelerator::ImportBufferForPictureTask(
} else {
// No need for an EGLImage, start using this buffer now.
DCHECK_EQ(egl_image_planes_count_, dmabuf_fds.size());
- iter->processor_output_fds.swap(dmabuf_fds);
+ iter->output_fds.swap(dmabuf_fds);
free_output_buffers_.push_back(index);
if (decoder_state_ != kChangingResolution) {
Enqueue();
@@ -1540,7 +1542,15 @@ bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
sizeof(struct v4l2_plane) * output_planes_count_);
qbuf.index = buffer;
qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- qbuf.memory = V4L2_MEMORY_MMAP;
+ if (!image_processor_device_ && output_mode_ == Config::OutputMode::IMPORT) {
+ DCHECK_EQ(output_planes_count_, output_record.output_fds.size());
+ for (size_t i = 0; i < output_planes_count_; ++i) {
+ qbuf_planes[i].m.fd = output_record.output_fds[i].get();
+ }
+ qbuf.memory = V4L2_MEMORY_DMABUF;
+ } else {
+ qbuf.memory = V4L2_MEMORY_MMAP;
+ }
qbuf.m.planes = qbuf_planes.get();
qbuf.length = output_planes_count_;
DVLOGF(4) << "qbuf.index=" << qbuf.index;
@@ -2292,11 +2302,6 @@ bool V4L2VideoDecodeAccelerator::SetupFormats() {
}
egl_image_device_ = image_processor_device_;
} else {
- if (output_mode_ == Config::OutputMode::IMPORT) {
- VLOGF(1) << "Import mode without image processor is not implemented "
- << "yet.";
- return false;
- }
egl_image_format_fourcc_ = output_format_fourcc_;
egl_image_device_ = device_;
}
@@ -2433,12 +2438,11 @@ bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id,
coded_size_, gfx::Rect(visible_size_), visible_size_, processor_input_fds,
base::TimeDelta());
- std::vector<base::ScopedFD> processor_output_fds;
+ std::vector<base::ScopedFD> output_fds;
if (output_mode_ == Config::OutputMode::IMPORT) {
- for (auto& fd : output_record.processor_output_fds) {
- processor_output_fds.push_back(
- base::ScopedFD(HANDLE_EINTR(dup(fd.get()))));
- if (!processor_output_fds.back().is_valid()) {
+ for (auto& fd : output_record.output_fds) {
+ output_fds.push_back(base::ScopedFD(HANDLE_EINTR(dup(fd.get()))));
+ if (!output_fds.back().is_valid()) {
VPLOGF(1) << "Failed duplicating a dmabuf fd";
return false;
}
@@ -2447,7 +2451,7 @@ bool V4L2VideoDecodeAccelerator::ProcessFrame(int32_t bitstream_buffer_id,
// Unretained is safe because |this| owns image processor and there will
// be no callbacks after processor destroys.
image_processor_->Process(
- input_frame, output_buffer_index, std::move(processor_output_fds),
+ input_frame, output_buffer_index, std::move(output_fds),
base::Bind(&V4L2VideoDecodeAccelerator::FrameProcessed,
base::Unretained(this), bitstream_buffer_id));
return true;
@@ -2565,7 +2569,11 @@ bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = 0;
reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- reqbufs.memory = V4L2_MEMORY_MMAP;
+ if (!image_processor_device_ && output_mode_ == Config::OutputMode::IMPORT) {
+ reqbufs.memory = V4L2_MEMORY_DMABUF;
+ } else {
+ reqbufs.memory = V4L2_MEMORY_MMAP;
+ }
if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
VPLOGF(1) << "ioctl() failed: VIDIOC_REQBUFS";
NOTIFY_ERROR(PLATFORM_FAILURE);
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
index 8e150ba40f6..21203f6ad4e 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
@@ -199,8 +199,8 @@ class MEDIA_GPU_EXPORT V4L2VideoDecodeAccelerator
// from. See TextureManager for details.
// Input fds of the processor. Exported from the decoder.
std::vector<base::ScopedFD> processor_input_fds;
- // Output fds of the processor. Used only when OutputMode is IMPORT.
- std::vector<base::ScopedFD> processor_output_fds;
+ // Output fds. Used only when OutputMode is IMPORT.
+ std::vector<base::ScopedFD> output_fds;
};
//
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
index 3aa0ea12300..56f193cb1fb 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
@@ -1014,10 +1014,8 @@ void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask(
VLOGF(2) << "bitrate=" << bitrate << ", framerate=" << framerate;
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- if (bitrate < 1)
- bitrate = 1;
- if (framerate < 1)
- framerate = 1;
+ DCHECK_GT(bitrate, 0u);
+ DCHECK_GT(framerate, 0u);
std::vector<struct v4l2_ext_control> ctrls;
struct v4l2_ext_control ctrl;
diff --git a/chromium/media/gpu/vaapi/BUILD.gn b/chromium/media/gpu/vaapi/BUILD.gn
index a3eaed3ac8f..d69ad3bd898 100644
--- a/chromium/media/gpu/vaapi/BUILD.gn
+++ b/chromium/media/gpu/vaapi/BUILD.gn
@@ -53,6 +53,10 @@ action("libva_generate_stubs") {
source_set("vaapi") {
defines = [ "MEDIA_GPU_IMPLEMENTATION" ]
sources = [
+ "accelerated_video_encoder.cc",
+ "accelerated_video_encoder.h",
+ "h264_encoder.cc",
+ "h264_encoder.h",
"va_surface.cc",
"va_surface.h",
"vaapi_common.cc",
@@ -81,6 +85,8 @@ source_set("vaapi") {
"vaapi_vp9_accelerator.h",
"vaapi_wrapper.cc",
"vaapi_wrapper.h",
+ "vp8_encoder.cc",
+ "vp8_encoder.h",
]
sources += get_target_outputs(":libva_generate_stubs")
diff --git a/chromium/media/gpu/vaapi/accelerated_video_encoder.cc b/chromium/media/gpu/vaapi/accelerated_video_encoder.cc
new file mode 100644
index 00000000000..0334356e0ff
--- /dev/null
+++ b/chromium/media/gpu/vaapi/accelerated_video_encoder.cc
@@ -0,0 +1,50 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/accelerated_video_encoder.h"
+
+#include "media/base/video_frame.h"
+
+namespace media {
+
+AcceleratedVideoEncoder::EncodeJob::EncodeJob(
+ scoped_refptr<VideoFrame> input_frame,
+ bool keyframe,
+ base::OnceClosure execute_cb)
+ : input_frame_(input_frame),
+ timestamp_(input_frame->timestamp()),
+ keyframe_(keyframe),
+ execute_callback_(std::move(execute_cb)) {
+ DCHECK(!execute_callback_.is_null());
+}
+
+AcceleratedVideoEncoder::EncodeJob::~EncodeJob() = default;
+
+VaapiEncodeJob* AcceleratedVideoEncoder::EncodeJob::AsVaapiEncodeJob() {
+ CHECK(false);
+ return nullptr;
+}
+
+void AcceleratedVideoEncoder::EncodeJob::AddSetupCallback(
+ base::OnceClosure cb) {
+ DCHECK(!cb.is_null());
+ setup_callbacks_.push(std::move(cb));
+}
+
+void AcceleratedVideoEncoder::EncodeJob::AddReferencePicture(
+ scoped_refptr<CodecPicture> ref_pic) {
+ DCHECK(ref_pic);
+ reference_pictures_.push_back(ref_pic);
+}
+
+void AcceleratedVideoEncoder::EncodeJob::Execute() {
+ while (!setup_callbacks_.empty()) {
+ std::move(setup_callbacks_.front()).Run();
+ setup_callbacks_.pop();
+ }
+
+ std::move(execute_callback_).Run();
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/accelerated_video_encoder.h b/chromium/media/gpu/vaapi/accelerated_video_encoder.h
new file mode 100644
index 00000000000..3564b248a32
--- /dev/null
+++ b/chromium/media/gpu/vaapi/accelerated_video_encoder.h
@@ -0,0 +1,148 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_VAAPI_ACCELERATED_VIDEO_ENCODER_H_
+#define MEDIA_GPU_VAAPI_ACCELERATED_VIDEO_ENCODER_H_
+
+#include <vector>
+
+#include "base/callback.h"
+#include "base/containers/queue.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/time/time.h"
+#include "media/base/video_codecs.h"
+#include "media/gpu/codec_picture.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+class VaapiEncodeJob;
+class VideoFrame;
+
+// An AcceleratedVideoEncoder (AVE) performs high-level, platform-independent
+// encoding process tasks, such as managing codec state, reference frames, etc.,
+// but may require support from an external accelerator (typically a hardware
+// accelerator) to offload some stages of the actual encoding process, using
+// the parameters that AVE prepares beforehand.
+//
+// For each frame to be encoded, clients provide an EncodeJob object to be set
+// up by an AVE with job parameters, and execute the job afterwards. Any
+// resources required for the job are also provided by the clients, and
+// associated with the EncodeJob object.
+class AcceleratedVideoEncoder {
+ public:
+ AcceleratedVideoEncoder() = default;
+ virtual ~AcceleratedVideoEncoder() = default;
+
+ // An abstraction of an encode job for one frame. Parameters required for an
+ // EncodeJob to be executed are prepared by an AcceleratedVideoEncoder, while
+ // the accelerator-specific callbacks required to set up and execute it are
+ // provided by the accelerator itself, based on these parameters.
+ // Accelerators are also responsible for providing any resources (such as
+ // memory for output and reference pictures, etc.) as needed.
+ class EncodeJob : public base::RefCounted<EncodeJob> {
+ public:
+ // Creates an EncodeJob to encode |input_frame|, which will be executed
+ // by calling |execute_cb|. If |keyframe| is true, requests this job
+ // to produce a keyframe.
+ EncodeJob(scoped_refptr<VideoFrame> input_frame,
+ bool keyframe,
+ base::OnceClosure execute_cb);
+
+ // Schedules a callback to be run immediately before this job is executed.
+ // Can be called multiple times to schedule multiple callbacks, and all
+ // of them will be run, in order added.
+ // Callbacks can be used to e.g. set up hardware parameters before the job
+ // is executed.
+ void AddSetupCallback(base::OnceClosure cb);
+
+ // Adds |ref_pic| to the list of pictures to be used as reference pictures
+ // for this frame, to ensure they remain valid until the job is executed
+ // (or discarded).
+ void AddReferencePicture(scoped_refptr<CodecPicture> ref_pic);
+
+ // Runs all setup callbacks previously scheduled, if any, in order added,
+ // and executes the job by calling the execute callback. Note that the
+ // actual job execution may be asynchronous, and returning from this method
+ // does not have to indicate that the job has been finished. The execute
+ // callback is responsible for retaining references to any resources that
+ // may be in use after this method returns however, so it is safe to release
+ // the EncodeJob object itself immediately after this method returns.
+ void Execute();
+
+ // Requests this job to produce a keyframe; requesting a keyframe may not
+ // always result in one being produced by the encoder (e.g. if it would
+ // not fit in the bitrate budget).
+ void ProduceKeyframe() { keyframe_ = true; }
+
+ // Returns true if this job has been requested to produce a keyframe.
+ bool IsKeyframeRequested() const { return keyframe_; }
+
+ // Returns the timestamp associated with this job.
+ base::TimeDelta timestamp() const { return timestamp_; }
+
+ virtual VaapiEncodeJob* AsVaapiEncodeJob();
+
+ protected:
+ friend class base::RefCounted<EncodeJob>;
+ virtual ~EncodeJob();
+
+ private:
+ // Input VideoFrame to be encoded.
+ const scoped_refptr<VideoFrame> input_frame_;
+
+ // Source timestamp for |input_frame_|.
+ const base::TimeDelta timestamp_;
+
+ // True if this job is to produce a keyframe.
+ bool keyframe_;
+
+ // Callbacks to be run (in the same order as the order of AddSetupCallback()
+ // calls) to set up the job.
+ base::queue<base::OnceClosure> setup_callbacks_;
+
+ // Callback to be run to execute this job.
+ base::OnceClosure execute_callback_;
+
+ // Reference pictures required for this job.
+ std::vector<scoped_refptr<CodecPicture>> reference_pictures_;
+
+ DISALLOW_COPY_AND_ASSIGN(EncodeJob);
+ };
+
+ // Initializes the encoder to encode frames of |visible_size| into a stream
+ // for |profile|, at |initial_bitrate| and |initial_framerate|.
+ // Returns false if the requested set of parameters is not supported,
+ // true on success.
+ virtual bool Initialize(const gfx::Size& visible_size,
+ VideoCodecProfile profile,
+ uint32_t initial_bitrate,
+ uint32_t initial_framerate) = 0;
+
+ // Updates current framerate and/or bitrate to |framerate| in FPS
+ // and |bitrate| in bps.
+ virtual bool UpdateRates(uint32_t bitrate, uint32_t framerate) = 0;
+
+ // Returns coded size for the input buffers required to encode, in pixels;
+ // typically visible size adjusted to match codec alignment requirements.
+ virtual gfx::Size GetCodedSize() const = 0;
+
+ // Returns minimum size in bytes for bitstream buffers required to fit output
+ // stream buffers produced.
+ virtual size_t GetBitstreamBufferSize() const = 0;
+
+ // Returns maximum number of reference frames that may be used by the
+ // encoder to encode one frame. The client should be able to provide up to
+ // at least this many frames simultaneously for encode to make progress.
+ virtual size_t GetMaxNumOfRefFrames() const = 0;
+
+ // Prepares a new |encode_job| to be executed in Accelerator and returns true
+ // on success. The caller may then call Execute() on the job to run it.
+ virtual bool PrepareEncodeJob(EncodeJob* encode_job) = 0;
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_VAAPI_ACCELERATED_VIDEO_ENCODER_H_
diff --git a/chromium/media/gpu/vaapi/h264_encoder.cc b/chromium/media/gpu/vaapi/h264_encoder.cc
new file mode 100644
index 00000000000..d865ac8b31e
--- /dev/null
+++ b/chromium/media/gpu/vaapi/h264_encoder.cc
@@ -0,0 +1,482 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/h264_encoder.h"
+
+#include "base/bits.h"
+#include "base/stl_util.h"
+
+#define DVLOGF(level) DVLOG(level) << __func__ << "(): "
+
+namespace media {
+namespace {
+// An IDR every 2048 frames, an I frame every 256 and no B frames.
+// We choose IDR period to equal MaxFrameNum so it must be a power of 2.
+constexpr int kIDRPeriod = 2048;
+constexpr int kIPeriod = 256;
+constexpr int kIPPeriod = 1;
+
+constexpr int kDefaultQP = 26;
+
+// Subjectively chosen bitrate window size for rate control, in ms.
+constexpr int kCPBWindowSizeMs = 1500;
+
+// Subjectively chosen.
+constexpr size_t kMaxNumReferenceFrames = 4;
+constexpr size_t kMaxRefIdxL0Size = kMaxNumReferenceFrames;
+constexpr size_t kMaxRefIdxL1Size = 0;
+
+// HRD parameters (ch. E.2.2 in H264 spec).
+constexpr int kBitRateScale = 0; // bit_rate_scale for SPS HRD parameters.
+constexpr int kCPBSizeScale = 0; // cpb_size_scale for SPS HRD parameters.
+
+// Default to H264 profile 4.1.
+constexpr int kDefaultLevelIDC = 41;
+
+// 4:2:0
+constexpr int kChromaFormatIDC = 1;
+} // namespace
+
+H264Encoder::EncodeParams::EncodeParams()
+ : idr_period_frames(kIDRPeriod),
+ i_period_frames(kIPeriod),
+ ip_period_frames(kIPPeriod),
+ bitrate_bps(0),
+ framerate(0),
+ cpb_window_size_ms(kCPBWindowSizeMs),
+ cpb_size_bits(0),
+ qp(kDefaultQP) {}
+
+H264Encoder::Accelerator::~Accelerator() = default;
+
+H264Encoder::H264Encoder(std::unique_ptr<Accelerator> accelerator)
+ : packed_sps_(new H264BitstreamBuffer()),
+ packed_pps_(new H264BitstreamBuffer()),
+ accelerator_(std::move(accelerator)) {}
+
+H264Encoder::~H264Encoder() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+}
+
+bool H264Encoder::Initialize(const gfx::Size& visible_size,
+ VideoCodecProfile profile,
+ uint32_t initial_bitrate,
+ uint32_t initial_framerate) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ switch (profile) {
+ case H264PROFILE_BASELINE:
+ case H264PROFILE_MAIN:
+ case H264PROFILE_HIGH:
+ break;
+
+ default:
+ NOTIMPLEMENTED() << "Unsupported profile " << GetProfileName(profile);
+ return false;
+ }
+
+ DCHECK(!visible_size.IsEmpty());
+ visible_size_ = visible_size;
+ // For 4:2:0, the pixel sizes have to be even.
+ DCHECK_EQ(visible_size_.width() % 2, 0);
+ DCHECK_EQ(visible_size_.height() % 2, 0);
+ constexpr size_t kH264MacroblockSizeInPixels = 16;
+ coded_size_ = gfx::Size(
+ base::bits::Align(visible_size_.width(), kH264MacroblockSizeInPixels),
+ base::bits::Align(visible_size_.height(), kH264MacroblockSizeInPixels));
+ mb_width_ = coded_size_.width() / kH264MacroblockSizeInPixels;
+ mb_height_ = coded_size_.height() / kH264MacroblockSizeInPixels;
+
+ profile_ = profile;
+ if (!UpdateRates(initial_bitrate, initial_framerate))
+ return false;
+
+ UpdateSPS();
+ UpdatePPS();
+
+ return true;
+}
+
+gfx::Size H264Encoder::GetCodedSize() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(!coded_size_.IsEmpty());
+
+ return coded_size_;
+}
+
+size_t H264Encoder::GetBitstreamBufferSize() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(!coded_size_.IsEmpty());
+
+ return coded_size_.GetArea();
+}
+
+size_t H264Encoder::GetMaxNumOfRefFrames() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ return kMaxNumReferenceFrames;
+}
+
+bool H264Encoder::PrepareEncodeJob(EncodeJob* encode_job) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ scoped_refptr<H264Picture> pic = accelerator_->GetPicture(encode_job);
+ DCHECK(pic);
+
+ if (encode_job->IsKeyframeRequested() || encoding_parameters_changed_)
+ frame_num_ = 0;
+
+ pic->frame_num = frame_num_++;
+ frame_num_ %= curr_params_.idr_period_frames;
+
+ if (pic->frame_num == 0) {
+ pic->idr = true;
+ // H264 spec mandates idr_pic_id to differ between two consecutive IDRs.
+ idr_pic_id_ ^= 1;
+ pic->idr_pic_id = idr_pic_id_;
+ ref_pic_list0_.clear();
+
+ encoding_parameters_changed_ = false;
+ encode_job->ProduceKeyframe();
+ }
+
+ if (pic->frame_num % curr_params_.i_period_frames == 0)
+ pic->type = H264SliceHeader::kISlice;
+ else
+ pic->type = H264SliceHeader::kPSlice;
+
+ if (curr_params_.ip_period_frames != 1) {
+ NOTIMPLEMENTED() << "B frames not implemented";
+ return false;
+ }
+
+ pic->ref = true;
+ pic->pic_order_cnt = pic->frame_num * 2;
+ pic->top_field_order_cnt = pic->pic_order_cnt;
+ pic->pic_order_cnt_lsb = pic->pic_order_cnt;
+
+ DVLOGF(4) << "Starting a new frame, type: " << pic->type
+ << (encode_job->IsKeyframeRequested() ? " (keyframe)" : "")
+ << " frame_num: " << pic->frame_num
+ << " POC: " << pic->pic_order_cnt;
+
+ if (!accelerator_->SubmitFrameParameters(
+ encode_job, curr_params_, current_sps_, current_pps_, pic,
+ ref_pic_list0_, std::list<scoped_refptr<H264Picture>>())) {
+ DVLOGF(1) << "Failed submitting frame parameters";
+ return false;
+ }
+
+ if (pic->type == H264SliceHeader::kISlice) {
+ if (!accelerator_->SubmitPackedHeaders(encode_job, packed_sps_,
+ packed_pps_)) {
+ DVLOGF(1) << "Failed submitting keyframe headers";
+ return false;
+ }
+ }
+
+ for (const auto& ref_pic : ref_pic_list0_)
+ encode_job->AddReferencePicture(ref_pic);
+
+ // Store the picture on the list of reference pictures and keep the list
+ // below maximum size, dropping oldest references.
+ if (pic->ref) {
+ ref_pic_list0_.push_front(pic);
+ const size_t max_num_ref_frames =
+ base::checked_cast<size_t>(current_sps_.max_num_ref_frames);
+ while (ref_pic_list0_.size() > max_num_ref_frames)
+ ref_pic_list0_.pop_back();
+ }
+
+ return true;
+}
+
+bool H264Encoder::UpdateRates(uint32_t bitrate, uint32_t framerate) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ if (bitrate == 0 || framerate == 0)
+ return false;
+
+ if (curr_params_.bitrate_bps == bitrate &&
+ curr_params_.framerate == framerate) {
+ return true;
+ }
+
+ curr_params_.bitrate_bps = bitrate;
+ curr_params_.framerate = framerate;
+ curr_params_.cpb_size_bits =
+ curr_params_.bitrate_bps * curr_params_.cpb_window_size_ms / 1000;
+
+ UpdateSPS();
+ return true;
+}
+
+void H264Encoder::UpdateSPS() {
+ memset(&current_sps_, 0, sizeof(H264SPS));
+
+ // Spec A.2 and A.3.
+ switch (profile_) {
+ case H264PROFILE_BASELINE:
+ // Due to https://crbug.com/345569, we don't distinguish between
+ // constrained and non-constrained baseline profiles. Since many codecs
+ // can't do non-constrained, and constrained is usually what we mean (and
+ // it's a subset of non-constrained), default to it.
+ current_sps_.profile_idc = H264SPS::kProfileIDCBaseline;
+ current_sps_.constraint_set0_flag = true;
+ break;
+ case H264PROFILE_MAIN:
+ current_sps_.profile_idc = H264SPS::kProfileIDCMain;
+ current_sps_.constraint_set1_flag = true;
+ break;
+ case H264PROFILE_HIGH:
+ current_sps_.profile_idc = H264SPS::kProfileIDCHigh;
+ break;
+ default:
+ NOTREACHED();
+ return;
+ }
+
+ current_sps_.level_idc = kDefaultLevelIDC;
+ current_sps_.seq_parameter_set_id = 0;
+ current_sps_.chroma_format_idc = kChromaFormatIDC;
+
+ DCHECK_GE(curr_params_.idr_period_frames, 16u)
+ << "idr_period_frames must be >= 16";
+ current_sps_.log2_max_frame_num_minus4 =
+ base::bits::Log2Ceiling(curr_params_.idr_period_frames) - 4;
+ current_sps_.pic_order_cnt_type = 0;
+ current_sps_.log2_max_pic_order_cnt_lsb_minus4 =
+ base::bits::Log2Ceiling(curr_params_.idr_period_frames * 2) - 4;
+ current_sps_.max_num_ref_frames = kMaxRefIdxL0Size;
+
+ current_sps_.frame_mbs_only_flag = true;
+
+ DCHECK_GT(mb_width_, 0u);
+ DCHECK_GT(mb_height_, 0u);
+ current_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
+ DCHECK(current_sps_.frame_mbs_only_flag);
+ current_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;
+
+ if (visible_size_ != coded_size_) {
+ // Visible size differs from coded size, fill crop information.
+ current_sps_.frame_cropping_flag = true;
+ DCHECK(!current_sps_.separate_colour_plane_flag);
+ // Spec table 6-1. Only 4:2:0 for now.
+ DCHECK_EQ(current_sps_.chroma_format_idc, 1);
+ // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
+ const unsigned int crop_unit_x = 2;
+ const unsigned int crop_unit_y = 2 * (2 - current_sps_.frame_mbs_only_flag);
+ current_sps_.frame_crop_left_offset = 0;
+ current_sps_.frame_crop_right_offset =
+ (coded_size_.width() - visible_size_.width()) / crop_unit_x;
+ current_sps_.frame_crop_top_offset = 0;
+ current_sps_.frame_crop_bottom_offset =
+ (coded_size_.height() - visible_size_.height()) / crop_unit_y;
+ }
+
+ current_sps_.vui_parameters_present_flag = true;
+ current_sps_.timing_info_present_flag = true;
+ current_sps_.num_units_in_tick = 1;
+ current_sps_.time_scale =
+ curr_params_.framerate * 2; // See equation D-2 in spec.
+ current_sps_.fixed_frame_rate_flag = true;
+
+ current_sps_.nal_hrd_parameters_present_flag = true;
+ // H.264 spec ch. E.2.2.
+ current_sps_.cpb_cnt_minus1 = 0;
+ current_sps_.bit_rate_scale = kBitRateScale;
+ current_sps_.cpb_size_scale = kCPBSizeScale;
+ current_sps_.bit_rate_value_minus1[0] =
+ (curr_params_.bitrate_bps >>
+ (kBitRateScale + H264SPS::kBitRateScaleConstantTerm)) -
+ 1;
+ current_sps_.cpb_size_value_minus1[0] =
+ (curr_params_.cpb_size_bits >>
+ (kCPBSizeScale + H264SPS::kCPBSizeScaleConstantTerm)) -
+ 1;
+ current_sps_.cbr_flag[0] = true;
+ current_sps_.initial_cpb_removal_delay_length_minus_1 =
+ H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
+ current_sps_.cpb_removal_delay_length_minus1 =
+ H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
+ current_sps_.dpb_output_delay_length_minus1 =
+ H264SPS::kDefaultDPBOutputDelayLength - 1;
+ current_sps_.time_offset_length = H264SPS::kDefaultTimeOffsetLength;
+ current_sps_.low_delay_hrd_flag = false;
+
+ GeneratePackedSPS();
+ encoding_parameters_changed_ = true;
+}
+
+void H264Encoder::UpdatePPS() {
+ memset(&current_pps_, 0, sizeof(H264PPS));
+
+ current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id;
+ current_pps_.pic_parameter_set_id = 0;
+
+ current_pps_.entropy_coding_mode_flag =
+ current_sps_.profile_idc >= H264SPS::kProfileIDCMain;
+
+ DCHECK_GT(kMaxRefIdxL0Size, 0u);
+ current_pps_.num_ref_idx_l0_default_active_minus1 = kMaxRefIdxL0Size - 1;
+ current_pps_.num_ref_idx_l1_default_active_minus1 =
+ kMaxRefIdxL1Size > 0 ? kMaxRefIdxL1Size - 1 : kMaxRefIdxL1Size;
+ DCHECK_LE(curr_params_.qp, 51);
+ current_pps_.pic_init_qp_minus26 = curr_params_.qp - 26;
+ current_pps_.deblocking_filter_control_present_flag = true;
+ current_pps_.transform_8x8_mode_flag =
+ (current_sps_.profile_idc == H264SPS::kProfileIDCHigh);
+
+ GeneratePackedPPS();
+ encoding_parameters_changed_ = true;
+}
+
+void H264Encoder::GeneratePackedSPS() {
+ packed_sps_->Reset();
+
+ packed_sps_->BeginNALU(H264NALU::kSPS, 3);
+
+ packed_sps_->AppendBits(8, current_sps_.profile_idc);
+ packed_sps_->AppendBool(current_sps_.constraint_set0_flag);
+ packed_sps_->AppendBool(current_sps_.constraint_set1_flag);
+ packed_sps_->AppendBool(current_sps_.constraint_set2_flag);
+ packed_sps_->AppendBool(current_sps_.constraint_set3_flag);
+ packed_sps_->AppendBool(current_sps_.constraint_set4_flag);
+ packed_sps_->AppendBool(current_sps_.constraint_set5_flag);
+ packed_sps_->AppendBits(2, 0); // reserved_zero_2bits
+ packed_sps_->AppendBits(8, current_sps_.level_idc);
+ packed_sps_->AppendUE(current_sps_.seq_parameter_set_id);
+
+ if (current_sps_.profile_idc == H264SPS::kProfileIDCHigh) {
+ packed_sps_->AppendUE(current_sps_.chroma_format_idc);
+ if (current_sps_.chroma_format_idc == 3)
+ packed_sps_->AppendBool(current_sps_.separate_colour_plane_flag);
+ packed_sps_->AppendUE(current_sps_.bit_depth_luma_minus8);
+ packed_sps_->AppendUE(current_sps_.bit_depth_chroma_minus8);
+ packed_sps_->AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag);
+ packed_sps_->AppendBool(current_sps_.seq_scaling_matrix_present_flag);
+ CHECK(!current_sps_.seq_scaling_matrix_present_flag);
+ }
+
+ packed_sps_->AppendUE(current_sps_.log2_max_frame_num_minus4);
+ packed_sps_->AppendUE(current_sps_.pic_order_cnt_type);
+ if (current_sps_.pic_order_cnt_type == 0)
+ packed_sps_->AppendUE(current_sps_.log2_max_pic_order_cnt_lsb_minus4);
+ else if (current_sps_.pic_order_cnt_type == 1)
+ NOTREACHED();
+
+ packed_sps_->AppendUE(current_sps_.max_num_ref_frames);
+ packed_sps_->AppendBool(current_sps_.gaps_in_frame_num_value_allowed_flag);
+ packed_sps_->AppendUE(current_sps_.pic_width_in_mbs_minus1);
+ packed_sps_->AppendUE(current_sps_.pic_height_in_map_units_minus1);
+
+ packed_sps_->AppendBool(current_sps_.frame_mbs_only_flag);
+ if (!current_sps_.frame_mbs_only_flag)
+ packed_sps_->AppendBool(current_sps_.mb_adaptive_frame_field_flag);
+
+ packed_sps_->AppendBool(current_sps_.direct_8x8_inference_flag);
+
+ packed_sps_->AppendBool(current_sps_.frame_cropping_flag);
+ if (current_sps_.frame_cropping_flag) {
+ packed_sps_->AppendUE(current_sps_.frame_crop_left_offset);
+ packed_sps_->AppendUE(current_sps_.frame_crop_right_offset);
+ packed_sps_->AppendUE(current_sps_.frame_crop_top_offset);
+ packed_sps_->AppendUE(current_sps_.frame_crop_bottom_offset);
+ }
+
+ packed_sps_->AppendBool(current_sps_.vui_parameters_present_flag);
+ if (current_sps_.vui_parameters_present_flag) {
+ packed_sps_->AppendBool(false); // aspect_ratio_info_present_flag
+ packed_sps_->AppendBool(false); // overscan_info_present_flag
+ packed_sps_->AppendBool(false); // video_signal_type_present_flag
+ packed_sps_->AppendBool(false); // chroma_loc_info_present_flag
+
+ packed_sps_->AppendBool(current_sps_.timing_info_present_flag);
+ if (current_sps_.timing_info_present_flag) {
+ packed_sps_->AppendBits(32, current_sps_.num_units_in_tick);
+ packed_sps_->AppendBits(32, current_sps_.time_scale);
+ packed_sps_->AppendBool(current_sps_.fixed_frame_rate_flag);
+ }
+
+ packed_sps_->AppendBool(current_sps_.nal_hrd_parameters_present_flag);
+ if (current_sps_.nal_hrd_parameters_present_flag) {
+ packed_sps_->AppendUE(current_sps_.cpb_cnt_minus1);
+ packed_sps_->AppendBits(4, current_sps_.bit_rate_scale);
+ packed_sps_->AppendBits(4, current_sps_.cpb_size_scale);
+ CHECK_LT(base::checked_cast<size_t>(current_sps_.cpb_cnt_minus1),
+ base::size(current_sps_.bit_rate_value_minus1));
+ for (int i = 0; i <= current_sps_.cpb_cnt_minus1; ++i) {
+ packed_sps_->AppendUE(current_sps_.bit_rate_value_minus1[i]);
+ packed_sps_->AppendUE(current_sps_.cpb_size_value_minus1[i]);
+ packed_sps_->AppendBool(current_sps_.cbr_flag[i]);
+ }
+ packed_sps_->AppendBits(
+ 5, current_sps_.initial_cpb_removal_delay_length_minus_1);
+ packed_sps_->AppendBits(5, current_sps_.cpb_removal_delay_length_minus1);
+ packed_sps_->AppendBits(5, current_sps_.dpb_output_delay_length_minus1);
+ packed_sps_->AppendBits(5, current_sps_.time_offset_length);
+ }
+
+ packed_sps_->AppendBool(false); // vcl_hrd_parameters_flag
+ if (current_sps_.nal_hrd_parameters_present_flag)
+ packed_sps_->AppendBool(current_sps_.low_delay_hrd_flag);
+
+ packed_sps_->AppendBool(false); // pic_struct_present_flag
+ packed_sps_->AppendBool(true); // bitstream_restriction_flag
+
+ packed_sps_->AppendBool(false); // motion_vectors_over_pic_boundaries_flag
+ packed_sps_->AppendUE(2); // max_bytes_per_pic_denom
+ packed_sps_->AppendUE(1); // max_bits_per_mb_denom
+ packed_sps_->AppendUE(16); // log2_max_mv_length_horizontal
+ packed_sps_->AppendUE(16); // log2_max_mv_length_vertical
+
+ // Explicitly set max_num_reorder_frames to 0 to allow the decoder to
+ // output pictures early.
+ packed_sps_->AppendUE(0); // max_num_reorder_frames
+
+ // The value of max_dec_frame_buffering shall be greater than or equal to
+ // max_num_ref_frames.
+ const unsigned int max_dec_frame_buffering =
+ current_sps_.max_num_ref_frames;
+ packed_sps_->AppendUE(max_dec_frame_buffering);
+ }
+
+ packed_sps_->FinishNALU();
+}
+
+void H264Encoder::GeneratePackedPPS() {
+ packed_pps_->Reset();
+
+ packed_pps_->BeginNALU(H264NALU::kPPS, 3);
+
+ packed_pps_->AppendUE(current_pps_.pic_parameter_set_id);
+ packed_pps_->AppendUE(current_pps_.seq_parameter_set_id);
+ packed_pps_->AppendBool(current_pps_.entropy_coding_mode_flag);
+ packed_pps_->AppendBool(
+ current_pps_.bottom_field_pic_order_in_frame_present_flag);
+ CHECK_EQ(current_pps_.num_slice_groups_minus1, 0);
+ packed_pps_->AppendUE(current_pps_.num_slice_groups_minus1);
+
+ packed_pps_->AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1);
+ packed_pps_->AppendUE(current_pps_.num_ref_idx_l1_default_active_minus1);
+
+ packed_pps_->AppendBool(current_pps_.weighted_pred_flag);
+ packed_pps_->AppendBits(2, current_pps_.weighted_bipred_idc);
+
+ packed_pps_->AppendSE(current_pps_.pic_init_qp_minus26);
+ packed_pps_->AppendSE(current_pps_.pic_init_qs_minus26);
+ packed_pps_->AppendSE(current_pps_.chroma_qp_index_offset);
+
+ packed_pps_->AppendBool(current_pps_.deblocking_filter_control_present_flag);
+ packed_pps_->AppendBool(current_pps_.constrained_intra_pred_flag);
+ packed_pps_->AppendBool(current_pps_.redundant_pic_cnt_present_flag);
+
+ packed_pps_->AppendBool(current_pps_.transform_8x8_mode_flag);
+ packed_pps_->AppendBool(current_pps_.pic_scaling_matrix_present_flag);
+ DCHECK(!current_pps_.pic_scaling_matrix_present_flag);
+ packed_pps_->AppendSE(current_pps_.second_chroma_qp_index_offset);
+
+ packed_pps_->FinishNALU();
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/h264_encoder.h b/chromium/media/gpu/vaapi/h264_encoder.h
new file mode 100644
index 00000000000..83e9613c618
--- /dev/null
+++ b/chromium/media/gpu/vaapi/h264_encoder.h
@@ -0,0 +1,163 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_VAAPI_H264_ENCODER_H_
+#define MEDIA_GPU_VAAPI_H264_ENCODER_H_
+
+#include <stddef.h>
+#include <list>
+
+#include "base/macros.h"
+#include "base/sequence_checker.h"
+#include "media/filters/h264_bitstream_buffer.h"
+#include "media/gpu/h264_dpb.h"
+#include "media/gpu/vaapi/accelerated_video_encoder.h"
+
+namespace media {
+
+// This class provides an H264 encoder functionality, generating stream headers,
+// managing encoder state, reference frames, and other codec parameters, while
+// requiring support from an Accelerator to encode frame data based on these
+// parameters.
+//
+// This class must be created, called and destroyed on a single sequence.
+//
+// Names used in documentation of this class refer directly to naming used
+// in the H.264 specification (http://www.itu.int/rec/T-REC-H.264).
+class H264Encoder : public AcceleratedVideoEncoder {
+ public:
+ struct EncodeParams {
+ EncodeParams();
+
+ // Produce an IDR at least once per this many frames.
+ // Must be >= 16 (per spec).
+ size_t idr_period_frames;
+
+ // Produce an I frame at least once per this many frames.
+ size_t i_period_frames;
+
+ // How often do we need to have either an I or a P frame in the stream.
+ // A period of 1 implies no B frames.
+ size_t ip_period_frames;
+
+ // Bitrate in bps.
+ uint32_t bitrate_bps;
+
+ // Framerate in FPS.
+ uint32_t framerate;
+
+ // Bitrate window size in ms.
+ unsigned int cpb_window_size_ms;
+
+ // Bitrate window size in bits.
+ unsigned int cpb_size_bits;
+
+ // Quantization parameter.
+ int qp;
+ };
+
+ // An accelerator interface. The client must provide an appropriate
+ // implementation on creation.
+ class Accelerator {
+ public:
+ Accelerator() = default;
+ virtual ~Accelerator();
+
+ // Returns the H264Picture to be used as output for |job|.
+ virtual scoped_refptr<H264Picture> GetPicture(EncodeJob* job) = 0;
+
+ // Initializes |job| to insert the provided |packed_sps| and |packed_pps|
+ // before the frame produced by |job| into the output video stream.
+ virtual bool SubmitPackedHeaders(
+ EncodeJob* job,
+ scoped_refptr<H264BitstreamBuffer> packed_sps,
+ scoped_refptr<H264BitstreamBuffer> packed_pps) = 0;
+
+ // Initializes |job| to use the provided |sps|, |pps|, |encode_params|, and
+ // encoded picture parameters in |pic|, as well as |ref_pic_list0| and
+ // |ref_pic_list1| as the corresponding H264 reference frame lists
+ // (RefPicList0 and RefPicList1 per spec) for the frame to be produced.
+ virtual bool SubmitFrameParameters(
+ EncodeJob* job,
+ const H264Encoder::EncodeParams& encode_params,
+ const media::H264SPS& sps,
+ const media::H264PPS& pps,
+ scoped_refptr<H264Picture> pic,
+ const std::list<scoped_refptr<H264Picture>>& ref_pic_list0,
+ const std::list<scoped_refptr<H264Picture>>& ref_pic_list1) = 0;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(Accelerator);
+ };
+
+ explicit H264Encoder(std::unique_ptr<Accelerator> accelerator);
+ ~H264Encoder() override;
+
+ // AcceleratedVideoEncoder implementation.
+ bool Initialize(const gfx::Size& visible_size,
+ VideoCodecProfile profile,
+ uint32_t initial_bitrate,
+ uint32_t initial_framerate) override;
+ bool UpdateRates(uint32_t bitrate, uint32_t framerate) override;
+ gfx::Size GetCodedSize() const override;
+ size_t GetBitstreamBufferSize() const override;
+ size_t GetMaxNumOfRefFrames() const override;
+ bool PrepareEncodeJob(EncodeJob* encode_job) override;
+
+ private:
+ // Fill current_sps_ and current_pps_ with current encoding state parameters.
+ void UpdateSPS();
+ void UpdatePPS();
+
+ // Generate packed SPS and PPS in packed_sps_ and packed_pps_, using values
+ // in current_sps_ and current_pps_.
+ void GeneratePackedSPS();
+ void GeneratePackedPPS();
+
+ // Current SPS, PPS and their packed versions. Packed versions are NALUs
+ // in AnnexB format *without* emulation prevention three-byte sequences
+ // (those are expected to be added by the client as needed).
+ media::H264SPS current_sps_;
+ scoped_refptr<media::H264BitstreamBuffer> packed_sps_;
+ media::H264PPS current_pps_;
+ scoped_refptr<media::H264BitstreamBuffer> packed_pps_;
+
+ // Current encoding parameters being used.
+ EncodeParams curr_params_;
+
+ // H264 profile currently used.
+ media::VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
+
+ // Current visible and coded sizes in pixels.
+ gfx::Size visible_size_;
+ gfx::Size coded_size_;
+
+ // Width/height in macroblocks.
+ unsigned int mb_width_ = 0;
+ unsigned int mb_height_ = 0;
+
+ // frame_num (spec section 7.4.3) to be used for the next frame.
+ unsigned int frame_num_ = 0;
+
+ // idr_pic_id (spec section 7.4.3) to be used for the next frame.
+ unsigned int idr_pic_id_ = 0;
+
+ // True if encoding parameters have changed and we need to submit a keyframe
+ // with updated parameters.
+ bool encoding_parameters_changed_ = false;
+
+ // Currently active reference frames.
+ // RefPicList0 per spec (spec section 8.2.4.2).
+ std::list<scoped_refptr<H264Picture>> ref_pic_list0_;
+
+ // Accelerator instance used to prepare encode jobs.
+ const std::unique_ptr<Accelerator> accelerator_;
+
+ SEQUENCE_CHECKER(sequence_checker_);
+ DISALLOW_COPY_AND_ASSIGN(H264Encoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_VAAPI_H264_ENCODER_H_
diff --git a/chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc
index 903890ac40f..89bfbb93a23 100644
--- a/chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_h264_accelerator.cc
@@ -24,12 +24,12 @@ namespace {
// from ITU-T REC H.264 spec
// section 8.5.6
// "Inverse scanning process for 4x4 transform coefficients and scaling lists"
-static const int kZigzagScan4x4[16] = {0, 1, 4, 8, 5, 2, 3, 6,
- 9, 12, 13, 10, 7, 11, 14, 15};
+static constexpr int kZigzagScan4x4[16] = {0, 1, 4, 8, 5, 2, 3, 6,
+ 9, 12, 13, 10, 7, 11, 14, 15};
// section 8.5.7
// "Inverse scanning process for 8x8 transform coefficients and scaling lists"
-static const uint8_t kZigzagScan8x8[64] = {
+static constexpr uint8_t kZigzagScan8x8[64] = {
0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc
index 411b9257a39..35b7389b789 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc
@@ -26,8 +26,8 @@
namespace media {
namespace {
-const char* kTestFilename = "pixel-1280x720.jpg";
-const char* kExpectedMd5Sum = "6e9e1716073c9a9a1282e3f0e0dab743";
+constexpr const char* kTestFilename = "pixel-1280x720.jpg";
+constexpr const char* kExpectedMd5Sum = "6e9e1716073c9a9a1282e3f0e0dab743";
void LogOnError() {
LOG(FATAL) << "Oh noes! Decoder failed";
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
index 430331f2dfb..caf6ff832a6 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
@@ -30,7 +30,7 @@ namespace {
// JPEG format uses 2 bytes to denote the size of a segment, and the size
// includes the 2 bytes used for specifying it. Therefore, maximum data size
// allowed is: 65535 - 2 = 65533.
-const size_t kMaxExifSizeAllowed = 65533;
+constexpr size_t kMaxExifSizeAllowed = 65533;
// UMA results that the VaapiJpegEncodeAccelerator class reports.
// These values are persisted to logs, and should therefore never be renumbered
@@ -154,9 +154,17 @@ void VaapiJpegEncodeAccelerator::Encoder::EncodeTask(
exif_buffer = static_cast<uint8_t*>(request->exif_shm->memory());
exif_buffer_size = request->exif_shm->size();
}
- if (!jpeg_encoder_->Encode(input_size, exif_buffer, exif_buffer_size,
- request->quality, va_surface_id,
- cached_output_buffer_id_)) {
+
+ // When the exif buffer contains a thumbnail, the VAAPI encoder would
+ // generate a corrupted JPEG. We can work around the problem by supplying an
+ // all-zero buffer with the same size and fill in the real exif buffer after
+ // encoding.
+ // TODO(shenghao): Remove this mechanism after b/79840013 is fixed.
+ std::vector<uint8_t> exif_buffer_dummy(exif_buffer_size, 0);
+ size_t exif_offset = 0;
+ if (!jpeg_encoder_->Encode(input_size, exif_buffer_dummy.data(),
+ exif_buffer_size, request->quality, va_surface_id,
+ cached_output_buffer_id_, &exif_offset)) {
VLOGF(1) << "Encode JPEG failed";
notify_error_cb_.Run(buffer_id, PLATFORM_FAILURE);
return;
@@ -173,6 +181,10 @@ void VaapiJpegEncodeAccelerator::Encoder::EncodeTask(
notify_error_cb_.Run(buffer_id, PLATFORM_FAILURE);
}
+ // Copy the real exif buffer into preserved space.
+ memcpy(static_cast<uint8_t*>(request->output_shm->memory()) + exif_offset,
+ exif_buffer, exif_buffer_size);
+
video_frame_ready_cb_.Run(buffer_id, encoded_size);
}
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc
index 538b873ca2d..d12a1b05729 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc
@@ -26,25 +26,25 @@ namespace media {
namespace {
// JPEG header only uses 2 bytes to represent width and height.
-const int kMaxDimension = 65535;
-const size_t kDctSize2 = 64;
-const size_t kNumDcRunSizeBits = 16;
-const size_t kNumAcRunSizeBits = 16;
-const size_t kNumDcCodeWordsHuffVal = 12;
-const size_t kNumAcCodeWordsHuffVal = 162;
-const size_t kJpegDefaultHeaderSize =
+constexpr int kMaxDimension = 65535;
+constexpr size_t kDctSize2 = 64;
+constexpr size_t kNumDcRunSizeBits = 16;
+constexpr size_t kNumAcRunSizeBits = 16;
+constexpr size_t kNumDcCodeWordsHuffVal = 12;
+constexpr size_t kNumAcCodeWordsHuffVal = 162;
+constexpr size_t kJpegDefaultHeaderSize =
67 + (kDctSize2 * 2) + (kNumDcRunSizeBits * 2) +
(kNumDcCodeWordsHuffVal * 2) + (kNumAcRunSizeBits * 2) +
(kNumAcCodeWordsHuffVal * 2);
-const size_t kJFIFApp0Size = 16;
+constexpr size_t kJFIFApp0Size = 16;
-const uint8_t kZigZag8x8[64] = {
+constexpr uint8_t kZigZag8x8[64] = {
0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13, 6, 7, 14, 21, 28,
35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63};
-const JpegQuantizationTable kDefaultQuantTable[2] = {
+constexpr JpegQuantizationTable kDefaultQuantTable[2] = {
// Table K.1 Luminance quantization table values.
{
true,
@@ -174,7 +174,8 @@ size_t FillJpegHeader(const gfx::Size& input_size,
const uint8_t* exif_buffer,
size_t exif_buffer_size,
int quality,
- uint8_t* header) {
+ uint8_t* header,
+ size_t* exif_offset) {
unsigned int width = input_size.width();
unsigned int height = input_size.height();
@@ -193,6 +194,7 @@ size_t FillJpegHeader(const gfx::Size& input_size,
static_cast<uint8_t>(exif_segment_size % 256)};
memcpy(header + idx, kAppSegment, sizeof(kAppSegment));
idx += sizeof(kAppSegment);
+ *exif_offset = idx;
memcpy(header + idx, exif_buffer, exif_buffer_size);
idx += exif_buffer_size;
} else {
@@ -369,7 +371,8 @@ bool VaapiJpegEncoder::Encode(const gfx::Size& input_size,
size_t exif_buffer_size,
int quality,
VASurfaceID surface_id,
- VABufferID output_buffer_id) {
+ VABufferID output_buffer_id,
+ size_t* exif_offset) {
DCHECK_NE(surface_id, VA_INVALID_SURFACE);
if (input_size.width() > kMaxDimension ||
@@ -421,8 +424,9 @@ bool VaapiJpegEncoder::Encode(const gfx::Size& input_size,
? kJpegDefaultHeaderSize + exif_buffer_size
: kJpegDefaultHeaderSize + kJFIFApp0Size;
jpeg_header.resize(jpeg_header_size);
- size_t length_in_bits = FillJpegHeader(
- input_size, exif_buffer, exif_buffer_size, quality, jpeg_header.data());
+ size_t length_in_bits =
+ FillJpegHeader(input_size, exif_buffer, exif_buffer_size, quality,
+ jpeg_header.data(), exif_offset);
VAEncPackedHeaderParameterBuffer header_param;
memset(&header_param, 0, sizeof(header_param));
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h b/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h
index f38516950b3..6aa53dc89e0 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.h
@@ -40,13 +40,15 @@ class MEDIA_GPU_EXPORT VaapiJpegEncoder {
// |output_buffer_id| is the ID of VA buffer that encoded image will be
// stored. The size of it should be at least as large as
// GetMaxCodedBufferSize().
+ // |exif_offset| is the offset where Exif data should be filled into.
// Return false on failure.
bool Encode(const gfx::Size& input_size,
const uint8_t* exif_buffer,
size_t exif_buffer_size,
int quality,
VASurfaceID surface_id,
- VABufferID output_buffer_id);
+ VABufferID output_buffer_id,
+ size_t* exif_offset);
// Gets the maximum possible encoded result size.
// |size| is the dimension of the YUV image to be encoded.
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
index a6d995a8484..cac05e1dd1c 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
@@ -448,6 +448,13 @@ void VaapiVideoDecodeAccelerator::DecodeTask() {
RETURN_AND_NOTIFY_ON_FAILURE(false, "Error decoding stream",
PLATFORM_FAILURE, );
return;
+
+ case AcceleratedVideoDecoder::kNoKey:
+ NOTREACHED() << "Should not reach here unless this class accepts "
+ "encrypted streams.";
+ RETURN_AND_NOTIFY_ON_FAILURE(false, "Error decoding stream",
+ PLATFORM_FAILURE, );
+ return;
}
}
}
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
index 7c2159da0ab..a2c88a3a305 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
@@ -22,7 +22,7 @@ using ::testing::Invoke;
using ::testing::Return;
using ::testing::TestWithParam;
using ::testing::ValuesIn;
-using ::testing::WithArgs;
+using ::testing::WithArg;
namespace media {
@@ -34,9 +34,15 @@ ACTION_P(RunClosure, closure) {
constexpr VideoCodecProfile kCodecProfiles[] = {H264PROFILE_MIN, VP8PROFILE_MIN,
VP9PROFILE_MIN};
-constexpr int kBitstreamId = 123;
+constexpr int32_t kBitstreamId = 123;
constexpr size_t kInputSize = 256;
+constexpr size_t kNumPictures = 2;
+const gfx::Size kPictureSize(64, 48);
+
+constexpr size_t kNewNumPictures = 3;
+const gfx::Size kNewPictureSize(64, 48);
+
} // namespace
class MockAcceleratedVideoDecoder : public AcceleratedVideoDecoder {
@@ -44,7 +50,9 @@ class MockAcceleratedVideoDecoder : public AcceleratedVideoDecoder {
MockAcceleratedVideoDecoder() = default;
~MockAcceleratedVideoDecoder() override = default;
- MOCK_METHOD3(SetStream, void(int32_t id, const uint8_t* ptr, size_t size));
+ MOCK_METHOD4(
+ SetStream,
+ void(int32_t id, const uint8_t* ptr, size_t size, const DecryptConfig*));
MOCK_METHOD0(Flush, bool());
MOCK_METHOD0(Reset, void());
MOCK_METHOD0(Decode, DecodeResult());
@@ -177,6 +185,112 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<VideoCodecProfile>,
run_loop.Run();
}
+ // Try and QueueInputBuffer()s, where we pretend that |mock_decoder_| requests
+ // to kAllocateNewSurfaces: |vda_| will ping us to ProvidePictureBuffers().
+ // If |expect_dismiss_picture_buffers| is signalled, then we expect as well
+ // that |vda_| will emit |num_picture_buffers_to_dismiss| DismissPictureBuffer
+ // calls.
+ void QueueInputBufferSequence(size_t num_pictures,
+ const gfx::Size& picture_size,
+ int32_t bitstream_id,
+ bool expect_dismiss_picture_buffers = false,
+ size_t num_picture_buffers_to_dismiss = 0) {
+ ::testing::InSequence s;
+ base::RunLoop run_loop;
+ base::Closure quit_closure = run_loop.QuitClosure();
+ EXPECT_CALL(*mock_decoder_, SetStream(_, _, kInputSize, nullptr));
+ EXPECT_CALL(*mock_decoder_, Decode())
+ .WillOnce(Return(AcceleratedVideoDecoder::kAllocateNewSurfaces));
+
+ EXPECT_CALL(*mock_decoder_, GetRequiredNumOfPictures())
+ .WillOnce(Return(num_pictures));
+ EXPECT_CALL(*mock_decoder_, GetPicSize()).WillOnce(Return(picture_size));
+ EXPECT_CALL(*mock_vaapi_wrapper_, DestroySurfaces());
+
+ if (expect_dismiss_picture_buffers) {
+ EXPECT_CALL(*this, DismissPictureBuffer(_))
+ .Times(num_picture_buffers_to_dismiss);
+ }
+
+ EXPECT_CALL(*this,
+ ProvidePictureBuffers(num_pictures, _, 1, picture_size, _))
+ .WillOnce(RunClosure(quit_closure));
+
+ base::SharedMemoryHandle handle;
+ handle = base::SharedMemory::DuplicateHandle(in_shm_->handle());
+ BitstreamBuffer bitstream_buffer(bitstream_id, handle, kInputSize);
+
+ QueueInputBuffer(bitstream_buffer);
+ run_loop.Run();
+ }
+
+ // Calls AssignPictureBuffers(), expecting the corresponding mock calls; we
+ // pretend |mock_decoder_| has kRanOutOfStreamData (i.e. it's finished
+ // decoding) and expect |vda_| to emit a NotifyEndOfBitstreamBuffer().
+ // QueueInputBufferSequence() must have been called beforehand.
+ void AssignPictureBuffersSequence(size_t num_pictures,
+ const gfx::Size& picture_size,
+ int32_t bitstream_id) {
+ ASSERT_TRUE(vda_.curr_input_buffer_)
+ << "QueueInputBuffer() should have been called";
+
+ ::testing::InSequence s;
+ base::RunLoop run_loop;
+ base::Closure quit_closure = run_loop.QuitClosure();
+
+ EXPECT_CALL(*mock_vaapi_wrapper_,
+ CreateSurfaces(_, picture_size, num_pictures, _))
+ .WillOnce(DoAll(
+ WithArg<3>(Invoke(
+ [num_pictures](std::vector<VASurfaceID>* va_surface_ids) {
+ va_surface_ids->resize(num_pictures);
+ })),
+ Return(true)));
+ EXPECT_CALL(*mock_vaapi_picture_factory_,
+ MockCreateVaapiPicture(mock_vaapi_wrapper_.get(), picture_size))
+ .Times(num_pictures);
+
+ EXPECT_CALL(*mock_decoder_, Decode())
+ .WillOnce(Return(AcceleratedVideoDecoder::kRanOutOfStreamData));
+ EXPECT_CALL(*this, NotifyEndOfBitstreamBuffer(bitstream_id))
+ .WillOnce(RunClosure(quit_closure));
+
+ const auto tex_target = mock_vaapi_picture_factory_->GetGLTextureTarget();
+ int irrelevant_id = 2;
+ std::vector<PictureBuffer> picture_buffers;
+ for (size_t picture = 0; picture < num_pictures; ++picture) {
+ // The picture buffer id, client id and service texture ids are
+ // arbitrarily chosen.
+ picture_buffers.push_back({irrelevant_id++, picture_size,
+ PictureBuffer::TextureIds{irrelevant_id++},
+ PictureBuffer::TextureIds{irrelevant_id++},
+ tex_target, PIXEL_FORMAT_XRGB});
+ }
+
+ AssignPictureBuffers(picture_buffers);
+ run_loop.Run();
+ }
+
+ // Calls QueueInputBuffer(); we instruct from |mock_decoder_| that it has
+ // kRanOutOfStreamData (i.e. it's finished decoding). This is a fast method
+ // because the Decode() is (almost) immediate.
+ void DecodeOneFrameFast(int32_t bitstream_id) {
+ base::RunLoop run_loop;
+ base::Closure quit_closure = run_loop.QuitClosure();
+ EXPECT_CALL(*mock_decoder_, SetStream(_, _, kInputSize, nullptr));
+ EXPECT_CALL(*mock_decoder_, Decode())
+ .WillOnce(Return(AcceleratedVideoDecoder::kRanOutOfStreamData));
+ EXPECT_CALL(*this, NotifyEndOfBitstreamBuffer(bitstream_id))
+ .WillOnce(RunClosure(quit_closure));
+
+ base::SharedMemoryHandle handle;
+ handle = base::SharedMemory::DuplicateHandle(in_shm_->handle());
+ BitstreamBuffer bitstream_buffer(bitstream_id, handle, kInputSize);
+
+ QueueInputBuffer(bitstream_buffer);
+ run_loop.Run();
+ }
+
// VideoDecodeAccelerator::Client methods.
MOCK_METHOD1(NotifyInitializationComplete, void(bool));
MOCK_METHOD5(
@@ -209,6 +323,22 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<VideoCodecProfile>,
DISALLOW_COPY_AND_ASSIGN(VaapiVideoDecodeAcceleratorTest);
};
+// Verify that it is possible to select DRM(egl) and TFP(glx) at runtime.
+TEST_P(VaapiVideoDecodeAcceleratorTest, SupportedPlatforms) {
+ EXPECT_EQ(VaapiPictureFactory::kVaapiImplementationNone,
+ mock_vaapi_picture_factory_->GetVaapiImplementation(
+ gl::kGLImplementationNone));
+ EXPECT_EQ(VaapiPictureFactory::kVaapiImplementationDrm,
+ mock_vaapi_picture_factory_->GetVaapiImplementation(
+ gl::kGLImplementationEGLGLES2));
+
+#if defined(USE_X11)
+ EXPECT_EQ(VaapiPictureFactory::kVaapiImplementationX11,
+ mock_vaapi_picture_factory_->GetVaapiImplementation(
+ gl::kGLImplementationDesktopGL));
+#endif
+}
+
// This test checks that QueueInputBuffer() fails when state is kUnitialized.
TEST_P(VaapiVideoDecodeAcceleratorTest, QueueInputBufferAndError) {
SetVdaStateToUnitialized();
@@ -230,7 +360,7 @@ TEST_P(VaapiVideoDecodeAcceleratorTest, QueueInputBufferAndDecodeError) {
base::RunLoop run_loop;
base::Closure quit_closure = run_loop.QuitClosure();
- EXPECT_CALL(*mock_decoder_, SetStream(_, _, kInputSize));
+ EXPECT_CALL(*mock_decoder_, SetStream(_, _, kInputSize, nullptr));
EXPECT_CALL(*mock_decoder_, Decode())
.WillOnce(Return(AcceleratedVideoDecoder::kDecodeError));
EXPECT_CALL(*this, NotifyError(VaapiVideoDecodeAccelerator::PLATFORM_FAILURE))
@@ -240,113 +370,46 @@ TEST_P(VaapiVideoDecodeAcceleratorTest, QueueInputBufferAndDecodeError) {
run_loop.Run();
}
-// Tests usual startup sequence: a BitstreamBuffer is enqueued for decode,
-// |vda_| asks for PictureBuffers, that we provide, and then the same Decode()
-// is tried again.
-TEST_P(VaapiVideoDecodeAcceleratorTest,
- QueueInputBufferAndAssignPictureBuffersAndDecode) {
- // Try and QueueInputBuffer(), |vda_| will ping us to ProvidePictureBuffers().
- const uint32_t kNumPictures = 2;
- const gfx::Size kPictureSize(64, 48);
- {
- base::SharedMemoryHandle handle;
- handle = base::SharedMemory::DuplicateHandle(in_shm_->handle());
- BitstreamBuffer bitstream_buffer(kBitstreamId, handle, kInputSize);
-
- base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
- EXPECT_CALL(*mock_decoder_, SetStream(_, _, kInputSize));
- EXPECT_CALL(*mock_decoder_, Decode())
- .WillOnce(Return(AcceleratedVideoDecoder::kAllocateNewSurfaces));
-
- EXPECT_CALL(*mock_decoder_, GetRequiredNumOfPictures())
- .WillOnce(Return(kNumPictures));
- EXPECT_CALL(*mock_decoder_, GetPicSize()).WillOnce(Return(kPictureSize));
- EXPECT_CALL(*mock_vaapi_wrapper_, DestroySurfaces());
-
- EXPECT_CALL(*this,
- ProvidePictureBuffers(kNumPictures, _, 1, kPictureSize, _))
- .WillOnce(RunClosure(quit_closure));
-
- QueueInputBuffer(bitstream_buffer);
- run_loop.Run();
- }
- // AssignPictureBuffers() accordingly and expect another go at Decode().
- {
- base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
-
- const uint32_t tex_target =
- mock_vaapi_picture_factory_->GetGLTextureTarget();
-
- // These client and service texture ids are arbitrarily chosen.
- const std::vector<PictureBuffer> kPictureBuffers(
- {{2, kPictureSize, PictureBuffer::TextureIds{0},
- PictureBuffer::TextureIds{1}, tex_target, PIXEL_FORMAT_XRGB},
- {3, kPictureSize, PictureBuffer::TextureIds{2},
- PictureBuffer::TextureIds{3}, tex_target, PIXEL_FORMAT_XRGB}});
- EXPECT_EQ(kPictureBuffers.size(), kNumPictures);
+// Verifies a single fast frame decoding.
+TEST_P(VaapiVideoDecodeAcceleratorTest, DecodeOneFrame) {
+ DecodeOneFrameFast(kBitstreamId);
- EXPECT_CALL(*mock_vaapi_wrapper_,
- CreateSurfaces(_, kPictureSize, kNumPictures, _))
- .WillOnce(DoAll(
- WithArgs<3>(Invoke([](std::vector<VASurfaceID>* va_surface_ids) {
- va_surface_ids->resize(kNumPictures);
- })),
- Return(true)));
- EXPECT_CALL(*mock_vaapi_picture_factory_,
- MockCreateVaapiPicture(mock_vaapi_wrapper_.get(), kPictureSize))
- .Times(2);
+ ResetSequence();
+}
- EXPECT_CALL(*mock_decoder_, Decode())
- .WillOnce(Return(AcceleratedVideoDecoder::kRanOutOfStreamData));
- EXPECT_CALL(*this, NotifyEndOfBitstreamBuffer(kBitstreamId))
- .WillOnce(RunClosure(quit_closure));
+// Tests usual startup sequence: a BitstreamBuffer is enqueued for decode;
+// |vda_| asks for PictureBuffers, which we provide via AssignPictureBuffers().
+TEST_P(VaapiVideoDecodeAcceleratorTest,
+ QueueInputBuffersAndAssignPictureBuffers) {
+ QueueInputBufferSequence(kNumPictures, kPictureSize, kBitstreamId);
- AssignPictureBuffers(kPictureBuffers);
- run_loop.Run();
- }
+ AssignPictureBuffersSequence(kNumPictures, kPictureSize, kBitstreamId);
ResetSequence();
}
-// Verifies that Decode() replying kRanOutOfStreamData (to signal it's finished)
-// rolls to a NotifyEndOfBitstreamBuffer().
-TEST_P(VaapiVideoDecodeAcceleratorTest, QueueInputBufferAndDecodeFinished) {
- base::SharedMemoryHandle handle;
- handle = base::SharedMemory::DuplicateHandle(in_shm_->handle());
- BitstreamBuffer bitstream_buffer(kBitstreamId, handle, kInputSize);
+// Tests a typical resolution change sequence: a BitstreamBuffer is enqueued;
+// |vda_| asks for PictureBuffers, which we provide via AssignPictureBuffers().
+// We then try to enqueue a few BitstreamBuffers of a different resolution: we
+// then expect the old ones to be dismissed and new ones provided. This
+// sequence is purely ingress-wise, i.e. there are no decoded output checks.
+TEST_P(VaapiVideoDecodeAcceleratorTest,
+ QueueInputBuffersAndAssignPictureBuffersAndReallocate) {
+ QueueInputBufferSequence(kNumPictures, kPictureSize, kBitstreamId);
- {
- base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
- EXPECT_CALL(*mock_decoder_, SetStream(_, _, kInputSize));
- EXPECT_CALL(*mock_decoder_, Decode())
- .WillOnce(Return(AcceleratedVideoDecoder::kRanOutOfStreamData));
- EXPECT_CALL(*this, NotifyEndOfBitstreamBuffer(kBitstreamId))
- .WillOnce(RunClosure(quit_closure));
+ AssignPictureBuffersSequence(kNumPictures, kPictureSize, kBitstreamId);
- QueueInputBuffer(bitstream_buffer);
- run_loop.Run();
- }
+ // Decode a few frames. This step is not necessary.
+ for (int i = 0; i < 5; ++i)
+ DecodeOneFrameFast(kBitstreamId + i);
- ResetSequence();
-}
+ QueueInputBufferSequence(kNewNumPictures, kNewPictureSize, kBitstreamId,
+ true /* expect_dismiss_picture_buffers */,
+ kNumPictures /* num_picture_buffers_to_dismiss */);
-// Verify that it is possible to select DRM(egl) and TFP(glx) at runtime.
-TEST_P(VaapiVideoDecodeAcceleratorTest, SupportedPlatforms) {
- EXPECT_EQ(VaapiPictureFactory::kVaapiImplementationNone,
- mock_vaapi_picture_factory_->GetVaapiImplementation(
- gl::kGLImplementationNone));
- EXPECT_EQ(VaapiPictureFactory::kVaapiImplementationDrm,
- mock_vaapi_picture_factory_->GetVaapiImplementation(
- gl::kGLImplementationEGLGLES2));
+ AssignPictureBuffersSequence(kNewNumPictures, kNewPictureSize, kBitstreamId);
-#if defined(USE_X11)
- EXPECT_EQ(VaapiPictureFactory::kVaapiImplementationX11,
- mock_vaapi_picture_factory_->GetVaapiImplementation(
- gl::kGLImplementationDesktopGL));
-#endif
+ ResetSequence();
}
INSTANTIATE_TEST_CASE_P(/* No prefix. */,
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
index 57bb2fcfdff..a1ebebe4012 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
@@ -6,22 +6,31 @@
#include <string.h>
+#include <algorithm>
#include <memory>
#include <utility>
#include <va/va.h>
+#include <va/va_enc_h264.h>
+#include <va/va_enc_vp8.h>
#include "base/bind.h"
#include "base/callback.h"
#include "base/macros.h"
+#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/numerics/safe_conversions.h"
#include "base/single_thread_task_runner.h"
+#include "base/stl_util.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
#include "media/gpu/h264_dpb.h"
#include "media/gpu/shared_memory_region.h"
+#include "media/gpu/vaapi/h264_encoder.h"
+#include "media/gpu/vaapi/vaapi_common.h"
+#include "media/gpu/vaapi/vp8_encoder.h"
+#include "media/gpu/vp8_reference_frame_vector.h"
#define VLOGF(level) VLOG(level) << __func__ << "(): "
#define DVLOGF(level) DVLOG(level) << __func__ << "(): "
@@ -37,69 +46,77 @@
namespace media {
namespace {
+// Minimum number of frames in flight for pipeline depth; adjust up to this
+// number if the encoder requests fewer.
+constexpr size_t kMinNumFramesInFlight = 4;
+
// Need 2 surfaces for each frame: one for input data and one for
// reconstructed picture, which is later used for reference.
-const size_t kMinSurfacesToEncode = 2;
+constexpr size_t kNumSurfacesPerFrame = 2;
+// TODO(owenlin): Adjust the value after b/71367113 is fixed
+constexpr size_t kExtraOutputBufferSizeInBytes = 32768;
-// Subjectively chosen.
-const size_t kNumInputBuffers = 4;
-const size_t kMaxNumReferenceFrames = 4;
+constexpr int kDefaultFramerate = 30;
-// TODO(owenlin): Adjust the value after b/71367113 is fixed
-const size_t kExtraOutputBufferSize = 32768; // bytes
-
-// We need up to kMaxNumReferenceFrames surfaces for reference, plus one
-// for input and one for encode (which will be added to the set of reference
-// frames for subsequent frames). Actual execution of HW encode is done
-// in parallel, and we want to process more frames in the meantime.
-// To have kNumInputBuffers in flight, we need a full set of reference +
-// encode surfaces (i.e. kMaxNumReferenceFrames + kMinSurfacesToEncode), and
-// (kNumInputBuffers - 1) of kMinSurfacesToEncode for the remaining frames
-// in flight.
-const size_t kNumSurfaces = kMaxNumReferenceFrames + kMinSurfacesToEncode +
- kMinSurfacesToEncode * (kNumInputBuffers - 1);
-
-// An IDR every 2048 frames, an I frame every 256 and no B frames.
-// We choose IDR period to equal MaxFrameNum so it must be a power of 2.
-const int kIDRPeriod = 2048;
-const int kIPeriod = 256;
-const int kIPPeriod = 1;
-
-const int kDefaultFramerate = 30;
-
-// HRD parameters (ch. E.2.2 in spec).
-const int kBitRateScale = 0; // bit_rate_scale for SPS HRD parameters.
-const int kCPBSizeScale = 0; // cpb_size_scale for SPS HRD parameters.
-
-const int kDefaultQP = 26;
-// All Intel codecs can do at least 4.1.
-const int kDefaultLevelIDC = 41;
-const int kChromaFormatIDC = 1; // 4:2:0
-
-// Arbitrarily chosen bitrate window size for rate control, in ms.
-const int kCPBWindowSizeMs = 1500;
+// Percentage of bitrate set to be targeted by the HW encoder.
+constexpr unsigned int kTargetBitratePercentage = 90;
// UMA errors that the VaapiVideoEncodeAccelerator class reports.
enum VAVEAEncoderFailure {
VAAPI_ERROR = 0,
VAVEA_ENCODER_FAILURES_MAX,
};
-}
-
-// Round |value| up to |alignment|, which must be a power of 2.
-static inline size_t RoundUpToPowerOf2(size_t value, size_t alignment) {
- // Check that |alignment| is a power of 2.
- DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
- return ((value + (alignment - 1)) & ~(alignment - 1));
-}
static void ReportToUMA(VAVEAEncoderFailure failure) {
UMA_HISTOGRAM_ENUMERATION("Media.VAVEA.EncoderFailure", failure,
VAVEA_ENCODER_FAILURES_MAX + 1);
}
+} // namespace
+
+// Encode job for one frame. Created when an input frame is awaiting and
+// enough resources are available to proceed. Once the job is prepared and
+// submitted to the hardware, it awaits on the |submitted_encode_jobs_| queue
+// for an output bitstream buffer to become available. Once one is ready,
+// the encoded bytes are downloaded to it, job resources are released
+// and become available for reuse.
+class VaapiEncodeJob : public AcceleratedVideoEncoder::EncodeJob {
+ public:
+ VaapiEncodeJob(scoped_refptr<VideoFrame> input_frame,
+ bool keyframe,
+ base::OnceClosure execute_cb,
+ scoped_refptr<VASurface> input_surface,
+ scoped_refptr<VASurface> reconstructed_surface,
+ VABufferID coded_buffer_id);
+
+ VaapiEncodeJob* AsVaapiEncodeJob() override { return this; }
+
+ VABufferID coded_buffer_id() const { return coded_buffer_id_; }
+ const scoped_refptr<VASurface> input_surface() const {
+ return input_surface_;
+ }
+ const scoped_refptr<VASurface> reconstructed_surface() const {
+ return reconstructed_surface_;
+ }
+
+ private:
+ ~VaapiEncodeJob() override = default;
+
+ // Input surface for video frame data.
+ const scoped_refptr<VASurface> input_surface_;
+
+ // Surface for the reconstructed picture, used for reference
+ // for subsequent frames.
+ const scoped_refptr<VASurface> reconstructed_surface_;
+
+ // Buffer that will contain the output bitstream data for this frame.
+ VABufferID coded_buffer_id_;
+
+ DISALLOW_COPY_AND_ASSIGN(VaapiEncodeJob);
+};
+
struct VaapiVideoEncodeAccelerator::InputFrameRef {
- InputFrameRef(const scoped_refptr<VideoFrame>& frame, bool force_keyframe)
+ InputFrameRef(scoped_refptr<VideoFrame> frame, bool force_keyframe)
: frame(frame), force_keyframe(force_keyframe) {}
const scoped_refptr<VideoFrame> frame;
const bool force_keyframe;
@@ -117,40 +134,64 @@ VaapiVideoEncodeAccelerator::GetSupportedProfiles() {
return VaapiWrapper::GetSupportedEncodeProfiles();
}
-static unsigned int Log2OfPowerOf2(unsigned int x) {
- CHECK_GT(x, 0u);
- DCHECK_EQ(x & (x - 1), 0u);
+class VaapiVideoEncodeAccelerator::H264Accelerator
+ : public H264Encoder::Accelerator {
+ public:
+ explicit H264Accelerator(VaapiVideoEncodeAccelerator* vea) : vea_(vea) {}
+
+ ~H264Accelerator() override = default;
+
+ // H264Encoder::Accelerator implementation.
+ scoped_refptr<H264Picture> GetPicture(
+ AcceleratedVideoEncoder::EncodeJob* job) override;
+
+ bool SubmitPackedHeaders(
+ AcceleratedVideoEncoder::EncodeJob* job,
+ scoped_refptr<H264BitstreamBuffer> packed_sps,
+ scoped_refptr<H264BitstreamBuffer> packed_pps) override;
+
+ bool SubmitFrameParameters(
+ AcceleratedVideoEncoder::EncodeJob* job,
+ const H264Encoder::EncodeParams& encode_params,
+ const media::H264SPS& sps,
+ const media::H264PPS& pps,
+ scoped_refptr<H264Picture> pic,
+ const std::list<scoped_refptr<H264Picture>>& ref_pic_list0,
+ const std::list<scoped_refptr<H264Picture>>& ref_pic_list1) override;
+
+ private:
+ VaapiVideoEncodeAccelerator* const vea_;
+};
- int log = 0;
- while (x > 1) {
- x >>= 1;
- ++log;
- }
- return log;
-}
+class VaapiVideoEncodeAccelerator::VP8Accelerator
+ : public VP8Encoder::Accelerator {
+ public:
+ explicit VP8Accelerator(VaapiVideoEncodeAccelerator* vea) : vea_(vea) {}
+
+ ~VP8Accelerator() override = default;
+
+ // VP8Encoder::Accelerator implementation.
+ scoped_refptr<VP8Picture> GetPicture(
+ AcceleratedVideoEncoder::EncodeJob* job) override;
+
+ bool SubmitFrameParameters(
+ AcceleratedVideoEncoder::EncodeJob* job,
+ const media::VP8Encoder::EncodeParams& encode_params,
+ scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& ref_frames) override;
+
+ private:
+ VaapiVideoEncodeAccelerator* const vea_;
+};
VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator()
- : profile_(VIDEO_CODEC_PROFILE_UNKNOWN),
- mb_width_(0),
- mb_height_(0),
+ : codec_(kUnknownVideoCodec),
output_buffer_byte_size_(0),
state_(kUninitialized),
- frame_num_(0),
- idr_pic_id_(0),
- bitrate_(0),
- framerate_(0),
- cpb_size_(0),
- encoding_parameters_changed_(false),
encoder_thread_("VAVEAEncoderThread"),
child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
weak_this_ptr_factory_(this) {
VLOGF(2);
- weak_this_ = weak_this_ptr_factory_.GetWeakPtr();
- max_ref_idx_l0_size_ = kMaxNumReferenceFrames;
- qp_ = kDefaultQP;
- idr_period_ = kIDRPeriod;
- i_period_ = kIPeriod;
- ip_period_ = kIPPeriod;
}
VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
@@ -178,6 +219,18 @@ bool VaapiVideoEncodeAccelerator::Initialize(
client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
client_ = client_ptr_factory_->GetWeakPtr();
+ codec_ = VideoCodecProfileToVideoCodec(output_profile);
+ if (codec_ != kCodecH264 && codec_ != kCodecVP8) {
+ DVLOGF(1) << "Unsupported profile: " << GetProfileName(output_profile);
+ return false;
+ }
+
+ if (format != PIXEL_FORMAT_I420) {
+ DVLOGF(1) << "Unsupported input format: "
+ << VideoPixelFormatToString(format);
+ return false;
+ }
+
const SupportedProfiles& profiles = GetSupportedProfiles();
auto profile = find_if(profiles.begin(), profiles.end(),
[output_profile](const SupportedProfile& profile) {
@@ -187,6 +240,7 @@ bool VaapiVideoEncodeAccelerator::Initialize(
VLOGF(1) << "Unsupported output profile " << GetProfileName(output_profile);
return false;
}
+
if (input_visible_size.width() > profile->max_resolution.width() ||
input_visible_size.height() > profile->max_resolution.height()) {
VLOGF(1) << "Input size too big: " << input_visible_size.ToString()
@@ -194,29 +248,10 @@ bool VaapiVideoEncodeAccelerator::Initialize(
return false;
}
- if (format != PIXEL_FORMAT_I420) {
- VLOGF(1) << "Unsupported input format: "
- << VideoPixelFormatToString(format);
- return false;
- }
-
- profile_ = output_profile;
- visible_size_ = input_visible_size;
- // 4:2:0 format has to be 2-aligned.
- DCHECK_EQ(visible_size_.width() % 2, 0);
- DCHECK_EQ(visible_size_.height() % 2, 0);
- coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16),
- RoundUpToPowerOf2(visible_size_.height(), 16));
- mb_width_ = coded_size_.width() / 16;
- mb_height_ = coded_size_.height() / 16;
- output_buffer_byte_size_ = coded_size_.GetArea() + kExtraOutputBufferSize;
-
- UpdateRates(initial_bitrate, kDefaultFramerate);
-
vaapi_wrapper_ =
VaapiWrapper::CreateForVideoCodec(VaapiWrapper::kEncode, output_profile,
base::Bind(&ReportToUMA, VAAPI_ERROR));
- if (!vaapi_wrapper_.get()) {
+ if (!vaapi_wrapper_) {
VLOGF(1) << "Failed initializing VAAPI for profile "
<< GetProfileName(output_profile);
return false;
@@ -226,42 +261,74 @@ bool VaapiVideoEncodeAccelerator::Initialize(
VLOGF(1) << "Failed to start encoder thread";
return false;
}
+
encoder_thread_task_runner_ = encoder_thread_.task_runner();
- // Finish the remaining initialization on the encoder thread.
+ // Finish remaining initialization on the encoder thread.
encoder_thread_task_runner_->PostTask(
FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask,
- base::Unretained(this)));
-
+ base::Unretained(this), input_visible_size,
+ output_profile, initial_bitrate));
return true;
}
-void VaapiVideoEncodeAccelerator::InitializeTask() {
+void VaapiVideoEncodeAccelerator::InitializeTask(const gfx::Size& visible_size,
+ VideoCodecProfile profile,
+ uint32_t bitrate) {
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kUninitialized);
VLOGF(2);
+ weak_this_ = weak_this_ptr_factory_.GetWeakPtr();
+
+ switch (codec_) {
+ case kCodecH264:
+ encoder_ = std::make_unique<H264Encoder>(
+ std::make_unique<H264Accelerator>(this));
+ break;
+
+ case kCodecVP8:
+ encoder_ =
+ std::make_unique<VP8Encoder>(std::make_unique<VP8Accelerator>(this));
+ break;
+
+ default:
+ NOTREACHED() << "Unsupported codec type " << GetCodecName(codec_);
+ return;
+ }
+
+ if (!encoder_->Initialize(visible_size, profile, bitrate,
+ kDefaultFramerate)) {
+ NOTIFY_ERROR(kInvalidArgumentError, "Failed initializing encoder");
+ return;
+ }
+
+ coded_size_ = encoder_->GetCodedSize();
+ output_buffer_byte_size_ =
+ encoder_->GetBitstreamBufferSize() + kExtraOutputBufferSizeInBytes;
+ const size_t max_ref_frames = encoder_->GetMaxNumOfRefFrames();
+ // Use at least kMinNumFramesInFlight if encoder requested less for
+ // pipeline depth.
+ const size_t num_frames_in_flight =
+ std::max(kMinNumFramesInFlight, max_ref_frames);
+ const size_t num_surfaces = (num_frames_in_flight + 1) * kNumSurfacesPerFrame;
+ DVLOGF(1) << "Frames in flight: " << num_frames_in_flight;
+
va_surface_release_cb_ = BindToCurrentLoop(
base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID,
base::Unretained(this)));
if (!vaapi_wrapper_->CreateSurfaces(VA_RT_FORMAT_YUV420, coded_size_,
- kNumSurfaces,
+ num_surfaces,
&available_va_surface_ids_)) {
NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
return;
}
- UpdateSPS();
- GeneratePackedSPS();
-
- UpdatePPS();
- GeneratePackedPPS();
-
child_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&Client::RequireBitstreamBuffers, client_, kNumInputBuffers,
- coded_size_, output_buffer_byte_size_));
+ FROM_HERE, base::BindOnce(&Client::RequireBitstreamBuffers, client_,
+ num_frames_in_flight, coded_size_,
+ output_buffer_byte_size_));
SetState(kEncoding);
}
@@ -272,264 +339,50 @@ void VaapiVideoEncodeAccelerator::RecycleVASurfaceID(
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
available_va_surface_ids_.push_back(va_surface_id);
- EncodeFrameTask();
+ EncodePendingInputs();
}
-void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) {
- current_pic_ = new H264Picture();
-
- // If the current picture is an IDR picture, frame_num shall be equal to 0.
- if (force_keyframe)
- frame_num_ = 0;
-
- current_pic_->frame_num = frame_num_++;
- frame_num_ %= idr_period_;
-
- if (current_pic_->frame_num == 0) {
- current_pic_->idr = true;
- // H264 spec mandates idr_pic_id to differ between two consecutive IDRs.
- idr_pic_id_ ^= 1;
- ref_pic_list0_.clear();
- }
-
- if (current_pic_->frame_num % i_period_ == 0)
- current_pic_->type = H264SliceHeader::kISlice;
- else
- current_pic_->type = H264SliceHeader::kPSlice;
-
- if (current_pic_->type != H264SliceHeader::kBSlice)
- current_pic_->ref = true;
-
- current_pic_->pic_order_cnt = current_pic_->frame_num * 2;
- current_pic_->top_field_order_cnt = current_pic_->pic_order_cnt;
- current_pic_->pic_order_cnt_lsb = current_pic_->pic_order_cnt;
-
- current_encode_job_->keyframe = current_pic_->idr;
-
- DVLOGF(4) << "Starting a new frame, type: " << current_pic_->type
- << (force_keyframe ? " (forced keyframe)" : "")
- << " frame_num: " << current_pic_->frame_num
- << " POC: " << current_pic_->pic_order_cnt;
+void VaapiVideoEncodeAccelerator::ExecuteEncode(VASurfaceID va_surface_id) {
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+ if (!vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(va_surface_id))
+ NOTIFY_ERROR(kPlatformFailureError, "Failed to execute encode");
}
-void VaapiVideoEncodeAccelerator::EndFrame() {
- DCHECK(current_pic_);
- // Store the picture on the list of reference pictures and keep the list
- // below maximum size, dropping oldest references.
- if (current_pic_->ref)
- ref_pic_list0_.push_front(current_encode_job_->recon_surface);
- size_t max_num_ref_frames =
- base::checked_cast<size_t>(current_sps_.max_num_ref_frames);
- while (ref_pic_list0_.size() > max_num_ref_frames)
- ref_pic_list0_.pop_back();
-
- submitted_encode_jobs_.push(std::move(current_encode_job_));
+void VaapiVideoEncodeAccelerator::UploadFrame(scoped_refptr<VideoFrame> frame,
+ VASurfaceID va_surface_id) {
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+ if (!vaapi_wrapper_->UploadVideoFrameToSurface(frame, va_surface_id))
+ NOTIFY_ERROR(kPlatformFailureError, "Failed to upload frame");
}
-static void InitVAPicture(VAPictureH264* va_pic) {
- memset(va_pic, 0, sizeof(*va_pic));
- va_pic->picture_id = VA_INVALID_ID;
- va_pic->flags = VA_PICTURE_H264_INVALID;
+void VaapiVideoEncodeAccelerator::SubmitBuffer(
+ VABufferType type,
+ scoped_refptr<base::RefCountedBytes> buffer) {
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+ if (!vaapi_wrapper_->SubmitBuffer(type, buffer->size(), buffer->front()))
+ NOTIFY_ERROR(kPlatformFailureError, "Failed submitting a buffer");
}
-bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() {
- DCHECK(current_pic_);
- VAEncSequenceParameterBufferH264 seq_param;
- memset(&seq_param, 0, sizeof(seq_param));
-
-#define SPS_TO_SP(a) seq_param.a = current_sps_.a;
- SPS_TO_SP(seq_parameter_set_id);
- SPS_TO_SP(level_idc);
-
- seq_param.intra_period = i_period_;
- seq_param.intra_idr_period = idr_period_;
- seq_param.ip_period = ip_period_;
- seq_param.bits_per_second = bitrate_;
-
- SPS_TO_SP(max_num_ref_frames);
- seq_param.picture_width_in_mbs = mb_width_;
- seq_param.picture_height_in_mbs = mb_height_;
-
-#define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = current_sps_.a;
- SPS_TO_SP_FS(chroma_format_idc);
- SPS_TO_SP_FS(frame_mbs_only_flag);
- SPS_TO_SP_FS(log2_max_frame_num_minus4);
- SPS_TO_SP_FS(pic_order_cnt_type);
- SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
-#undef SPS_TO_SP_FS
-
- SPS_TO_SP(bit_depth_luma_minus8);
- SPS_TO_SP(bit_depth_chroma_minus8);
-
- SPS_TO_SP(frame_cropping_flag);
- if (current_sps_.frame_cropping_flag) {
- SPS_TO_SP(frame_crop_left_offset);
- SPS_TO_SP(frame_crop_right_offset);
- SPS_TO_SP(frame_crop_top_offset);
- SPS_TO_SP(frame_crop_bottom_offset);
- }
-
- SPS_TO_SP(vui_parameters_present_flag);
-#define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = current_sps_.a;
- SPS_TO_SP_VF(timing_info_present_flag);
-#undef SPS_TO_SP_VF
- SPS_TO_SP(num_units_in_tick);
- SPS_TO_SP(time_scale);
-#undef SPS_TO_SP
-
- if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType,
- sizeof(seq_param), &seq_param))
- return false;
-
- VAEncPictureParameterBufferH264 pic_param;
- memset(&pic_param, 0, sizeof(pic_param));
-
- pic_param.CurrPic.picture_id = current_encode_job_->recon_surface->id();
- pic_param.CurrPic.TopFieldOrderCnt = current_pic_->top_field_order_cnt;
- pic_param.CurrPic.BottomFieldOrderCnt = current_pic_->bottom_field_order_cnt;
- pic_param.CurrPic.flags = 0;
-
- for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i)
- InitVAPicture(&pic_param.ReferenceFrames[i]);
-
- DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames));
- RefPicList::const_iterator iter = ref_pic_list0_.begin();
- for (size_t i = 0;
- i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end();
- ++iter, ++i) {
- pic_param.ReferenceFrames[i].picture_id = (*iter)->id();
- pic_param.ReferenceFrames[i].flags = 0;
- }
-
- pic_param.coded_buf = current_encode_job_->coded_buffer;
- pic_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
- pic_param.seq_parameter_set_id = current_pps_.seq_parameter_set_id;
- pic_param.frame_num = current_pic_->frame_num;
- pic_param.pic_init_qp = qp_;
- pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1;
- pic_param.pic_fields.bits.idr_pic_flag = current_pic_->idr;
- pic_param.pic_fields.bits.reference_pic_flag = current_pic_->ref;
-#define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = current_pps_.a;
- PPS_TO_PP_PF(entropy_coding_mode_flag);
- PPS_TO_PP_PF(transform_8x8_mode_flag);
- PPS_TO_PP_PF(deblocking_filter_control_present_flag);
-#undef PPS_TO_PP_PF
-
- if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType,
- sizeof(pic_param), &pic_param))
- return false;
-
- VAEncSliceParameterBufferH264 slice_param;
- memset(&slice_param, 0, sizeof(slice_param));
-
- slice_param.num_macroblocks = mb_width_ * mb_height_;
- slice_param.macroblock_info = VA_INVALID_ID;
- slice_param.slice_type = current_pic_->type;
- slice_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
- slice_param.idr_pic_id = idr_pic_id_;
- slice_param.pic_order_cnt_lsb = current_pic_->pic_order_cnt_lsb;
- slice_param.num_ref_idx_active_override_flag = true;
-
- for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i)
- InitVAPicture(&slice_param.RefPicList0[i]);
-
- for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i)
- InitVAPicture(&slice_param.RefPicList1[i]);
-
- DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0));
- iter = ref_pic_list0_.begin();
- for (size_t i = 0;
- i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end();
- ++iter, ++i) {
- InitVAPicture(&slice_param.RefPicList0[i]);
- slice_param.RefPicList0[i].picture_id = (*iter)->id();
- slice_param.RefPicList0[i].flags = 0;
+void VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer(
+ VAEncMiscParameterType type,
+ scoped_refptr<base::RefCountedBytes> buffer) {
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+ if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(type, buffer->size(),
+ buffer->front())) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed submitting a parameter buffer");
}
-
- if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType,
- sizeof(slice_param), &slice_param))
- return false;
-
- VAEncMiscParameterRateControl rate_control_param;
- memset(&rate_control_param, 0, sizeof(rate_control_param));
- rate_control_param.bits_per_second = bitrate_;
- rate_control_param.target_percentage = 90;
- rate_control_param.window_size = kCPBWindowSizeMs;
- rate_control_param.initial_qp = qp_;
- rate_control_param.rc_flags.bits.disable_frame_skip = true;
-
- if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
- VAEncMiscParameterTypeRateControl, sizeof(rate_control_param),
- &rate_control_param))
- return false;
-
- VAEncMiscParameterFrameRate framerate_param;
- memset(&framerate_param, 0, sizeof(framerate_param));
- framerate_param.framerate = framerate_;
- if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
- VAEncMiscParameterTypeFrameRate, sizeof(framerate_param),
- &framerate_param))
- return false;
-
- VAEncMiscParameterHRD hrd_param;
- memset(&hrd_param, 0, sizeof(hrd_param));
- hrd_param.buffer_size = cpb_size_;
- hrd_param.initial_buffer_fullness = cpb_size_ / 2;
- if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
- VAEncMiscParameterTypeHRD, sizeof(hrd_param), &hrd_param))
- return false;
-
- return true;
}
-bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() {
- DCHECK(current_pic_);
- if (current_pic_->type != H264SliceHeader::kISlice)
- return true;
-
- // Submit SPS.
- VAEncPackedHeaderParameterBuffer par_buffer;
- memset(&par_buffer, 0, sizeof(par_buffer));
- par_buffer.type = VAEncPackedHeaderSequence;
- par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8;
-
- if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
- sizeof(par_buffer), &par_buffer))
- return false;
-
- if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
- packed_sps_.BytesInBuffer(),
- packed_sps_.data()))
- return false;
-
- // Submit PPS.
- memset(&par_buffer, 0, sizeof(par_buffer));
- par_buffer.type = VAEncPackedHeaderPicture;
- par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8;
-
- if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
- sizeof(par_buffer), &par_buffer))
- return false;
-
+void VaapiVideoEncodeAccelerator::SubmitH264BitstreamBuffer(
+ scoped_refptr<H264BitstreamBuffer> buffer) {
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+ // TODO(crbug.com/844303): use vaMapBuffer in VaapiWrapper::SubmitBuffer()
+ // instead to avoid this.
+ void* non_const_ptr = const_cast<uint8_t*>(buffer->data());
if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
- packed_pps_.BytesInBuffer(),
- packed_pps_.data()))
- return false;
-
- return true;
-}
-
-bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
- DCHECK(current_pic_);
- DVLOGF(4) << "Encoding frame_num: " << current_pic_->frame_num;
- return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
- current_encode_job_->input_surface->id());
-}
-
-bool VaapiVideoEncodeAccelerator::UploadFrame(
- const scoped_refptr<VideoFrame>& frame) {
- return vaapi_wrapper_->UploadVideoFrameToSurface(
- frame, current_encode_job_->input_surface->id());
+ buffer->BytesInBuffer(), non_const_ptr)) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed submitting a bitstream buffer");
+ }
}
void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
@@ -538,45 +391,51 @@ void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
if (state_ != kEncoding)
return;
- while (!submitted_encode_jobs_.empty()) {
+ while (!submitted_encode_jobs_.empty() &&
+ submitted_encode_jobs_.front() == nullptr) {
// A null job indicates a flush command.
- if (submitted_encode_jobs_.front() == nullptr) {
- submitted_encode_jobs_.pop();
- DVLOGF(2) << "FlushDone";
- DCHECK(flush_callback_);
- child_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(std::move(flush_callback_), true));
- continue;
- }
+ submitted_encode_jobs_.pop();
+ DVLOGF(2) << "FlushDone";
+ DCHECK(flush_callback_);
+ child_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(std::move(flush_callback_), true));
+ }
- if (available_bitstream_buffers_.empty())
- break;
+ if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty())
+ return;
- const auto encode_job = std::move(submitted_encode_jobs_.front());
- submitted_encode_jobs_.pop();
- const auto buffer = std::move(available_bitstream_buffers_.front());
- available_bitstream_buffers_.pop();
+ auto buffer = std::move(available_bitstream_buffers_.front());
+ available_bitstream_buffers_.pop();
+ auto encode_job = submitted_encode_jobs_.front();
+ submitted_encode_jobs_.pop();
- uint8_t* target_data = reinterpret_cast<uint8_t*>(buffer->shm->memory());
+ ReturnBitstreamBuffer(encode_job, std::move(buffer));
+}
- size_t data_size = 0;
- if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer(
- encode_job->coded_buffer, encode_job->input_surface->id(),
- target_data, buffer->shm->size(), &data_size)) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
- return;
- }
+void VaapiVideoEncodeAccelerator::ReturnBitstreamBuffer(
+ scoped_refptr<VaapiEncodeJob> encode_job,
+ std::unique_ptr<BitstreamBufferRef> buffer) {
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
- DVLOGF(4) << "Returning bitstream buffer "
- << (encode_job->keyframe ? "(keyframe)" : "")
- << " id: " << buffer->id << " size: " << data_size;
+ uint8_t* target_data = reinterpret_cast<uint8_t*>(buffer->shm->memory());
+ size_t data_size = 0;
- child_task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&Client::BitstreamBufferReady, client_, buffer->id,
- data_size, encode_job->keyframe, encode_job->timestamp));
- break;
+ if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer(
+ encode_job->coded_buffer_id(), encode_job->input_surface()->id(),
+ target_data, buffer->shm->size(), &data_size)) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
+ return;
}
+
+ DVLOGF(4) << "Returning bitstream buffer "
+ << (encode_job->IsKeyframeRequested() ? "(keyframe)" : "")
+ << " id: " << buffer->id << " size: " << data_size;
+
+ child_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&Client::BitstreamBufferReady, client_, buffer->id,
+ data_size, encode_job->IsKeyframeRequested(),
+ encode_job->timestamp()));
}
void VaapiVideoEncodeAccelerator::Encode(const scoped_refptr<VideoFrame>& frame,
@@ -590,99 +449,87 @@ void VaapiVideoEncodeAccelerator::Encode(const scoped_refptr<VideoFrame>& frame,
base::Unretained(this), frame, force_keyframe));
}
-bool VaapiVideoEncodeAccelerator::PrepareNextJob(base::TimeDelta timestamp) {
- if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
- return false;
+void VaapiVideoEncodeAccelerator::EncodeTask(scoped_refptr<VideoFrame> frame,
+ bool force_keyframe) {
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+ DCHECK_NE(state_, kUninitialized);
- DCHECK(!current_encode_job_);
- current_encode_job_.reset(new EncodeJob());
+ input_queue_.push(std::make_unique<InputFrameRef>(frame, force_keyframe));
+ EncodePendingInputs();
+}
+
+scoped_refptr<VaapiEncodeJob> VaapiVideoEncodeAccelerator::CreateEncodeJob(
+ scoped_refptr<VideoFrame> frame,
+ bool force_keyframe) {
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+
+ if (available_va_surface_ids_.size() < kNumSurfacesPerFrame) {
+ DVLOGF(4) << "Not enough surfaces available";
+ return nullptr;
+ }
+ VABufferID coded_buffer_id;
if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
- &current_encode_job_->coded_buffer)) {
+ &coded_buffer_id)) {
NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
- return false;
+ return nullptr;
}
- current_encode_job_->timestamp = timestamp;
-
- current_encode_job_->input_surface = new VASurface(
+ static_assert(kNumSurfacesPerFrame == 2, "kNumSurfacesPerFrame must be 2");
+ scoped_refptr<VASurface> input_surface = new VASurface(
available_va_surface_ids_.back(), coded_size_,
vaapi_wrapper_->va_surface_format(), va_surface_release_cb_);
available_va_surface_ids_.pop_back();
- current_encode_job_->recon_surface = new VASurface(
+ scoped_refptr<VASurface> reconstructed_surface = new VASurface(
available_va_surface_ids_.back(), coded_size_,
vaapi_wrapper_->va_surface_format(), va_surface_release_cb_);
available_va_surface_ids_.pop_back();
- // Reference surfaces are needed until the job is done, but they get
- // removed from ref_pic_list0_ when it's full at the end of job submission.
- // Keep refs to them along with the job and only release after sync.
- current_encode_job_->reference_surfaces = ref_pic_list0_;
+ auto job = base::MakeRefCounted<VaapiEncodeJob>(
+ frame, force_keyframe,
+ base::BindOnce(&VaapiVideoEncodeAccelerator::ExecuteEncode,
+ base::Unretained(this), input_surface->id()),
+ input_surface, reconstructed_surface, coded_buffer_id);
- return true;
-}
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::UploadFrame,
+ base::Unretained(this), frame, input_surface->id()));
-void VaapiVideoEncodeAccelerator::EncodeTask(
- const scoped_refptr<VideoFrame>& frame,
- bool force_keyframe) {
- DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
- DCHECK_NE(state_, kUninitialized);
-
- encoder_input_queue_.push(
- std::make_unique<InputFrameRef>(frame, force_keyframe));
- EncodeFrameTask();
+ return job;
}
-void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
+void VaapiVideoEncodeAccelerator::EncodePendingInputs() {
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+ DVLOGF(4);
+
+ while (state_ == kEncoding && !input_queue_.empty()) {
+ const std::unique_ptr<InputFrameRef>& input_frame = input_queue_.front();
+
+ // If this is a flush (null) frame, don't create/submit a new encode job for
+ // it, but forward a null job to the submitted_encode_jobs_ queue.
+ scoped_refptr<VaapiEncodeJob> job;
+ if (input_frame) {
+ job = CreateEncodeJob(input_frame->frame, input_frame->force_keyframe);
+ if (!job)
+ return;
+ }
- if (state_ != kEncoding || encoder_input_queue_.empty())
- return;
-
- // Pass the nullptr to the next queue |submitted_encode_jobs_|.
- if (encoder_input_queue_.front() == nullptr) {
- encoder_input_queue_.pop();
- submitted_encode_jobs_.push(nullptr);
- TryToReturnBitstreamBuffer();
- return;
- }
-
- if (!PrepareNextJob(encoder_input_queue_.front()->frame->timestamp())) {
- DVLOGF(4) << "Not ready for next frame yet";
- return;
- }
-
- const auto frame_ref = std::move(encoder_input_queue_.front());
- encoder_input_queue_.pop();
-
- TRACE_EVENT0("media,gpu", "VAVEA::EncodeFrameTask");
-
- if (!UploadFrame(frame_ref->frame)) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW.");
- return;
- }
-
- BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_);
- encoding_parameters_changed_ = false;
+ input_queue_.pop();
- if (!SubmitFrameParameters()) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters.");
- return;
- }
+ if (job) {
+ if (!encoder_->PrepareEncodeJob(job.get())) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed preparing an encode job.");
+ return;
+ }
- if (!SubmitHeadersIfNeeded()) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers.");
- return;
- }
+ TRACE_EVENT0("media,gpu", "VAVEA: Execute");
+ job->Execute();
+ }
- if (!ExecuteEncode()) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW.");
- return;
+ submitted_encode_jobs_.push(job);
+ TryToReturnBitstreamBuffer();
}
-
- EndFrame();
- TryToReturnBitstreamBuffer();
}
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
@@ -695,15 +542,8 @@ void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
return;
}
- std::unique_ptr<SharedMemoryRegion> shm(
- new SharedMemoryRegion(buffer, false));
- if (!shm->Map()) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
- return;
- }
-
- std::unique_ptr<BitstreamBufferRef> buffer_ref(
- new BitstreamBufferRef(buffer.id(), std::move(shm)));
+ auto buffer_ref = std::make_unique<BitstreamBufferRef>(
+ buffer.id(), std::make_unique<SharedMemoryRegion>(buffer, false));
encoder_thread_task_runner_->PostTask(
FROM_HERE,
@@ -716,6 +556,11 @@ void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
DCHECK_NE(state_, kUninitialized);
+ if (!buffer_ref->shm->Map()) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
+ return;
+ }
+
available_bitstream_buffers_.push(std::move(buffer_ref));
TryToReturnBitstreamBuffer();
}
@@ -733,17 +578,6 @@ void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
base::Unretained(this), bitrate, framerate));
}
-void VaapiVideoEncodeAccelerator::UpdateRates(uint32_t bitrate,
- uint32_t framerate) {
- if (encoder_thread_.IsRunning())
- DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
- DCHECK_NE(bitrate, 0u);
- DCHECK_NE(framerate, 0u);
- bitrate_ = bitrate;
- framerate_ = framerate;
- cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000;
-}
-
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
uint32_t bitrate,
uint32_t framerate) {
@@ -751,26 +585,8 @@ void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
DCHECK_NE(state_, kUninitialized);
- // This is a workaround to zero being temporarily, as part of the initial
- // setup, provided by the webrtc video encode and a zero bitrate and
- // framerate not being accepted by VAAPI
- // TODO: This code is common with v4l2_video_encode_accelerator.cc, perhaps
- // it could be pulled up to RTCVideoEncoder
- if (bitrate < 1)
- bitrate = 1;
- if (framerate < 1)
- framerate = 1;
-
- if (bitrate_ == bitrate && framerate_ == framerate)
- return;
-
- UpdateRates(bitrate, framerate);
-
- UpdateSPS();
- GeneratePackedSPS();
-
- // Submit new parameters along with next frame that will be processed.
- encoding_parameters_changed_ = true;
+ if (!encoder_->UpdateRates(bitrate, framerate))
+ VLOGF(1) << "Failed to update rates to " << bitrate << " " << framerate;
}
void VaapiVideoEncodeAccelerator::Flush(FlushCallback flush_callback) {
@@ -791,18 +607,14 @@ void VaapiVideoEncodeAccelerator::FlushTask() {
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
// Insert an null job to indicate a flush command.
- encoder_input_queue_.push(std::unique_ptr<InputFrameRef>(nullptr));
- EncodeFrameTask();
+ input_queue_.push(std::unique_ptr<InputFrameRef>(nullptr));
+ EncodePendingInputs();
}
void VaapiVideoEncodeAccelerator::Destroy() {
+ DVLOGF(2);
DCHECK(child_task_runner_->BelongsToCurrentThread());
- // Can't call client anymore after Destroy() returns.
- client_ptr_factory_.reset();
- weak_this_ptr_factory_.InvalidateWeakPtrs();
-
- // Early-exit encoder tasks if they are running and join the thread.
if (encoder_thread_.IsRunning()) {
encoder_thread_.task_runner()->PostTask(
FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask,
@@ -813,301 +625,460 @@ void VaapiVideoEncodeAccelerator::Destroy() {
if (flush_callback_)
std::move(flush_callback_).Run(false);
+ weak_this_ptr_factory_.InvalidateWeakPtrs();
delete this;
}
void VaapiVideoEncodeAccelerator::DestroyTask() {
VLOGF(2);
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
- SetState(kError);
+
+ // Clean up members that are to be accessed on the encoder thread only.
+ available_va_surface_ids_.clear();
+ available_va_buffer_ids_.clear();
+
+ while (!available_bitstream_buffers_.empty())
+ available_bitstream_buffers_.pop();
+
+ while (!input_queue_.empty())
+ input_queue_.pop();
+
+ while (!submitted_encode_jobs_.empty())
+ submitted_encode_jobs_.pop();
+
+ encoder_ = nullptr;
}
-void VaapiVideoEncodeAccelerator::UpdateSPS() {
- memset(&current_sps_, 0, sizeof(H264SPS));
-
- // Spec A.2 and A.3.
- switch (profile_) {
- case H264PROFILE_BASELINE:
- // Due to https://crbug.com/345569, we don't distinguish between
- // constrained and non-constrained baseline profiles. Since many codecs
- // can't do non-constrained, and constrained is usually what we mean (and
- // it's a subset of non-constrained), default to it.
- current_sps_.profile_idc = H264SPS::kProfileIDCBaseline;
- current_sps_.constraint_set0_flag = true;
- break;
- case H264PROFILE_MAIN:
- current_sps_.profile_idc = H264SPS::kProfileIDCMain;
- current_sps_.constraint_set1_flag = true;
- break;
- case H264PROFILE_HIGH:
- current_sps_.profile_idc = H264SPS::kProfileIDCHigh;
- break;
- default:
- NOTIMPLEMENTED();
- return;
+void VaapiVideoEncodeAccelerator::SetState(State state) {
+ // Only touch state on encoder thread, unless it's not running.
+ if (encoder_thread_.IsRunning() &&
+ !encoder_thread_task_runner_->BelongsToCurrentThread()) {
+ encoder_thread_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VaapiVideoEncodeAccelerator::SetState,
+ base::Unretained(this), state));
+ return;
+ }
+
+ VLOGF(2) << "setting state to: " << state;
+ state_ = state;
+}
+
+void VaapiVideoEncodeAccelerator::NotifyError(Error error) {
+ if (!child_task_runner_->BelongsToCurrentThread()) {
+ child_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VaapiVideoEncodeAccelerator::NotifyError,
+ weak_this_, error));
+ return;
}
- current_sps_.level_idc = kDefaultLevelIDC;
- current_sps_.seq_parameter_set_id = 0;
- current_sps_.chroma_format_idc = kChromaFormatIDC;
-
- DCHECK_GE(idr_period_, 1u << 4);
- current_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4;
- current_sps_.pic_order_cnt_type = 0;
- current_sps_.log2_max_pic_order_cnt_lsb_minus4 =
- Log2OfPowerOf2(idr_period_ * 2) - 4;
- current_sps_.max_num_ref_frames = max_ref_idx_l0_size_;
-
- current_sps_.frame_mbs_only_flag = true;
-
- DCHECK_GT(mb_width_, 0u);
- DCHECK_GT(mb_height_, 0u);
- current_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
- DCHECK(current_sps_.frame_mbs_only_flag);
- current_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;
-
- if (visible_size_ != coded_size_) {
- // Visible size differs from coded size, fill crop information.
- current_sps_.frame_cropping_flag = true;
- DCHECK(!current_sps_.separate_colour_plane_flag);
- // Spec table 6-1. Only 4:2:0 for now.
- DCHECK_EQ(current_sps_.chroma_format_idc, 1);
- // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
- const unsigned int crop_unit_x = 2;
- const unsigned int crop_unit_y = 2 * (2 - current_sps_.frame_mbs_only_flag);
- current_sps_.frame_crop_left_offset = 0;
- current_sps_.frame_crop_right_offset =
- (coded_size_.width() - visible_size_.width()) / crop_unit_x;
- current_sps_.frame_crop_top_offset = 0;
- current_sps_.frame_crop_bottom_offset =
- (coded_size_.height() - visible_size_.height()) / crop_unit_y;
+ if (client_) {
+ client_->NotifyError(error);
+ client_ptr_factory_.reset();
}
+}
+
+VaapiEncodeJob::VaapiEncodeJob(scoped_refptr<VideoFrame> input_frame,
+ bool keyframe,
+ base::OnceClosure execute_cb,
+ scoped_refptr<VASurface> input_surface,
+ scoped_refptr<VASurface> reconstructed_surface,
+ VABufferID coded_buffer_id)
+ : EncodeJob(input_frame, keyframe, std::move(execute_cb)),
+ input_surface_(input_surface),
+ reconstructed_surface_(reconstructed_surface),
+ coded_buffer_id_(coded_buffer_id) {
+ DCHECK(input_surface_);
+ DCHECK(reconstructed_surface_);
+ DCHECK_NE(coded_buffer_id_, VA_INVALID_ID);
+}
+
+static void InitVAPictureH264(VAPictureH264* va_pic) {
+ *va_pic = {};
+ va_pic->picture_id = VA_INVALID_ID;
+ va_pic->flags = VA_PICTURE_H264_INVALID;
+}
- current_sps_.vui_parameters_present_flag = true;
- current_sps_.timing_info_present_flag = true;
- current_sps_.num_units_in_tick = 1;
- current_sps_.time_scale = framerate_ * 2; // See equation D-2 in spec.
- current_sps_.fixed_frame_rate_flag = true;
-
- current_sps_.nal_hrd_parameters_present_flag = true;
- // H.264 spec ch. E.2.2.
- current_sps_.cpb_cnt_minus1 = 0;
- current_sps_.bit_rate_scale = kBitRateScale;
- current_sps_.cpb_size_scale = kCPBSizeScale;
- current_sps_.bit_rate_value_minus1[0] =
- (bitrate_ >> (kBitRateScale + H264SPS::kBitRateScaleConstantTerm)) - 1;
- current_sps_.cpb_size_value_minus1[0] =
- (cpb_size_ >> (kCPBSizeScale + H264SPS::kCPBSizeScaleConstantTerm)) - 1;
- current_sps_.cbr_flag[0] = true;
- current_sps_.initial_cpb_removal_delay_length_minus_1 =
- H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
- current_sps_.cpb_removal_delay_length_minus1 =
- H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
- current_sps_.dpb_output_delay_length_minus1 =
- H264SPS::kDefaultDPBOutputDelayLength - 1;
- current_sps_.time_offset_length = H264SPS::kDefaultTimeOffsetLength;
- current_sps_.low_delay_hrd_flag = false;
+static scoped_refptr<base::RefCountedBytes> MakeRefCountedBytes(void* ptr,
+ size_t size) {
+ return base::MakeRefCounted<base::RefCountedBytes>(
+ reinterpret_cast<uint8_t*>(ptr), size);
}
-void VaapiVideoEncodeAccelerator::GeneratePackedSPS() {
- packed_sps_.Reset();
-
- packed_sps_.BeginNALU(H264NALU::kSPS, 3);
-
- packed_sps_.AppendBits(8, current_sps_.profile_idc);
- packed_sps_.AppendBool(current_sps_.constraint_set0_flag);
- packed_sps_.AppendBool(current_sps_.constraint_set1_flag);
- packed_sps_.AppendBool(current_sps_.constraint_set2_flag);
- packed_sps_.AppendBool(current_sps_.constraint_set3_flag);
- packed_sps_.AppendBool(current_sps_.constraint_set4_flag);
- packed_sps_.AppendBool(current_sps_.constraint_set5_flag);
- packed_sps_.AppendBits(2, 0); // reserved_zero_2bits
- packed_sps_.AppendBits(8, current_sps_.level_idc);
- packed_sps_.AppendUE(current_sps_.seq_parameter_set_id);
-
- if (current_sps_.profile_idc == H264SPS::kProfileIDCHigh) {
- packed_sps_.AppendUE(current_sps_.chroma_format_idc);
- if (current_sps_.chroma_format_idc == 3)
- packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag);
- packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8);
- packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8);
- packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag);
- packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag);
- CHECK(!current_sps_.seq_scaling_matrix_present_flag);
+bool VaapiVideoEncodeAccelerator::H264Accelerator::SubmitFrameParameters(
+ AcceleratedVideoEncoder::EncodeJob* job,
+ const media::H264Encoder::EncodeParams& encode_params,
+ const media::H264SPS& sps,
+ const media::H264PPS& pps,
+ scoped_refptr<H264Picture> pic,
+ const std::list<scoped_refptr<H264Picture>>& ref_pic_list0,
+ const std::list<scoped_refptr<H264Picture>>& ref_pic_list1) {
+ VAEncSequenceParameterBufferH264 seq_param = {};
+
+#define SPS_TO_SP(a) seq_param.a = sps.a;
+ SPS_TO_SP(seq_parameter_set_id);
+ SPS_TO_SP(level_idc);
+
+ seq_param.intra_period = encode_params.i_period_frames;
+ seq_param.intra_idr_period = encode_params.idr_period_frames;
+ seq_param.ip_period = encode_params.ip_period_frames;
+ seq_param.bits_per_second = encode_params.bitrate_bps;
+
+ SPS_TO_SP(max_num_ref_frames);
+ base::Optional<gfx::Size> coded_size = sps.GetCodedSize();
+ if (!coded_size) {
+ DVLOGF(1) << "Invalid coded size";
+ return false;
}
+ constexpr int kH264MacroblockSizeInPixels = 16;
+ seq_param.picture_width_in_mbs =
+ coded_size->width() / kH264MacroblockSizeInPixels;
+ seq_param.picture_height_in_mbs =
+ coded_size->height() / kH264MacroblockSizeInPixels;
+
+#define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = sps.a;
+ SPS_TO_SP_FS(chroma_format_idc);
+ SPS_TO_SP_FS(frame_mbs_only_flag);
+ SPS_TO_SP_FS(log2_max_frame_num_minus4);
+ SPS_TO_SP_FS(pic_order_cnt_type);
+ SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
+#undef SPS_TO_SP_FS
+
+ SPS_TO_SP(bit_depth_luma_minus8);
+ SPS_TO_SP(bit_depth_chroma_minus8);
- packed_sps_.AppendUE(current_sps_.log2_max_frame_num_minus4);
- packed_sps_.AppendUE(current_sps_.pic_order_cnt_type);
- if (current_sps_.pic_order_cnt_type == 0)
- packed_sps_.AppendUE(current_sps_.log2_max_pic_order_cnt_lsb_minus4);
- else if (current_sps_.pic_order_cnt_type == 1) {
- CHECK(1);
+ SPS_TO_SP(frame_cropping_flag);
+ if (sps.frame_cropping_flag) {
+ SPS_TO_SP(frame_crop_left_offset);
+ SPS_TO_SP(frame_crop_right_offset);
+ SPS_TO_SP(frame_crop_top_offset);
+ SPS_TO_SP(frame_crop_bottom_offset);
}
- packed_sps_.AppendUE(current_sps_.max_num_ref_frames);
- packed_sps_.AppendBool(current_sps_.gaps_in_frame_num_value_allowed_flag);
- packed_sps_.AppendUE(current_sps_.pic_width_in_mbs_minus1);
- packed_sps_.AppendUE(current_sps_.pic_height_in_map_units_minus1);
+ SPS_TO_SP(vui_parameters_present_flag);
+#define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = sps.a;
+ SPS_TO_SP_VF(timing_info_present_flag);
+#undef SPS_TO_SP_VF
+ SPS_TO_SP(num_units_in_tick);
+ SPS_TO_SP(time_scale);
+#undef SPS_TO_SP
- packed_sps_.AppendBool(current_sps_.frame_mbs_only_flag);
- if (!current_sps_.frame_mbs_only_flag)
- packed_sps_.AppendBool(current_sps_.mb_adaptive_frame_field_flag);
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
+ base::Unretained(vea_), VAEncSequenceParameterBufferType,
+ MakeRefCountedBytes(&seq_param, sizeof(seq_param))));
- packed_sps_.AppendBool(current_sps_.direct_8x8_inference_flag);
+ VAEncPictureParameterBufferH264 pic_param = {};
- packed_sps_.AppendBool(current_sps_.frame_cropping_flag);
- if (current_sps_.frame_cropping_flag) {
- packed_sps_.AppendUE(current_sps_.frame_crop_left_offset);
- packed_sps_.AppendUE(current_sps_.frame_crop_right_offset);
- packed_sps_.AppendUE(current_sps_.frame_crop_top_offset);
- packed_sps_.AppendUE(current_sps_.frame_crop_bottom_offset);
- }
+ auto va_surface_id = pic->AsVaapiH264Picture()->GetVASurfaceID();
+ pic_param.CurrPic.picture_id = va_surface_id;
+ pic_param.CurrPic.TopFieldOrderCnt = pic->top_field_order_cnt;
+ pic_param.CurrPic.BottomFieldOrderCnt = pic->bottom_field_order_cnt;
+ pic_param.CurrPic.flags = 0;
- packed_sps_.AppendBool(current_sps_.vui_parameters_present_flag);
- if (current_sps_.vui_parameters_present_flag) {
- packed_sps_.AppendBool(false); // aspect_ratio_info_present_flag
- packed_sps_.AppendBool(false); // overscan_info_present_flag
- packed_sps_.AppendBool(false); // video_signal_type_present_flag
- packed_sps_.AppendBool(false); // chroma_loc_info_present_flag
-
- packed_sps_.AppendBool(current_sps_.timing_info_present_flag);
- if (current_sps_.timing_info_present_flag) {
- packed_sps_.AppendBits(32, current_sps_.num_units_in_tick);
- packed_sps_.AppendBits(32, current_sps_.time_scale);
- packed_sps_.AppendBool(current_sps_.fixed_frame_rate_flag);
- }
+ pic_param.coded_buf = job->AsVaapiEncodeJob()->coded_buffer_id();
+ pic_param.pic_parameter_set_id = pps.pic_parameter_set_id;
+ pic_param.seq_parameter_set_id = pps.seq_parameter_set_id;
+ pic_param.frame_num = pic->frame_num;
+ pic_param.pic_init_qp = pps.pic_init_qp_minus26 + 26;
+ pic_param.num_ref_idx_l0_active_minus1 =
+ pps.num_ref_idx_l0_default_active_minus1;
+
+ pic_param.pic_fields.bits.idr_pic_flag = pic->idr;
+ pic_param.pic_fields.bits.reference_pic_flag = pic->ref;
+#define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = pps.a;
+ PPS_TO_PP_PF(entropy_coding_mode_flag);
+ PPS_TO_PP_PF(transform_8x8_mode_flag);
+ PPS_TO_PP_PF(deblocking_filter_control_present_flag);
+#undef PPS_TO_PP_PF
- packed_sps_.AppendBool(current_sps_.nal_hrd_parameters_present_flag);
- if (current_sps_.nal_hrd_parameters_present_flag) {
- packed_sps_.AppendUE(current_sps_.cpb_cnt_minus1);
- packed_sps_.AppendBits(4, current_sps_.bit_rate_scale);
- packed_sps_.AppendBits(4, current_sps_.cpb_size_scale);
- CHECK_LT(base::checked_cast<size_t>(current_sps_.cpb_cnt_minus1),
- arraysize(current_sps_.bit_rate_value_minus1));
- for (int i = 0; i <= current_sps_.cpb_cnt_minus1; ++i) {
- packed_sps_.AppendUE(current_sps_.bit_rate_value_minus1[i]);
- packed_sps_.AppendUE(current_sps_.cpb_size_value_minus1[i]);
- packed_sps_.AppendBool(current_sps_.cbr_flag[i]);
- }
- packed_sps_.AppendBits(
- 5, current_sps_.initial_cpb_removal_delay_length_minus_1);
- packed_sps_.AppendBits(5, current_sps_.cpb_removal_delay_length_minus1);
- packed_sps_.AppendBits(5, current_sps_.dpb_output_delay_length_minus1);
- packed_sps_.AppendBits(5, current_sps_.time_offset_length);
- }
+ VAEncSliceParameterBufferH264 slice_param = {};
- packed_sps_.AppendBool(false); // vcl_hrd_parameters_flag
- if (current_sps_.nal_hrd_parameters_present_flag)
- packed_sps_.AppendBool(current_sps_.low_delay_hrd_flag);
+ slice_param.num_macroblocks =
+ seq_param.picture_width_in_mbs * seq_param.picture_height_in_mbs;
+ slice_param.macroblock_info = VA_INVALID_ID;
+ slice_param.slice_type = pic->type;
+ slice_param.pic_parameter_set_id = pps.pic_parameter_set_id;
+ slice_param.idr_pic_id = pic->idr_pic_id;
+ slice_param.pic_order_cnt_lsb = pic->pic_order_cnt_lsb;
+ slice_param.num_ref_idx_active_override_flag = true;
- packed_sps_.AppendBool(false); // pic_struct_present_flag
- packed_sps_.AppendBool(true); // bitstream_restriction_flag
+ for (size_t i = 0; i < base::size(pic_param.ReferenceFrames); ++i)
+ InitVAPictureH264(&pic_param.ReferenceFrames[i]);
+
+ for (size_t i = 0; i < base::size(slice_param.RefPicList0); ++i)
+ InitVAPictureH264(&slice_param.RefPicList0[i]);
+
+ for (size_t i = 0; i < base::size(slice_param.RefPicList1); ++i)
+ InitVAPictureH264(&slice_param.RefPicList1[i]);
+
+ VAPictureH264* ref_frames_entry = pic_param.ReferenceFrames;
+ VAPictureH264* ref_list_entry = slice_param.RefPicList0;
+ // Initialize the current entry on slice and picture reference lists to
+ // |ref_pic| and advance list pointers.
+ auto fill_ref_frame = [&ref_frames_entry,
+ &ref_list_entry](scoped_refptr<H264Picture> ref_pic) {
+ VAPictureH264 va_pic_h264;
+ InitVAPictureH264(&va_pic_h264);
+ va_pic_h264.picture_id = ref_pic->AsVaapiH264Picture()->GetVASurfaceID();
+ va_pic_h264.flags = 0;
+
+ *ref_frames_entry = va_pic_h264;
+ *ref_list_entry = va_pic_h264;
+ ++ref_frames_entry;
+ ++ref_list_entry;
+ };
+
+ // Fill slice_param.RefPicList{0,1} with pictures from ref_pic_list{0,1},
+ // respectively, and pic_param.ReferenceFrames with entries from both.
+ std::for_each(ref_pic_list0.begin(), ref_pic_list0.end(), fill_ref_frame);
+ ref_list_entry = slice_param.RefPicList1;
+ std::for_each(ref_pic_list1.begin(), ref_pic_list1.end(), fill_ref_frame);
+
+ VAEncMiscParameterRateControl rate_control_param = {};
+ rate_control_param.bits_per_second = encode_params.bitrate_bps;
+ rate_control_param.target_percentage = kTargetBitratePercentage;
+ rate_control_param.window_size = encode_params.cpb_window_size_ms;
+ rate_control_param.initial_qp = pic_param.pic_init_qp;
+ rate_control_param.rc_flags.bits.disable_frame_skip = true;
- packed_sps_.AppendBool(false); // motion_vectors_over_pic_boundaries_flag
- packed_sps_.AppendUE(2); // max_bytes_per_pic_denom
- packed_sps_.AppendUE(1); // max_bits_per_mb_denom
- packed_sps_.AppendUE(16); // log2_max_mv_length_horizontal
- packed_sps_.AppendUE(16); // log2_max_mv_length_vertical
+ VAEncMiscParameterFrameRate framerate_param = {};
+ framerate_param.framerate = encode_params.framerate;
- // Explicitly set max_num_reorder_frames to 0 to allow the decoder to
- // output pictures early.
- packed_sps_.AppendUE(0); // max_num_reorder_frames
+ VAEncMiscParameterHRD hrd_param = {};
+ hrd_param.buffer_size = encode_params.cpb_size_bits;
+ hrd_param.initial_buffer_fullness = hrd_param.buffer_size / 2;
- // The value of max_dec_frame_buffering shall be greater than or equal to
- // max_num_ref_frames.
- const unsigned int max_dec_frame_buffering =
- current_sps_.max_num_ref_frames;
- packed_sps_.AppendUE(max_dec_frame_buffering);
- }
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
+ base::Unretained(vea_), VAEncPictureParameterBufferType,
+ MakeRefCountedBytes(&pic_param, sizeof(pic_param))));
- packed_sps_.FinishNALU();
-}
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
+ base::Unretained(vea_), VAEncSliceParameterBufferType,
+ MakeRefCountedBytes(&slice_param, sizeof(slice_param))));
-void VaapiVideoEncodeAccelerator::UpdatePPS() {
- memset(&current_pps_, 0, sizeof(H264PPS));
+ job->AddSetupCallback(base::BindOnce(
+ &VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer,
+ base::Unretained(vea_), VAEncMiscParameterTypeRateControl,
+ MakeRefCountedBytes(&rate_control_param, sizeof(rate_control_param))));
- current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id;
- current_pps_.pic_parameter_set_id = 0;
+ job->AddSetupCallback(base::BindOnce(
+ &VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer,
+ base::Unretained(vea_), VAEncMiscParameterTypeFrameRate,
+ MakeRefCountedBytes(&framerate_param, sizeof(framerate_param))));
- current_pps_.entropy_coding_mode_flag =
- current_sps_.profile_idc >= H264SPS::kProfileIDCMain;
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer,
+ base::Unretained(vea_), VAEncMiscParameterTypeHRD,
+ MakeRefCountedBytes(&hrd_param, sizeof(hrd_param))));
- CHECK_GT(max_ref_idx_l0_size_, 0u);
- current_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1;
- current_pps_.num_ref_idx_l1_default_active_minus1 = 0;
- DCHECK_LE(qp_, 51u);
- current_pps_.pic_init_qp_minus26 = qp_ - 26;
- current_pps_.deblocking_filter_control_present_flag = true;
- current_pps_.transform_8x8_mode_flag =
- (current_sps_.profile_idc == H264SPS::kProfileIDCHigh);
+ return true;
}
-void VaapiVideoEncodeAccelerator::GeneratePackedPPS() {
- packed_pps_.Reset();
+scoped_refptr<H264Picture>
+VaapiVideoEncodeAccelerator::H264Accelerator::GetPicture(
+ AcceleratedVideoEncoder::EncodeJob* job) {
+ return base::MakeRefCounted<VaapiH264Picture>(
+ job->AsVaapiEncodeJob()->reconstructed_surface());
+}
- packed_pps_.BeginNALU(H264NALU::kPPS, 3);
+bool VaapiVideoEncodeAccelerator::H264Accelerator::SubmitPackedHeaders(
+ AcceleratedVideoEncoder::EncodeJob* job,
+ scoped_refptr<H264BitstreamBuffer> packed_sps,
+ scoped_refptr<H264BitstreamBuffer> packed_pps) {
+ // Submit SPS.
+ VAEncPackedHeaderParameterBuffer par_buffer = {};
+ par_buffer.type = VAEncPackedHeaderSequence;
+ par_buffer.bit_length = packed_sps->BytesInBuffer() * 8;
- packed_pps_.AppendUE(current_pps_.pic_parameter_set_id);
- packed_pps_.AppendUE(current_pps_.seq_parameter_set_id);
- packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag);
- packed_pps_.AppendBool(
- current_pps_.bottom_field_pic_order_in_frame_present_flag);
- CHECK_EQ(current_pps_.num_slice_groups_minus1, 0);
- packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1);
+ job->AddSetupCallback(base::BindOnce(
+ &VaapiVideoEncodeAccelerator::SubmitBuffer, base::Unretained(vea_),
+ VAEncPackedHeaderParameterBufferType,
+ MakeRefCountedBytes(&par_buffer, sizeof(par_buffer))));
- packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1);
- packed_pps_.AppendUE(current_pps_.num_ref_idx_l1_default_active_minus1);
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitH264BitstreamBuffer,
+ base::Unretained(vea_), packed_sps));
- packed_pps_.AppendBool(current_pps_.weighted_pred_flag);
- packed_pps_.AppendBits(2, current_pps_.weighted_bipred_idc);
+ // Submit PPS.
+ par_buffer = {};
+ par_buffer.type = VAEncPackedHeaderPicture;
+ par_buffer.bit_length = packed_pps->BytesInBuffer() * 8;
- packed_pps_.AppendSE(current_pps_.pic_init_qp_minus26);
- packed_pps_.AppendSE(current_pps_.pic_init_qs_minus26);
- packed_pps_.AppendSE(current_pps_.chroma_qp_index_offset);
+ job->AddSetupCallback(base::BindOnce(
+ &VaapiVideoEncodeAccelerator::SubmitBuffer, base::Unretained(vea_),
+ VAEncPackedHeaderParameterBufferType,
+ MakeRefCountedBytes(&par_buffer, sizeof(par_buffer))));
- packed_pps_.AppendBool(current_pps_.deblocking_filter_control_present_flag);
- packed_pps_.AppendBool(current_pps_.constrained_intra_pred_flag);
- packed_pps_.AppendBool(current_pps_.redundant_pic_cnt_present_flag);
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitH264BitstreamBuffer,
+ base::Unretained(vea_), packed_pps));
- packed_pps_.AppendBool(current_pps_.transform_8x8_mode_flag);
- packed_pps_.AppendBool(current_pps_.pic_scaling_matrix_present_flag);
- DCHECK(!current_pps_.pic_scaling_matrix_present_flag);
- packed_pps_.AppendSE(current_pps_.second_chroma_qp_index_offset);
+ return true;
+}
- packed_pps_.FinishNALU();
+scoped_refptr<VP8Picture>
+VaapiVideoEncodeAccelerator::VP8Accelerator::GetPicture(
+ AcceleratedVideoEncoder::EncodeJob* job) {
+ return base::MakeRefCounted<VaapiVP8Picture>(
+ job->AsVaapiEncodeJob()->reconstructed_surface());
}
-void VaapiVideoEncodeAccelerator::SetState(State state) {
- // Only touch state on encoder thread, unless it's not running.
- if (encoder_thread_.IsRunning() &&
- !encoder_thread_task_runner_->BelongsToCurrentThread()) {
- encoder_thread_task_runner_->PostTask(
- FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::SetState,
- base::Unretained(this), state));
- return;
+bool VaapiVideoEncodeAccelerator::VP8Accelerator::SubmitFrameParameters(
+ AcceleratedVideoEncoder::EncodeJob* job,
+ const media::VP8Encoder::EncodeParams& encode_params,
+ scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& ref_frames) {
+ VAEncSequenceParameterBufferVP8 seq_param = {};
+
+ const auto& frame_header = pic->frame_hdr;
+ seq_param.frame_width = frame_header->width;
+ seq_param.frame_height = frame_header->height;
+ seq_param.frame_width_scale = frame_header->horizontal_scale;
+ seq_param.frame_height_scale = frame_header->vertical_scale;
+ seq_param.error_resilient = 1;
+ seq_param.bits_per_second = encode_params.bitrate_bps;
+ seq_param.intra_period = encode_params.kf_period_frames;
+
+ VAEncPictureParameterBufferVP8 pic_param = {};
+
+ pic_param.reconstructed_frame = pic->AsVaapiVP8Picture()->GetVASurfaceID();
+ DCHECK_NE(pic_param.reconstructed_frame, VA_INVALID_ID);
+
+ auto last_frame = ref_frames.GetFrame(Vp8RefType::VP8_FRAME_LAST);
+ pic_param.ref_last_frame =
+ last_frame ? last_frame->AsVaapiVP8Picture()->GetVASurfaceID()
+ : VA_INVALID_ID;
+ auto golden_frame = ref_frames.GetFrame(Vp8RefType::VP8_FRAME_GOLDEN);
+ pic_param.ref_gf_frame =
+ golden_frame ? golden_frame->AsVaapiVP8Picture()->GetVASurfaceID()
+ : VA_INVALID_ID;
+ auto alt_frame = ref_frames.GetFrame(Vp8RefType::VP8_FRAME_ALTREF);
+ pic_param.ref_arf_frame =
+ alt_frame ? alt_frame->AsVaapiVP8Picture()->GetVASurfaceID()
+ : VA_INVALID_ID;
+ pic_param.coded_buf = job->AsVaapiEncodeJob()->coded_buffer_id();
+ DCHECK_NE(pic_param.coded_buf, VA_INVALID_ID);
+
+ if (frame_header->IsKeyframe())
+ pic_param.ref_flags.bits.force_kf = true;
+
+ pic_param.pic_flags.bits.frame_type = frame_header->frame_type;
+ pic_param.pic_flags.bits.version = frame_header->version;
+ pic_param.pic_flags.bits.show_frame = frame_header->show_frame;
+ pic_param.pic_flags.bits.loop_filter_type = frame_header->loopfilter_hdr.type;
+ pic_param.pic_flags.bits.num_token_partitions =
+ frame_header->num_of_dct_partitions;
+ pic_param.pic_flags.bits.segmentation_enabled =
+ frame_header->segmentation_hdr.segmentation_enabled;
+ pic_param.pic_flags.bits.update_mb_segmentation_map =
+ frame_header->segmentation_hdr.update_mb_segmentation_map;
+ pic_param.pic_flags.bits.update_segment_feature_data =
+ frame_header->segmentation_hdr.update_segment_feature_data;
+
+ pic_param.pic_flags.bits.loop_filter_adj_enable =
+ frame_header->loopfilter_hdr.loop_filter_adj_enable;
+
+ pic_param.pic_flags.bits.refresh_entropy_probs =
+ frame_header->refresh_entropy_probs;
+ pic_param.pic_flags.bits.refresh_golden_frame =
+ frame_header->refresh_golden_frame;
+ pic_param.pic_flags.bits.refresh_alternate_frame =
+ frame_header->refresh_alternate_frame;
+ pic_param.pic_flags.bits.refresh_last = frame_header->refresh_last;
+ pic_param.pic_flags.bits.copy_buffer_to_golden =
+ frame_header->copy_buffer_to_golden;
+ pic_param.pic_flags.bits.copy_buffer_to_alternate =
+ frame_header->copy_buffer_to_alternate;
+ pic_param.pic_flags.bits.sign_bias_golden = frame_header->sign_bias_golden;
+ pic_param.pic_flags.bits.sign_bias_alternate =
+ frame_header->sign_bias_alternate;
+ pic_param.pic_flags.bits.mb_no_coeff_skip = frame_header->mb_no_skip_coeff;
+ if (frame_header->IsKeyframe())
+ pic_param.pic_flags.bits.forced_lf_adjustment = true;
+
+ static_assert(
+ arraysize(pic_param.loop_filter_level) ==
+ arraysize(pic_param.ref_lf_delta) &&
+ arraysize(pic_param.ref_lf_delta) ==
+ arraysize(pic_param.mode_lf_delta) &&
+ arraysize(pic_param.ref_lf_delta) ==
+ arraysize(frame_header->loopfilter_hdr.ref_frame_delta) &&
+ arraysize(pic_param.mode_lf_delta) ==
+ arraysize(frame_header->loopfilter_hdr.mb_mode_delta),
+ "Invalid loop filter array sizes");
+
+ for (size_t i = 0; i < base::size(pic_param.loop_filter_level); ++i) {
+ pic_param.loop_filter_level[i] = frame_header->loopfilter_hdr.level;
+ pic_param.ref_lf_delta[i] = frame_header->loopfilter_hdr.ref_frame_delta[i];
+ pic_param.mode_lf_delta[i] = frame_header->loopfilter_hdr.mb_mode_delta[i];
}
- VLOGF(2) << "setting state to: " << state;
- state_ = state;
-}
+ pic_param.sharpness_level = frame_header->loopfilter_hdr.sharpness_level;
+ pic_param.clamp_qindex_high = encode_params.max_qp;
+ pic_param.clamp_qindex_low = encode_params.min_qp;
+
+ VAQMatrixBufferVP8 qmatrix_buf = {};
+ for (size_t i = 0; i < base::size(qmatrix_buf.quantization_index); ++i)
+ qmatrix_buf.quantization_index[i] = frame_header->quantization_hdr.y_ac_qi;
+
+ qmatrix_buf.quantization_index_delta[0] =
+ frame_header->quantization_hdr.y_dc_delta;
+ qmatrix_buf.quantization_index_delta[1] =
+ frame_header->quantization_hdr.y2_dc_delta;
+ qmatrix_buf.quantization_index_delta[2] =
+ frame_header->quantization_hdr.y2_ac_delta;
+ qmatrix_buf.quantization_index_delta[3] =
+ frame_header->quantization_hdr.uv_dc_delta;
+ qmatrix_buf.quantization_index_delta[4] =
+ frame_header->quantization_hdr.uv_ac_delta;
+
+ VAEncMiscParameterRateControl rate_control_param = {};
+ rate_control_param.bits_per_second = encode_params.bitrate_bps;
+ rate_control_param.target_percentage = kTargetBitratePercentage;
+ rate_control_param.window_size = encode_params.cpb_window_size_ms;
+ rate_control_param.initial_qp = encode_params.initial_qp;
+ rate_control_param.rc_flags.bits.disable_frame_skip = true;
-void VaapiVideoEncodeAccelerator::NotifyError(Error error) {
- if (!child_task_runner_->BelongsToCurrentThread()) {
- child_task_runner_->PostTask(
- FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::NotifyError,
- weak_this_, error));
- return;
- }
+ VAEncMiscParameterFrameRate framerate_param = {};
+ framerate_param.framerate = encode_params.framerate;
- if (client_) {
- client_->NotifyError(error);
- client_ptr_factory_.reset();
- }
-}
+ VAEncMiscParameterHRD hrd_param = {};
+ hrd_param.buffer_size = encode_params.cpb_size_bits;
+ hrd_param.initial_buffer_fullness = hrd_param.buffer_size / 2;
+
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
+ base::Unretained(vea_), VAEncSequenceParameterBufferType,
+ MakeRefCountedBytes(&seq_param, sizeof(seq_param))));
+
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
+ base::Unretained(vea_), VAEncPictureParameterBufferType,
+ MakeRefCountedBytes(&pic_param, sizeof(pic_param))));
-VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob()
- : coded_buffer(VA_INVALID_ID), keyframe(false) {}
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
+ base::Unretained(vea_), VAQMatrixBufferType,
+ MakeRefCountedBytes(&qmatrix_buf, sizeof(qmatrix_buf))));
-VaapiVideoEncodeAccelerator::EncodeJob::~EncodeJob() {}
+ job->AddSetupCallback(base::BindOnce(
+ &VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer,
+ base::Unretained(vea_), VAEncMiscParameterTypeRateControl,
+ MakeRefCountedBytes(&rate_control_param, sizeof(rate_control_param))));
+
+ job->AddSetupCallback(base::BindOnce(
+ &VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer,
+ base::Unretained(vea_), VAEncMiscParameterTypeFrameRate,
+ MakeRefCountedBytes(&framerate_param, sizeof(framerate_param))));
+
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer,
+ base::Unretained(vea_), VAEncMiscParameterTypeHRD,
+ MakeRefCountedBytes(&hrd_param, sizeof(hrd_param))));
+
+ return true;
+}
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
index 4d6754dcc81..bcd4fcc8d8f 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
@@ -8,24 +8,24 @@
#include <stddef.h>
#include <stdint.h>
-#include <list>
#include <memory>
#include "base/containers/queue.h"
#include "base/macros.h"
+#include "base/memory/ref_counted_memory.h"
#include "base/threading/thread.h"
#include "media/filters/h264_bitstream_buffer.h"
-#include "media/gpu/h264_dpb.h"
#include "media/gpu/media_gpu_export.h"
+#include "media/gpu/vaapi/accelerated_video_encoder.h"
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/video/video_encode_accelerator.h"
namespace media {
+class VaapiEncodeJob;
// A VideoEncodeAccelerator implementation that uses VA-API
-// (http://www.freedesktop.org/wiki/Software/vaapi) for HW-accelerated
-// video encode.
+// (https://01.org/vaapi) for HW-accelerated video encode.
class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
: public VideoEncodeAccelerator {
public:
@@ -48,36 +48,8 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
void Flush(FlushCallback flush_callback) override;
private:
- // Reference picture list.
- typedef std::list<scoped_refptr<VASurface>> RefPicList;
-
- // Encode job for one frame. Created when an input frame is awaiting and
- // enough resources are available to proceed. Once the job is prepared and
- // submitted to the hardware, it awaits on the submitted_encode_jobs_ queue
- // for an output bitstream buffer to become available. Once one is ready,
- // the encoded bytes are downloaded to it and job resources are released
- // and become available for reuse.
- struct EncodeJob {
- // Input surface for video frame data.
- scoped_refptr<VASurface> input_surface;
- // Surface for a reconstructed picture, which is used for reference
- // for subsequent frames.
- scoped_refptr<VASurface> recon_surface;
- // Buffer that will contain output bitstream for this frame.
- VABufferID coded_buffer;
- // Reference surfaces required to encode this picture. We keep references
- // to them here, because we may discard some of them from ref_pic_list*
- // before the HW job is done.
- RefPicList reference_surfaces;
- // True if this job will produce a keyframe. Used to report
- // to BitstreamBufferReady().
- bool keyframe;
- // Source timestamp.
- base::TimeDelta timestamp;
-
- EncodeJob();
- ~EncodeJob();
- };
+ class H264Accelerator;
+ class VP8Accelerator;
// Encoder state.
enum State {
@@ -91,98 +63,87 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// Holds output buffers coming from the client ready to be filled.
struct BitstreamBufferRef;
+ //
// Tasks for each of the VEA interface calls to be executed on the
// encoder thread.
- void InitializeTask();
- void EncodeTask(const scoped_refptr<VideoFrame>& frame, bool force_keyframe);
+ //
+ void InitializeTask(const gfx::Size& visible_size,
+ VideoCodecProfile profile,
+ uint32_t bitrate);
+
+ // Enqueues |frame| onto the queue of pending inputs and attempts to continue
+ // encoding.
+ void EncodeTask(scoped_refptr<VideoFrame> frame, bool force_keyframe);
+
+  // Maps |buffer_ref|, pushes it onto the available_bitstream_buffers_, and
+  // attempts to return any pending encoded data in it.
void UseOutputBitstreamBufferTask(
std::unique_ptr<BitstreamBufferRef> buffer_ref);
+
void RequestEncodingParametersChangeTask(uint32_t bitrate,
uint32_t framerate);
void DestroyTask();
void FlushTask();
- // Prepare and schedule an encode job if we have an input to encode
- // and enough resources to proceed.
- void EncodeFrameTask();
-
- // Fill current_sps_/current_pps_ with current values.
- void UpdateSPS();
- void UpdatePPS();
- void UpdateRates(uint32_t bitrate, uint32_t framerate);
-
- // Generate packed SPS and PPS in packed_sps_/packed_pps_, using
- // values in current_sps_/current_pps_.
- void GeneratePackedSPS();
- void GeneratePackedPPS();
-
- // Check if we have sufficient resources for a new encode job, claim them and
- // fill current_encode_job_ with them.
- // Return false if we cannot start a new job yet, true otherwise.
- bool PrepareNextJob(base::TimeDelta timestamp);
-
- // Begin a new frame, making it a keyframe if |force_keyframe| is true,
- // updating current_pic_.
- void BeginFrame(bool force_keyframe);
+ // Checks if sufficient resources for a new encode job with |frame| as input
+ // are available, and if so, claims them by associating them with
+ // a VaapiEncodeJob, and returns the newly-created job, nullptr otherwise.
+ scoped_refptr<VaapiEncodeJob> CreateEncodeJob(scoped_refptr<VideoFrame> frame,
+ bool force_keyframe);
- // End current frame, updating reference picture lists and storing current
- // job in the jobs awaiting completion on submitted_encode_jobs_.
- void EndFrame();
+ // Continues encoding frames as long as input_queue_ is not empty, and we are
+ // able to create new EncodeJobs.
+ void EncodePendingInputs();
- // Submit parameters for the current frame to the hardware.
- bool SubmitFrameParameters();
- // Submit keyframe headers to the hardware if the current frame is a keyframe.
- bool SubmitHeadersIfNeeded();
+ // Uploads image data from |frame| to |va_surface_id|.
+ void UploadFrame(scoped_refptr<VideoFrame> frame, VASurfaceID va_surface_id);
- // Upload image data from |frame| to the input surface for current job.
- bool UploadFrame(const scoped_refptr<VideoFrame>& frame);
-
- // Execute encode in hardware. This does not block and will return before
+ // Executes encode in hardware. This does not block and may return before
// the job is finished.
- bool ExecuteEncode();
+ void ExecuteEncode(VASurfaceID va_surface_id);
// Callback that returns a no longer used VASurfaceID to
// available_va_surface_ids_ for reuse.
void RecycleVASurfaceID(VASurfaceID va_surface_id);
- // Tries to return a bitstream buffer if both a submitted job awaits to
- // be completed and we have bitstream buffers from the client available
- // to download the encoded data to.
+  // Returns a bitstream buffer to the client if both a previously executed job
+  // is awaiting completion and we have bitstream buffers available to download
+  // the encoded data into.
void TryToReturnBitstreamBuffer();
- // Puts the encoder into en error state and notifies client about the error.
+ // Downloads encoded data produced as a result of running |encode_job| into
+ // |buffer|, and returns it to the client.
+ void ReturnBitstreamBuffer(scoped_refptr<VaapiEncodeJob> encode_job,
+ std::unique_ptr<BitstreamBufferRef> buffer);
+
+  // Puts the encoder into an error state and notifies the client
+  // about the error.
void NotifyError(Error error);
- // Sets the encoder state on the correct thread.
+ // Sets the encoder state to |state| on the correct thread.
void SetState(State state);
+ // Submits |buffer| of |type| to the driver.
+ void SubmitBuffer(VABufferType type,
+ scoped_refptr<base::RefCountedBytes> buffer);
+
+ // Submits a VAEncMiscParameterBuffer |buffer| of type |type| to the driver.
+ void SubmitVAEncMiscParamBuffer(VAEncMiscParameterType type,
+ scoped_refptr<base::RefCountedBytes> buffer);
+
+ // Submits a H264BitstreamBuffer |buffer| to the driver.
+ void SubmitH264BitstreamBuffer(scoped_refptr<H264BitstreamBuffer> buffer);
+
// VaapiWrapper is the owner of all HW resources (surfaces and buffers)
// and will free them on destruction.
scoped_refptr<VaapiWrapper> vaapi_wrapper_;
// Input profile and sizes.
- VideoCodecProfile profile_;
+ VideoCodec codec_;
gfx::Size visible_size_;
- gfx::Size coded_size_; // Macroblock-aligned.
- // Width/height in macroblocks.
- unsigned int mb_width_;
- unsigned int mb_height_;
-
- // Maximum size of the reference list 0.
- unsigned int max_ref_idx_l0_size_;
-
- // Initial QP.
- unsigned int qp_;
-
- // IDR frame period.
- unsigned int idr_period_;
- // I frame period.
- unsigned int i_period_;
- // IP period, i.e. how often do we need to have either an I or a P frame in
- // the stream. Period of 1 means we can have no B frames.
- unsigned int ip_period_;
-
- // Size in bytes required for input bitstream buffers.
+ gfx::Size coded_size_;
+
+ // Size in bytes required for output bitstream buffers.
size_t output_buffer_byte_size_;
// All of the members below must be accessed on the encoder_thread_,
@@ -191,34 +152,8 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// Encoder state. Encode tasks will only run in kEncoding state.
State state_;
- // frame_num to be used for the next frame.
- unsigned int frame_num_;
- // idr_pic_id to be used for the next frame.
- unsigned int idr_pic_id_;
-
- // Current bitrate in bps.
- unsigned int bitrate_;
- // Current fps.
- unsigned int framerate_;
- // CPB size in bits, i.e. bitrate in kbps * window size in ms/1000.
- unsigned int cpb_size_;
- // True if the parameters have changed and we need to submit a keyframe
- // with updated parameters.
- bool encoding_parameters_changed_;
-
- // Job currently being prepared for encode.
- std::unique_ptr<EncodeJob> current_encode_job_;
-
- // Current SPS, PPS and their packed versions. Packed versions are their NALUs
- // in AnnexB format *without* emulation prevention three-byte sequences
- // (those will be added by the driver).
- H264SPS current_sps_;
- H264BitstreamBuffer packed_sps_;
- H264PPS current_pps_;
- H264BitstreamBuffer packed_pps_;
-
- // Picture currently being prepared for encode.
- scoped_refptr<H264Picture> current_pic_;
+ // Encoder instance managing video codec state and preparing encode jobs.
+ std::unique_ptr<AcceleratedVideoEncoder> encoder_;
// VA surfaces available for reuse.
std::vector<VASurfaceID> available_va_surface_ids_;
@@ -226,22 +161,18 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// VA buffers for coded frames.
std::vector<VABufferID> available_va_buffer_ids_;
- // Currently active reference surfaces.
- RefPicList ref_pic_list0_;
-
// Callback via which finished VA surfaces are returned to us.
VASurface::ReleaseCB va_surface_release_cb_;
- // VideoFrames passed from the client, waiting to be encoded.
- base::queue<std::unique_ptr<InputFrameRef>> encoder_input_queue_;
+ // Queue of input frames to be encoded.
+ base::queue<std::unique_ptr<InputFrameRef>> input_queue_;
- // BitstreamBuffers mapped, ready to be filled.
+ // BitstreamBuffers mapped, ready to be filled with encoded stream data.
base::queue<std::unique_ptr<BitstreamBufferRef>> available_bitstream_buffers_;
- // Jobs submitted for encode, awaiting bitstream buffers to become available.
- // A pending flush command, indicated by a null job, will be also put in the
- // queue.
- base::queue<std::unique_ptr<EncodeJob>> submitted_encode_jobs_;
+ // Jobs submitted to driver for encode, awaiting bitstream buffers to become
+ // available.
+ base::queue<scoped_refptr<VaapiEncodeJob>> submitted_encode_jobs_;
// Encoder thread. All tasks are executed on it.
base::Thread encoder_thread_;
diff --git a/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc
index c66a9304603..2e763ebdd19 100644
--- a/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.cc
@@ -42,15 +42,13 @@ scoped_refptr<VP8Picture> VaapiVP8Accelerator::CreateVP8Picture() {
}
bool VaapiVP8Accelerator::SubmitDecode(
- const scoped_refptr<VP8Picture>& pic,
- const Vp8FrameHeader* frame_hdr,
- const scoped_refptr<VP8Picture>& last_frame,
- const scoped_refptr<VP8Picture>& golden_frame,
- const scoped_refptr<VP8Picture>& alt_frame) {
+ scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& reference_frames) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
VAIQMatrixBufferVP8 iq_matrix_buf;
memset(&iq_matrix_buf, 0, sizeof(VAIQMatrixBufferVP8));
+ const auto& frame_hdr = pic->frame_hdr;
const Vp8SegmentationHeader& sgmnt_hdr = frame_hdr->segmentation_hdr;
const Vp8QuantizationHeader& quant_hdr = frame_hdr->quantization_hdr;
static_assert(arraysize(iq_matrix_buf.quantization_index) == kMaxMBSegments,
@@ -98,6 +96,7 @@ bool VaapiVP8Accelerator::SubmitDecode(
pic_param.frame_width = frame_hdr->width;
pic_param.frame_height = frame_hdr->height;
+ const auto last_frame = reference_frames.GetFrame(Vp8RefType::VP8_FRAME_LAST);
if (last_frame) {
pic_param.last_ref_frame =
last_frame->AsVaapiVP8Picture()->GetVASurfaceID();
@@ -105,6 +104,8 @@ bool VaapiVP8Accelerator::SubmitDecode(
pic_param.last_ref_frame = VA_INVALID_SURFACE;
}
+ const auto golden_frame =
+ reference_frames.GetFrame(Vp8RefType::VP8_FRAME_GOLDEN);
if (golden_frame) {
pic_param.golden_ref_frame =
golden_frame->AsVaapiVP8Picture()->GetVASurfaceID();
@@ -112,6 +113,8 @@ bool VaapiVP8Accelerator::SubmitDecode(
pic_param.golden_ref_frame = VA_INVALID_SURFACE;
}
+ const auto alt_frame =
+ reference_frames.GetFrame(Vp8RefType::VP8_FRAME_ALTREF);
if (alt_frame) {
pic_param.alt_ref_frame = alt_frame->AsVaapiVP8Picture()->GetVASurfaceID();
} else {
diff --git a/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.h b/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.h
index 2d251a5b9c4..0889ab7d5fb 100644
--- a/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_vp8_accelerator.h
@@ -23,11 +23,8 @@ class VaapiVP8Accelerator : public VP8Decoder::VP8Accelerator {
// VP8Decoder::VP8Accelerator implementation.
scoped_refptr<VP8Picture> CreateVP8Picture() override;
- bool SubmitDecode(const scoped_refptr<VP8Picture>& pic,
- const Vp8FrameHeader* frame_hdr,
- const scoped_refptr<VP8Picture>& last_frame,
- const scoped_refptr<VP8Picture>& golden_frame,
- const scoped_refptr<VP8Picture>& alt_frame) override;
+ bool SubmitDecode(scoped_refptr<VP8Picture> picture,
+ const Vp8ReferenceFrameVector& reference_frames) override;
bool OutputPicture(const scoped_refptr<VP8Picture>& pic) override;
private:
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.cc b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
index c8ee25eeff7..4068c9f69f8 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
@@ -24,6 +24,8 @@
#include "base/sys_info.h"
#include "build/build_config.h"
+#include "media/base/media_switches.h"
+
// Auto-generated for dlopen libva libraries
#include "media/gpu/vaapi/va_stubs.h"
@@ -124,7 +126,7 @@ namespace {
// Maximum framerate of encoded profile. This value is an arbitary limit
// and not taken from HW documentation.
-const int kMaxEncoderFramerate = 30;
+constexpr int kMaxEncoderFramerate = 30;
// A map between VideoCodecProfile and VAProfile.
static const struct {
@@ -176,6 +178,14 @@ bool IsBlackListedDriver(const std::string& va_vendor_string,
return true;
}
}
+
+ // TODO(posciak): Remove once VP8 encoding is to be enabled by default.
+ if (mode == VaapiWrapper::CodecMode::kEncode &&
+ va_profile == VAProfileVP8Version0_3 &&
+ !base::FeatureList::IsEnabled(kVaapiVP8Encoder)) {
+ return true;
+ }
+
return false;
}
@@ -925,7 +935,7 @@ bool VaapiWrapper::SubmitBuffer(VABufferType va_buffer_type,
bool VaapiWrapper::SubmitVAEncMiscParamBuffer(
VAEncMiscParameterType misc_param_type,
size_t size,
- void* buffer) {
+ const void* buffer) {
base::AutoLock auto_lock(*va_lock_);
VABufferID buffer_id;
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.h b/chromium/media/gpu/vaapi/vaapi_wrapper.h
index f77ced9968f..1dd6a3cf29e 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.h
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.h
@@ -132,7 +132,7 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// DestroyPendingBuffers() is used to cancel a pending job.
bool SubmitVAEncMiscParamBuffer(VAEncMiscParameterType misc_param_type,
size_t size,
- void* buffer);
+ const void* buffer);
// Cancel and destroy all buffers queued to the HW codec via SubmitBuffer().
// Useful when a pending job is to be cancelled (on reset or error).
diff --git a/chromium/media/gpu/vaapi/vp8_encoder.cc b/chromium/media/gpu/vaapi/vp8_encoder.cc
new file mode 100644
index 00000000000..a7b8cd413f5
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vp8_encoder.cc
@@ -0,0 +1,181 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/vp8_encoder.h"
+
+#include "base/bits.h"
+
+#define DVLOGF(level) DVLOG(level) << __func__ << "(): "
+
+namespace media {
+
+namespace {
+// Keyframe period.
+const size_t kKFPeriod = 3000;
+
+// Arbitrarily chosen bitrate window size for rate control, in ms.
+const int kCPBWindowSizeMs = 1500;
+
+// Based on WebRTC's defaults.
+const int kMinQP = 4;
+const int kMaxQP = 112;
+const int kDefaultQP = (3 * kMinQP + kMaxQP) / 4;
+} // namespace
+
+VP8Encoder::EncodeParams::EncodeParams()
+ : kf_period_frames(kKFPeriod),
+ bitrate_bps(0),
+ framerate(0),
+ cpb_window_size_ms(kCPBWindowSizeMs),
+ cpb_size_bits(0),
+ initial_qp(kDefaultQP),
+ min_qp(kMinQP),
+ max_qp(kMaxQP),
+ error_resilient_mode(false) {}
+
+void VP8Encoder::Reset() {
+ current_params_ = EncodeParams();
+ reference_frames_.Clear();
+ frame_num_ = 0;
+
+ InitializeFrameHeader();
+}
+
+VP8Encoder::VP8Encoder(std::unique_ptr<Accelerator> accelerator)
+ : accelerator_(std::move(accelerator)) {}
+
+VP8Encoder::~VP8Encoder() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+}
+
+bool VP8Encoder::Initialize(const gfx::Size& visible_size,
+ VideoCodecProfile profile,
+ uint32_t initial_bitrate,
+ uint32_t initial_framerate) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(profile >= VP8PROFILE_MIN && profile <= VP8PROFILE_MAX);
+
+ DCHECK(!visible_size.IsEmpty());
+ // 4:2:0 format has to be 2-aligned.
+ DCHECK_EQ(visible_size.width() % 2, 0);
+ DCHECK_EQ(visible_size.height() % 2, 0);
+
+ visible_size_ = visible_size;
+ coded_size_ = gfx::Size(base::bits::Align(visible_size_.width(), 16),
+ base::bits::Align(visible_size_.height(), 16));
+
+ Reset();
+
+ return UpdateRates(initial_bitrate, initial_framerate);
+}
+
+gfx::Size VP8Encoder::GetCodedSize() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(!coded_size_.IsEmpty());
+
+ return coded_size_;
+}
+
+size_t VP8Encoder::GetBitstreamBufferSize() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(!coded_size_.IsEmpty());
+
+ return coded_size_.GetArea();
+}
+
+size_t VP8Encoder::GetMaxNumOfRefFrames() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ return kNumVp8ReferenceBuffers;
+}
+
+bool VP8Encoder::PrepareEncodeJob(EncodeJob* encode_job) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ if (encode_job->IsKeyframeRequested())
+ frame_num_ = 0;
+
+ if (frame_num_ == 0)
+ encode_job->ProduceKeyframe();
+
+ frame_num_++;
+ frame_num_ %= current_params_.kf_period_frames;
+
+ scoped_refptr<VP8Picture> picture = accelerator_->GetPicture(encode_job);
+ DCHECK(picture);
+
+ UpdateFrameHeader(encode_job->IsKeyframeRequested());
+ *picture->frame_hdr = current_frame_hdr_;
+
+ if (!accelerator_->SubmitFrameParameters(encode_job, current_params_, picture,
+ reference_frames_)) {
+ LOG(ERROR) << "Failed submitting frame parameters";
+ return false;
+ }
+
+ UpdateReferenceFrames(picture);
+ return true;
+}
+
+bool VP8Encoder::UpdateRates(uint32_t bitrate, uint32_t framerate) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ if (bitrate == 0 || framerate == 0)
+ return false;
+
+ if (current_params_.bitrate_bps == bitrate &&
+ current_params_.framerate == framerate) {
+ return true;
+ }
+
+ current_params_.bitrate_bps = bitrate;
+ current_params_.framerate = framerate;
+
+ current_params_.cpb_size_bits =
+ current_params_.bitrate_bps * current_params_.cpb_window_size_ms / 1000;
+
+ return true;
+}
+
+void VP8Encoder::InitializeFrameHeader() {
+ current_frame_hdr_ = {};
+ DCHECK(!visible_size_.IsEmpty());
+ current_frame_hdr_.width = visible_size_.width();
+ current_frame_hdr_.height = visible_size_.height();
+ current_frame_hdr_.quantization_hdr.y_ac_qi = current_params_.initial_qp;
+ current_frame_hdr_.show_frame = true;
+ // TODO(sprang): Make this dynamic. Value based on reference implementation
+ // in libyami (https://github.com/intel/libyami).
+ current_frame_hdr_.loopfilter_hdr.level = 19;
+}
+
+void VP8Encoder::UpdateFrameHeader(bool keyframe) {
+ current_frame_hdr_.frame_type =
+ keyframe ? Vp8FrameHeader::KEYFRAME : Vp8FrameHeader::INTERFRAME;
+}
+
+void VP8Encoder::UpdateReferenceFrames(scoped_refptr<VP8Picture> picture) {
+ if (current_frame_hdr_.IsKeyframe()) {
+ current_frame_hdr_.refresh_last = true;
+ current_frame_hdr_.refresh_golden_frame = true;
+ current_frame_hdr_.refresh_alternate_frame = true;
+ current_frame_hdr_.copy_buffer_to_golden =
+ Vp8FrameHeader::NO_GOLDEN_REFRESH;
+ current_frame_hdr_.copy_buffer_to_alternate =
+ Vp8FrameHeader::NO_ALT_REFRESH;
+ } else {
+ // TODO(sprang): Add temporal layer support.
+ current_frame_hdr_.refresh_last = true;
+ current_frame_hdr_.refresh_golden_frame = false;
+ current_frame_hdr_.refresh_alternate_frame = false;
+ current_frame_hdr_.copy_buffer_to_golden =
+ Vp8FrameHeader::COPY_LAST_TO_GOLDEN;
+ current_frame_hdr_.copy_buffer_to_alternate =
+ Vp8FrameHeader::COPY_GOLDEN_TO_ALT;
+ }
+
+ reference_frames_.Refresh(picture);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/vp8_encoder.h b/chromium/media/gpu/vaapi/vp8_encoder.h
new file mode 100644
index 00000000000..66abde9ce8d
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vp8_encoder.h
@@ -0,0 +1,109 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_VAAPI_VP8_ENCODER_H_
+#define MEDIA_GPU_VAAPI_VP8_ENCODER_H_
+
+#include <list>
+#include <vector>
+
+#include "base/macros.h"
+#include "base/sequence_checker.h"
+#include "media/filters/vp8_parser.h"
+#include "media/gpu/vaapi/accelerated_video_encoder.h"
+#include "media/gpu/vp8_picture.h"
+#include "media/gpu/vp8_reference_frame_vector.h"
+
+namespace media {
+
+class VP8Encoder : public AcceleratedVideoEncoder {
+ public:
+ struct EncodeParams {
+ EncodeParams();
+
+ // Produce a keyframe at least once per this many frames.
+ size_t kf_period_frames;
+
+ // Bitrate in bps.
+ uint32_t bitrate_bps;
+
+ // Framerate in FPS.
+ uint32_t framerate;
+
+ // Bitrate window size in ms.
+ unsigned int cpb_window_size_ms;
+
+ // Coded picture buffer size in bits.
+ unsigned int cpb_size_bits;
+
+ int initial_qp;
+ int min_qp;
+ int max_qp;
+
+ bool error_resilient_mode;
+ };
+
+ // An accelerator interface. The client must provide an appropriate
+ // implementation on creation.
+ class Accelerator {
+ public:
+ Accelerator() = default;
+ virtual ~Accelerator() = default;
+
+ // Returns the VP8Picture to be used as output for |job|.
+ virtual scoped_refptr<VP8Picture> GetPicture(EncodeJob* job) = 0;
+
+ // Initializes |job| to use the provided |encode_params| as its parameters,
+ // and |pic| as the target, as well as |ref_frames| as reference frames for
+ // it. Returns true on success.
+ virtual bool SubmitFrameParameters(
+ EncodeJob* job,
+ const media::VP8Encoder::EncodeParams& encode_params,
+ scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& ref_frames) = 0;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(Accelerator);
+ };
+
+ explicit VP8Encoder(std::unique_ptr<Accelerator> accelerator);
+ ~VP8Encoder() override;
+
+ // AcceleratedVideoEncoder implementation.
+ bool Initialize(const gfx::Size& visible_size,
+ VideoCodecProfile profile,
+ uint32_t initial_bitrate,
+ uint32_t initial_framerate) override;
+ bool UpdateRates(uint32_t bitrate, uint32_t framerate) override;
+ gfx::Size GetCodedSize() const override;
+ size_t GetBitstreamBufferSize() const override;
+ size_t GetMaxNumOfRefFrames() const override;
+ bool PrepareEncodeJob(EncodeJob* encode_job) override;
+
+ private:
+ void InitializeFrameHeader();
+ void UpdateFrameHeader(bool keyframe);
+ void UpdateReferenceFrames(scoped_refptr<VP8Picture> picture);
+ void Reset();
+
+ gfx::Size visible_size_;
+ gfx::Size coded_size_; // Macroblock-aligned.
+
+ // Frame count since last keyframe, reset to 0 every keyframe period.
+ size_t frame_num_ = 0;
+
+ EncodeParams current_params_;
+
+ Vp8FrameHeader current_frame_hdr_;
+ Vp8ReferenceFrameVector reference_frames_;
+
+ const std::unique_ptr<Accelerator> accelerator_;
+
+ SEQUENCE_CHECKER(sequence_checker_);
+ DISALLOW_COPY_AND_ASSIGN(VP8Encoder);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_VAAPI_VP8_ENCODER_H_
diff --git a/chromium/media/gpu/video_accelerator_unittest_helpers.h b/chromium/media/gpu/video_accelerator_unittest_helpers.h
deleted file mode 100644
index 9e0958a0a90..00000000000
--- a/chromium/media/gpu/video_accelerator_unittest_helpers.h
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-//
-// This file contains helper classes for video accelerator unittests.
-
-#ifndef MEDIA_GPU_VIDEO_ACCELERATOR_UNITTEST_HELPERS_H_
-#define MEDIA_GPU_VIDEO_ACCELERATOR_UNITTEST_HELPERS_H_
-
-#include "base/containers/queue.h"
-#include "base/synchronization/condition_variable.h"
-#include "base/synchronization/lock.h"
-
-namespace media {
-
-// Helper class allowing one thread to wait on a notification from another.
-// If notifications come in faster than they are Wait()'d for, they are
-// accumulated (so exactly as many Wait() calls will unblock as Notify() calls
-// were made, regardless of order).
-template <typename StateEnum>
-class ClientStateNotification {
- public:
- ClientStateNotification();
- ~ClientStateNotification();
-
- // Used to notify a single waiter of a ClientState.
- void Notify(StateEnum state);
- // Used by waiters to wait for the next ClientState Notification.
- StateEnum Wait();
-
- private:
- base::Lock lock_;
- base::ConditionVariable cv_;
- base::queue<StateEnum> pending_states_for_notification_;
-};
-
-template <typename StateEnum>
-ClientStateNotification<StateEnum>::ClientStateNotification() : cv_(&lock_) {}
-
-template <typename StateEnum>
-ClientStateNotification<StateEnum>::~ClientStateNotification() {}
-
-template <typename StateEnum>
-void ClientStateNotification<StateEnum>::Notify(StateEnum state) {
- base::AutoLock auto_lock(lock_);
- pending_states_for_notification_.push(state);
- cv_.Signal();
-}
-
-template <typename StateEnum>
-StateEnum ClientStateNotification<StateEnum>::Wait() {
- base::AutoLock auto_lock(lock_);
- while (pending_states_for_notification_.empty())
- cv_.Wait();
- StateEnum ret = pending_states_for_notification_.front();
- pending_states_for_notification_.pop();
- return ret;
-}
-
-} // namespace media
-
-#endif // MEDIA_GPU_VIDEO_ACCELERATOR_UNITTEST_HELPERS_H_
diff --git a/chromium/media/gpu/video_decode_accelerator_unittest.cc b/chromium/media/gpu/video_decode_accelerator_unittest.cc
index f66165da266..871c0378c5a 100644
--- a/chromium/media/gpu/video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/video_decode_accelerator_unittest.cc
@@ -63,8 +63,9 @@
#include "media/gpu/fake_video_decode_accelerator.h"
#include "media/gpu/format_utils.h"
#include "media/gpu/gpu_video_decode_accelerator_factory.h"
-#include "media/gpu/rendering_helper.h"
-#include "media/gpu/video_accelerator_unittest_helpers.h"
+#include "media/gpu/test/rendering_helper.h"
+#include "media/gpu/test/video_accelerator_unittest_helpers.h"
+#include "media/gpu/test/video_decode_accelerator_unittest_helpers.h"
#include "media/video/h264_parser.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/codec/png_codec.h"
@@ -79,10 +80,7 @@
#endif // BUILDFLAG(USE_VAAPI)
#if defined(OS_CHROMEOS)
-#include "ui/gfx/native_pixmap.h"
-#include "ui/ozone/public/ozone_gpu_test_helper.h"
#include "ui/ozone/public/ozone_platform.h"
-#include "ui/ozone/public/surface_factory_ozone.h"
#endif // defined(OS_CHROMEOS)
namespace media {
@@ -124,9 +122,10 @@ bool g_use_gl_renderer = true;
// the specified number of times. In different test cases, we have different
// values for |num_play_throughs|. This setting will override the value. A
// special value "0" means no override.
-int g_num_play_throughs = 0;
+size_t g_num_play_throughs = 0;
+
// Fake decode
-int g_fake_decoder = 0;
+bool g_fake_decoder = 0;
// Test buffer import into VDA, providing buffers allocated by us, instead of
// requesting the VDA itself to allocate buffers.
@@ -141,58 +140,83 @@ base::FilePath g_test_file_path;
base::FilePath g_thumbnail_output_dir;
// Environment to store rendering thread.
-class VideoDecodeAcceleratorTestEnvironment;
-VideoDecodeAcceleratorTestEnvironment* g_env;
+media::test::VideoDecodeAcceleratorTestEnvironment* g_env;
+
+constexpr size_t kMaxResetAfterFrameNum = 100;
+constexpr size_t kMaxFramesToDelayReuse = 64;
+const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
+// Simulate WebRTC and call VDA::Decode 30 times per second.
+constexpr size_t kWebRtcDecodeCallsPerSecond = 30;
+// Simulate an adjustment to a larger number of pictures to make sure the
+// decoder supports an upwards adjustment.
+constexpr size_t kExtraPictureBuffers = 2;
+constexpr size_t kNoMidStreamReset = std::numeric_limits<size_t>::max();
+
+const gfx::Size kThumbnailsPageSize(1600, 1200);
+const gfx::Size kThumbnailSize(160, 120);
+
+// We assert a minimal number of concurrent decoders we expect to succeed.
+// Different platforms can support more concurrent decoders, so we don't assert
+// failure above this.
+constexpr size_t kMinSupportedNumConcurrentDecoders = 3;
// Magic constants for differentiating the reasons for NotifyResetDone being
// called.
enum ResetPoint {
// Reset() right after calling Flush() (before getting NotifyFlushDone()).
- RESET_BEFORE_NOTIFY_FLUSH_DONE = -5,
+ RESET_BEFORE_NOTIFY_FLUSH_DONE,
// Reset() just after calling Decode() with a fragment containing config info.
- RESET_AFTER_FIRST_CONFIG_INFO = -4,
- START_OF_STREAM_RESET = -3,
- MID_STREAM_RESET = -2,
- END_OF_STREAM_RESET = -1
+ RESET_AFTER_FIRST_CONFIG_INFO,
+ // Reset() just after finishing Initialize().
+ START_OF_STREAM_RESET,
+ // Reset() after a specific number of Decode() are executed.
+ MID_STREAM_RESET,
+ // Reset() after NotifyFlushDone().
+ END_OF_STREAM_RESET,
+ // The state reached once the Reset() triggered by
+ // RESET_AFTER_FIRST_CONFIG_INFO has been executed.
+ DONE_RESET_AFTER_FIRST_CONFIG_INFO,
};
-const int kMaxResetAfterFrameNum = 100;
-const int kMaxFramesToDelayReuse = 64;
-const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
-// Simulate WebRTC and call VDA::Decode 30 times per second.
-const int kWebRtcDecodeCallsPerSecond = 30;
-// Simulate an adjustment to a larger number of pictures to make sure the
-// decoder supports an upwards adjustment.
-const int kExtraPictureBuffers = 2;
+// State of the GLRenderingVDAClient below. Order matters here as the test
+// makes assumptions about it.
+enum ClientState {
+ CS_CREATED = 0,
+ CS_DECODER_SET = 1,
+ CS_INITIALIZED = 2,
+ CS_FLUSHING = 3,
+ CS_FLUSHED = 4,
+ CS_RESETTING = 5,
+ CS_RESET = 6,
+ CS_ERROR = 7,
+ CS_DESTROYED = 8,
+ CS_MAX, // Must be last entry.
+};
struct TestVideoFile {
explicit TestVideoFile(base::FilePath::StringType file_name)
: file_name(file_name),
- width(-1),
- height(-1),
- num_frames(-1),
- num_fragments(-1),
- min_fps_render(-1),
- min_fps_no_render(-1),
+ width(0),
+ height(0),
+ num_frames(0),
+ num_fragments(0),
+ min_fps_render(0),
+ min_fps_no_render(0),
profile(VIDEO_CODEC_PROFILE_UNKNOWN),
- reset_after_frame_num(END_OF_STREAM_RESET) {}
+ reset_after_frame_num(std::numeric_limits<size_t>::max()) {}
base::FilePath::StringType file_name;
int width;
int height;
- int num_frames;
- int num_fragments;
- int min_fps_render;
- int min_fps_no_render;
+ size_t num_frames;
+ size_t num_fragments;
+ double min_fps_render;
+ double min_fps_no_render;
VideoCodecProfile profile;
- int reset_after_frame_num;
+ size_t reset_after_frame_num;
std::string data_str;
};
-const gfx::Size kThumbnailsPageSize(1600, 1200);
-const gfx::Size kThumbnailSize(160, 120);
-const int kMD5StringLength = 32;
-
base::FilePath GetTestDataFile(const base::FilePath& input_file) {
if (input_file.IsAbsolute())
return input_file;
@@ -206,186 +230,6 @@ base::FilePath GetTestDataFile(const base::FilePath& input_file) {
return abs_path;
}
-// Read in golden MD5s for the thumbnailed rendering of this video
-void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
- std::vector<std::string>* md5_strings) {
- base::FilePath filepath(video_file->file_name);
- filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
- std::string all_md5s;
- base::ReadFileToString(GetTestDataFile(filepath), &all_md5s);
- *md5_strings = base::SplitString(all_md5s, "\n", base::TRIM_WHITESPACE,
- base::SPLIT_WANT_ALL);
- // Check these are legitimate MD5s.
- for (const std::string& md5_string : *md5_strings) {
- // Ignore the empty string added by SplitString
- if (!md5_string.length())
- continue;
- // Ignore comments
- if (md5_string.at(0) == '#')
- continue;
-
- LOG_IF(ERROR, static_cast<int>(md5_string.length()) != kMD5StringLength)
- << "MD5 length error: " << md5_string;
- bool hex_only = std::count_if(md5_string.begin(), md5_string.end(),
- isxdigit) == kMD5StringLength;
- LOG_IF(ERROR, !hex_only) << "MD5 includes non-hex char: " << md5_string;
- }
- LOG_IF(ERROR, md5_strings->empty()) << " MD5 checksum file ("
- << filepath.MaybeAsASCII()
- << ") missing or empty.";
-}
-
-// State of the GLRenderingVDAClient below. Order matters here as the test
-// makes assumptions about it.
-enum ClientState {
- CS_CREATED = 0,
- CS_DECODER_SET = 1,
- CS_INITIALIZED = 2,
- CS_FLUSHING = 3,
- CS_FLUSHED = 4,
- CS_RESETTING = 5,
- CS_RESET = 6,
- CS_ERROR = 7,
- CS_DESTROYED = 8,
- CS_MAX, // Must be last entry.
-};
-
-// Initialize the GPU thread for rendering. We only need to setup once
-// for all test cases.
-class VideoDecodeAcceleratorTestEnvironment : public ::testing::Environment {
- public:
- VideoDecodeAcceleratorTestEnvironment()
- : rendering_thread_("GLRenderingVDAClientThread") {}
-
- void SetUp() override {
- base::Thread::Options options;
- options.message_loop_type = base::MessageLoop::TYPE_UI;
- rendering_thread_.StartWithOptions(options);
-
- base::WaitableEvent done(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
- rendering_thread_.task_runner()->PostTask(
- FROM_HERE, base::Bind(&RenderingHelper::InitializeOneOff,
- g_use_gl_renderer, &done));
- done.Wait();
-
-#if defined(OS_CHROMEOS)
- gpu_helper_.reset(new ui::OzoneGpuTestHelper());
- // Need to initialize after the rendering side since the rendering side
- // initializes the "GPU" parts of Ozone.
- //
- // This also needs to be done in the test environment since this shouldn't
- // be initialized multiple times for the same Ozone platform.
- gpu_helper_->Initialize(base::ThreadTaskRunnerHandle::Get());
-#endif
- }
-
- void TearDown() override {
-#if defined(OS_CHROMEOS)
- gpu_helper_.reset();
-#endif
- rendering_thread_.Stop();
- }
-
- scoped_refptr<base::SingleThreadTaskRunner> GetRenderingTaskRunner() const {
- return rendering_thread_.task_runner();
- }
-
- private:
- base::Thread rendering_thread_;
-#if defined(OS_CHROMEOS)
- std::unique_ptr<ui::OzoneGpuTestHelper> gpu_helper_;
-#endif
-
- DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTestEnvironment);
-};
-
-// A helper class used to manage the lifetime of a Texture. Can be backed by
-// either a buffer allocated by the VDA, or by a preallocated pixmap.
-class TextureRef : public base::RefCounted<TextureRef> {
- public:
- static scoped_refptr<TextureRef> Create(
- uint32_t texture_id,
- const base::Closure& no_longer_needed_cb);
-
- static scoped_refptr<TextureRef> CreatePreallocated(
- uint32_t texture_id,
- const base::Closure& no_longer_needed_cb,
- VideoPixelFormat pixel_format,
- const gfx::Size& size);
-
- gfx::GpuMemoryBufferHandle ExportGpuMemoryBufferHandle() const;
-
- int32_t texture_id() const { return texture_id_; }
-
- private:
- friend class base::RefCounted<TextureRef>;
-
- TextureRef(uint32_t texture_id, const base::Closure& no_longer_needed_cb)
- : texture_id_(texture_id), no_longer_needed_cb_(no_longer_needed_cb) {}
-
- ~TextureRef();
-
- uint32_t texture_id_;
- base::Closure no_longer_needed_cb_;
-#if defined(OS_CHROMEOS)
- scoped_refptr<gfx::NativePixmap> pixmap_;
-#endif
-};
-
-TextureRef::~TextureRef() {
- base::ResetAndReturn(&no_longer_needed_cb_).Run();
-}
-
-// static
-scoped_refptr<TextureRef> TextureRef::Create(
- uint32_t texture_id,
- const base::Closure& no_longer_needed_cb) {
- return base::WrapRefCounted(new TextureRef(texture_id, no_longer_needed_cb));
-}
-
-// static
-scoped_refptr<TextureRef> TextureRef::CreatePreallocated(
- uint32_t texture_id,
- const base::Closure& no_longer_needed_cb,
- VideoPixelFormat pixel_format,
- const gfx::Size& size) {
- scoped_refptr<TextureRef> texture_ref;
-#if defined(OS_CHROMEOS)
- texture_ref = TextureRef::Create(texture_id, no_longer_needed_cb);
- LOG_ASSERT(texture_ref);
-
- ui::OzonePlatform* platform = ui::OzonePlatform::GetInstance();
- ui::SurfaceFactoryOzone* factory = platform->GetSurfaceFactoryOzone();
- gfx::BufferFormat buffer_format =
- VideoPixelFormatToGfxBufferFormat(pixel_format);
- texture_ref->pixmap_ =
- factory->CreateNativePixmap(gfx::kNullAcceleratedWidget, size,
- buffer_format, gfx::BufferUsage::SCANOUT);
- LOG_ASSERT(texture_ref->pixmap_);
-#endif
-
- return texture_ref;
-}
-
-gfx::GpuMemoryBufferHandle TextureRef::ExportGpuMemoryBufferHandle() const {
- gfx::GpuMemoryBufferHandle handle;
-#if defined(OS_CHROMEOS)
- CHECK(pixmap_);
- handle.type = gfx::NATIVE_PIXMAP;
- for (size_t i = 0; i < pixmap_->GetDmaBufFdCount(); i++) {
- int duped_fd = HANDLE_EINTR(dup(pixmap_->GetDmaBufFd(i)));
- LOG_ASSERT(duped_fd != -1) << "Failed duplicating dmabuf fd";
- handle.native_pixmap_handle.fds.emplace_back(
- base::FileDescriptor(duped_fd, true));
- handle.native_pixmap_handle.planes.emplace_back(
- pixmap_->GetDmaBufPitch(i), pixmap_->GetDmaBufOffset(i), i,
- pixmap_->GetDmaBufModifier(i));
- }
-#endif
- return handle;
-}
-
// Client that can accept callbacks from a VideoDecodeAccelerator and is used by
// the TESTs below.
class GLRenderingVDAClient
@@ -393,37 +237,53 @@ class GLRenderingVDAClient
public base::SupportsWeakPtr<GLRenderingVDAClient> {
public:
// |window_id| the window_id of the client, which is used to identify the
- // rendering area in the |rendering_helper|.
- // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
- // |*this|.
+ // rendering area in the |rendering_helper_|.
+ // |num_in_flight_decodes| is the number of concurrent in-flight Decode()
+ // calls per decoder.
// |num_play_throughs| indicates how many times to play through the video.
+ // |reset_point| indicates the timing of executing Reset().
// |reset_after_frame_num| can be a frame number >=0 indicating a mid-stream
- // Reset() should be done after that frame number is delivered, or
- // END_OF_STREAM_RESET to indicate no mid-stream Reset().
+ // Reset() should be done. This member is only meaningful when
+ // |reset_point| == MID_STREAM_RESET, in which case it must be a valid
+ // frame number; otherwise it must be kNoMidStreamReset.
// |delete_decoder_state| indicates when the underlying decoder should be
// Destroy()'d and deleted and can take values: N<0: delete after -N Decode()
// calls have been made, N>=0 means interpret as ClientState.
// Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
// last play-through (governed by |num_play_throughs|).
+ // |frame_size| is the frame size of the video file.
+ // |profile| is video codec profile of the video file.
+ // |fake_decoder| indicates whether |decoder_| will be a FakeVideoDecodeAccelerator.
// After |delay_reuse_after_frame_num| frame has been delivered, the client
// will start delaying the call to ReusePictureBuffer() for kReuseDelay.
// |decode_calls_per_second| is the number of VDA::Decode calls per second.
// If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
- GLRenderingVDAClient(size_t window_id,
+ // |render_as_thumbnails| indicates if the decoded picture will be rendered
+ // as thumbnails at the end of tests.
+ struct Config {
+ size_t window_id = 0;
+ size_t num_in_flight_decodes = 1;
+ size_t num_play_throughs = 1;
+ ResetPoint reset_point = END_OF_STREAM_RESET;
+ size_t reset_after_frame_num = kNoMidStreamReset;
+ // TODO(hiroh): Refactor as delete_decoder_state can be enum class.
+ // This can be set to not only ClientState, but also an integer in
+ // TearDownTiming test case.
+ int delete_decoder_state = CS_RESET;
+ gfx::Size frame_size;
+ VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
+ bool fake_decoder = false;
+ size_t delay_reuse_after_frame_num = std::numeric_limits<size_t>::max();
+ size_t decode_calls_per_second = 0;
+ bool render_as_thumbnails = false;
+ };
+
+ // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
+ // |*this|.
+ GLRenderingVDAClient(Config config,
+ std::string encoded_data,
RenderingHelper* rendering_helper,
- ClientStateNotification<ClientState>* note,
- const std::string& encoded_data,
- int num_in_flight_decodes,
- int num_play_throughs,
- int reset_after_frame_num,
- int delete_decoder_state,
- int frame_width,
- int frame_height,
- VideoCodecProfile profile,
- int fake_decoder,
- int delay_reuse_after_frame_num,
- int decode_calls_per_second,
- bool render_as_thumbnails);
+ ClientStateNotification<ClientState>* note);
~GLRenderingVDAClient() override;
void CreateAndStartDecoder();
@@ -445,17 +305,20 @@ class GLRenderingVDAClient
void OutputFrameDeliveryTimes(base::File* output);
// Simple getters for inspecting the state of the Client.
- int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
- int num_skipped_fragments() { return num_skipped_fragments_; }
- int num_queued_fragments() { return num_queued_fragments_; }
- int num_decoded_frames() { return num_decoded_frames_; }
+ size_t num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
+ size_t num_skipped_fragments() {
+ return encoded_data_helper_->num_skipped_fragments();
+ }
+ size_t num_queued_fragments() { return num_queued_fragments_; }
+ size_t num_decoded_frames() { return num_decoded_frames_; }
double frames_per_second();
// Return the median of the decode time of all decoded frames.
base::TimeDelta decode_time_median();
bool decoder_deleted() { return !decoder_.get(); }
private:
- typedef std::map<int32_t, scoped_refptr<TextureRef>> TextureRefMap;
+ typedef std::map<int32_t, scoped_refptr<media::test::TextureRef>>
+ TextureRefMap;
void SetState(ClientState new_state);
void FinishInitialization();
@@ -466,57 +329,34 @@ class GLRenderingVDAClient
// Reset the associated decoder after flushing.
void ResetDecoderAfterFlush();
- // Compute & return the first encoded bytes (including a start frame) to send
- // to the decoder, starting at |start_pos| and returning one fragment. Skips
- // to the first decodable position.
- std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
- // Compute & return the encoded bytes of next fragment to send to the decoder
- // (based on |start_pos|).
- std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
- // Helpers for GetBytesForNextFragment above.
- void GetBytesForNextNALU(size_t start_pos, size_t* end_pos); // For h.264.
- std::string GetBytesForNextFrame(size_t start_pos,
- size_t* end_pos); // For VP8/9.
-
// Request decode of the next fragment in the encoded data.
void DecodeNextFragment();
- size_t window_id_;
- RenderingHelper* rendering_helper_;
+ const Config config_;
+ RenderingHelper* const rendering_helper_;
gfx::Size frame_size_;
- std::string encoded_data_;
- const int num_in_flight_decodes_;
- int outstanding_decodes_;
- size_t encoded_data_next_pos_to_decode_;
+ size_t outstanding_decodes_;
int next_bitstream_buffer_id_;
- ClientStateNotification<ClientState>* note_;
+ ClientStateNotification<ClientState>* const note_;
std::unique_ptr<VideoDecodeAccelerator> decoder_;
base::WeakPtr<VideoDecodeAccelerator> weak_vda_;
std::unique_ptr<base::WeakPtrFactory<VideoDecodeAccelerator>>
weak_vda_ptr_factory_;
std::unique_ptr<GpuVideoDecodeAcceleratorFactory> vda_factory_;
- int remaining_play_throughs_;
- int reset_after_frame_num_;
- int delete_decoder_state_;
+ size_t remaining_play_throughs_;
+ ResetPoint reset_point_;
ClientState state_;
- int num_skipped_fragments_;
- int num_queued_fragments_;
- int num_decoded_frames_;
- int num_done_bitstream_buffers_;
+ size_t num_queued_fragments_;
+ size_t num_decoded_frames_;
+ size_t num_done_bitstream_buffers_;
base::TimeTicks initialize_done_ticks_;
- VideoCodecProfile profile_;
- int fake_decoder_;
GLenum texture_target_;
VideoPixelFormat pixel_format_;
std::vector<base::TimeTicks> frame_delivery_times_;
- int delay_reuse_after_frame_num_;
// A map from bitstream buffer id to the decode start time of the buffer.
std::map<int, base::TimeTicks> decode_start_time_;
// The decode time of all decoded frames.
std::vector<base::TimeDelta> decode_time_;
- // The number of VDA::Decode calls per second. This is to simulate webrtc.
- int decode_calls_per_second_;
- bool render_as_thumbnails_;
// A map of the textures that are currently active for the decoder, i.e.,
// have been created via AssignPictureBuffers() and not dismissed via
@@ -532,6 +372,8 @@ class GLRenderingVDAClient
int32_t next_picture_buffer_id_;
+ const std::unique_ptr<media::test::EncodedDataHelper> encoded_data_helper_;
+
base::WeakPtr<GLRenderingVDAClient> weak_this_;
base::WeakPtrFactory<GLRenderingVDAClient> weak_this_factory_;
@@ -546,57 +388,43 @@ static bool DummyBindImage(uint32_t client_texture_id,
}
GLRenderingVDAClient::GLRenderingVDAClient(
- size_t window_id,
+ Config config,
+ std::string encoded_data,
RenderingHelper* rendering_helper,
- ClientStateNotification<ClientState>* note,
- const std::string& encoded_data,
- int num_in_flight_decodes,
- int num_play_throughs,
- int reset_after_frame_num,
- int delete_decoder_state,
- int frame_width,
- int frame_height,
- VideoCodecProfile profile,
- int fake_decoder,
- int delay_reuse_after_frame_num,
- int decode_calls_per_second,
- bool render_as_thumbnails)
- : window_id_(window_id),
+ ClientStateNotification<ClientState>* note)
+ : config_(std::move(config)),
rendering_helper_(rendering_helper),
- frame_size_(frame_width, frame_height),
- encoded_data_(encoded_data),
- num_in_flight_decodes_(num_in_flight_decodes),
+ frame_size_(config_.frame_size),
outstanding_decodes_(0),
- encoded_data_next_pos_to_decode_(0),
next_bitstream_buffer_id_(0),
note_(note),
- remaining_play_throughs_(num_play_throughs),
- reset_after_frame_num_(reset_after_frame_num),
- delete_decoder_state_(delete_decoder_state),
+ remaining_play_throughs_(config_.num_play_throughs),
+ reset_point_(config_.reset_point),
state_(CS_CREATED),
- num_skipped_fragments_(0),
num_queued_fragments_(0),
num_decoded_frames_(0),
num_done_bitstream_buffers_(0),
- fake_decoder_(fake_decoder),
texture_target_(0),
pixel_format_(PIXEL_FORMAT_UNKNOWN),
- delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
- decode_calls_per_second_(decode_calls_per_second),
- render_as_thumbnails_(render_as_thumbnails),
next_picture_buffer_id_(1),
+ encoded_data_helper_(std::make_unique<media::test::EncodedDataHelper>(
+ std::move(encoded_data),
+ config_.profile)),
weak_this_factory_(this) {
- LOG_ASSERT(num_in_flight_decodes > 0);
- LOG_ASSERT(num_play_throughs > 0);
+ DCHECK_NE(config.profile, VIDEO_CODEC_PROFILE_UNKNOWN);
+ LOG_ASSERT(config_.num_in_flight_decodes > 0);
+ LOG_ASSERT(config_.num_play_throughs > 0);
// |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0.
- if (decode_calls_per_second_ > 0)
- LOG_ASSERT(1 == num_in_flight_decodes_);
-
- // Default to H264 baseline if no profile provided.
- profile_ =
- (profile != VIDEO_CODEC_PROFILE_UNKNOWN ? profile : H264PROFILE_BASELINE);
-
+ if (config_.decode_calls_per_second > 0)
+ LOG_ASSERT(1 == config_.num_in_flight_decodes);
weak_this_ = weak_this_factory_.GetWeakPtr();
+ if (config_.reset_point == MID_STREAM_RESET) {
+ EXPECT_NE(config_.reset_after_frame_num, kNoMidStreamReset)
+ << "reset_ater_frame_num_ must not be kNoMidStreamReset "
+ << "when reset_point = MID_STREAM_RESET";
+ } else {
+ EXPECT_EQ(config_.reset_after_frame_num, kNoMidStreamReset);
+ }
}
GLRenderingVDAClient::~GLRenderingVDAClient() {
@@ -609,12 +437,12 @@ void GLRenderingVDAClient::CreateAndStartDecoder() {
LOG_ASSERT(decoder_deleted());
LOG_ASSERT(!decoder_.get());
- VideoDecodeAccelerator::Config config(profile_);
+ VideoDecodeAccelerator::Config vda_config(config_.profile);
- if (fake_decoder_) {
+ if (config_.fake_decoder) {
decoder_.reset(new FakeVideoDecodeAccelerator(
frame_size_, base::Bind([]() { return true; })));
- LOG_ASSERT(decoder_->Initialize(config, this));
+ LOG_ASSERT(decoder_->Initialize(vda_config, this));
} else {
if (!vda_factory_) {
if (g_use_gl_renderer) {
@@ -630,12 +458,13 @@ void GLRenderingVDAClient::CreateAndStartDecoder() {
}
if (g_test_import) {
- config.output_mode = VideoDecodeAccelerator::Config::OutputMode::IMPORT;
+ vda_config.output_mode =
+ VideoDecodeAccelerator::Config::OutputMode::IMPORT;
}
gpu::GpuDriverBugWorkarounds workarounds;
gpu::GpuPreferences gpu_preferences;
decoder_ =
- vda_factory_->CreateVDA(this, config, workarounds, gpu_preferences);
+ vda_factory_->CreateVDA(this, vda_config, workarounds, gpu_preferences);
}
LOG_ASSERT(decoder_) << "Failed creating a VDA";
@@ -662,13 +491,14 @@ void GLRenderingVDAClient::ProvidePictureBuffers(
LOG_ASSERT(textures_per_buffer == 1u);
std::vector<PictureBuffer> buffers;
- requested_num_of_buffers += kExtraPictureBuffers;
+ requested_num_of_buffers += static_cast<uint32_t>(kExtraPictureBuffers);
if (pixel_format == PIXEL_FORMAT_UNKNOWN)
pixel_format = PIXEL_FORMAT_ARGB;
LOG_ASSERT((pixel_format_ == PIXEL_FORMAT_UNKNOWN) ||
(pixel_format_ == pixel_format));
pixel_format_ = pixel_format;
+ frame_size_ = dimensions;
texture_target_ = texture_target;
for (uint32_t i = 0; i < requested_num_of_buffers; ++i) {
@@ -679,16 +509,17 @@ void GLRenderingVDAClient::ProvidePictureBuffers(
&done);
done.Wait();
- scoped_refptr<TextureRef> texture_ref;
+ scoped_refptr<media::test::TextureRef> texture_ref;
base::Closure delete_texture_cb =
base::Bind(&RenderingHelper::DeleteTexture,
base::Unretained(rendering_helper_), texture_id);
if (g_test_import) {
- texture_ref = TextureRef::CreatePreallocated(
+ texture_ref = media::test::TextureRef::CreatePreallocated(
texture_id, delete_texture_cb, pixel_format, dimensions);
} else {
- texture_ref = TextureRef::Create(texture_id, delete_texture_cb);
+ texture_ref =
+ media::test::TextureRef::Create(texture_id, delete_texture_cb);
}
LOG_ASSERT(texture_ref);
@@ -731,7 +562,7 @@ void GLRenderingVDAClient::PictureReady(const Picture& picture) {
gfx::Rect visible_rect = picture.visible_rect();
if (!visible_rect.IsEmpty())
- EXPECT_EQ(gfx::Rect(frame_size_), visible_rect);
+ EXPECT_TRUE(gfx::Rect(frame_size_).Contains(visible_rect));
base::TimeTicks now = base::TimeTicks::Now();
@@ -749,13 +580,12 @@ void GLRenderingVDAClient::PictureReady(const Picture& picture) {
// Mid-stream reset applies only to the last play-through per constructor
// comment.
- if (remaining_play_throughs_ == 1 &&
- reset_after_frame_num_ == num_decoded_frames_) {
- reset_after_frame_num_ = MID_STREAM_RESET;
+ if (remaining_play_throughs_ == 1 && reset_point_ == MID_STREAM_RESET &&
+ config_.reset_after_frame_num == num_decoded_frames_) {
decoder_->Reset();
// Re-start decoding from the beginning of the stream to avoid needing to
// know how to find I-frames and so on in this test.
- encoded_data_next_pos_to_decode_ = 0;
+ encoded_data_helper_->Rewind();
}
TextureRefMap::iterator texture_it =
@@ -768,11 +598,11 @@ void GLRenderingVDAClient::PictureReady(const Picture& picture) {
picture.picture_buffer_id()));
ASSERT_TRUE(pending_textures_.insert(*texture_it).second);
- if (render_as_thumbnails_) {
+ if (config_.render_as_thumbnails) {
rendering_helper_->RenderThumbnail(video_frame->texture_target(),
video_frame->texture_id());
} else {
- rendering_helper_->QueueVideoFrame(window_id_, video_frame);
+ rendering_helper_->QueueVideoFrame(config_.window_id, video_frame);
}
}
@@ -787,7 +617,7 @@ void GLRenderingVDAClient::ReturnPicture(int32_t picture_buffer_id) {
return;
}
- if (num_decoded_frames_ > delay_reuse_after_frame_num_) {
+ if (num_decoded_frames_ > config_.delay_reuse_after_frame_num) {
base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
FROM_HERE, base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
weak_vda_, picture_buffer_id),
@@ -798,8 +628,8 @@ void GLRenderingVDAClient::ReturnPicture(int32_t picture_buffer_id) {
}
void GLRenderingVDAClient::ResetDecoderAfterFlush() {
+ DCHECK_GE(remaining_play_throughs_, 1u);
--remaining_play_throughs_;
- DCHECK_GE(remaining_play_throughs_, 0);
// SetState(CS_RESETTING) should be called before decoder_->Reset(), because
// VDA can call NotifyFlushDone() from Reset().
// TODO(johnylin): call SetState() before all decoder Flush() and Reset().
@@ -820,20 +650,21 @@ void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
// forward progress during a Reset(). But the VDA::Reset() API doesn't
// guarantee this, so stop relying on it (and remove the notifications from
// VaapiVideoDecodeAccelerator::FinishReset()).
+ LOG_ASSERT(outstanding_decodes_ != 0);
++num_done_bitstream_buffers_;
--outstanding_decodes_;
// Flush decoder after all BitstreamBuffers are processed.
- if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
+ if (encoded_data_helper_->ReachEndOfStream()) {
if (state_ != CS_FLUSHING) {
decoder_->Flush();
SetState(CS_FLUSHING);
- if (reset_after_frame_num_ == RESET_BEFORE_NOTIFY_FLUSH_DONE) {
+ if (reset_point_ == RESET_BEFORE_NOTIFY_FLUSH_DONE) {
SetState(CS_FLUSHED);
ResetDecoderAfterFlush();
}
}
- } else if (decode_calls_per_second_ == 0) {
+ } else if (config_.decode_calls_per_second == 0) {
DecodeNextFragment();
}
}
@@ -842,7 +673,7 @@ void GLRenderingVDAClient::NotifyFlushDone() {
if (decoder_deleted())
return;
- if (reset_after_frame_num_ == RESET_BEFORE_NOTIFY_FLUSH_DONE) {
+ if (reset_point_ == RESET_BEFORE_NOTIFY_FLUSH_DONE) {
// In ResetBeforeNotifyFlushDone case client is not necessary to wait for
// NotifyFlushDone(). But if client gets here, it should be always before
// NotifyResetDone().
@@ -858,24 +689,32 @@ void GLRenderingVDAClient::NotifyResetDone() {
if (decoder_deleted())
return;
- if (reset_after_frame_num_ == MID_STREAM_RESET) {
- reset_after_frame_num_ = END_OF_STREAM_RESET;
- DecodeNextFragment();
- return;
- } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
- reset_after_frame_num_ = END_OF_STREAM_RESET;
- for (int i = 0; i < num_in_flight_decodes_; ++i)
+ switch (reset_point_) {
+ case DONE_RESET_AFTER_FIRST_CONFIG_INFO:
+ case MID_STREAM_RESET:
+ reset_point_ = END_OF_STREAM_RESET;
DecodeNextFragment();
- return;
+ return;
+ case START_OF_STREAM_RESET:
+ reset_point_ = END_OF_STREAM_RESET;
+ for (size_t i = 0; i < config_.num_in_flight_decodes; ++i)
+ DecodeNextFragment();
+ return;
+ case END_OF_STREAM_RESET:
+ case RESET_BEFORE_NOTIFY_FLUSH_DONE:
+ break;
+ case RESET_AFTER_FIRST_CONFIG_INFO:
+ NOTREACHED();
+ break;
}
if (remaining_play_throughs_) {
- encoded_data_next_pos_to_decode_ = 0;
+ encoded_data_helper_->Rewind();
FinishInitialization();
return;
}
- rendering_helper_->Flush(window_id_);
+ rendering_helper_->Flush(config_.window_id);
if (pending_textures_.empty()) {
SetState(CS_RESET);
@@ -900,15 +739,10 @@ void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
}
}
-static bool LookingAtNAL(const std::string& encoded, size_t pos) {
- return encoded[pos] == 0 && encoded[pos + 1] == 0 && encoded[pos + 2] == 0 &&
- encoded[pos + 3] == 1;
-}
-
void GLRenderingVDAClient::SetState(ClientState new_state) {
note_->Notify(new_state);
state_ = new_state;
- if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
+ if (!remaining_play_throughs_ && new_state == config_.delete_decoder_state) {
LOG_ASSERT(!decoder_deleted());
DeleteDecoder();
}
@@ -918,15 +752,14 @@ void GLRenderingVDAClient::FinishInitialization() {
SetState(CS_INITIALIZED);
initialize_done_ticks_ = base::TimeTicks::Now();
- if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
- reset_after_frame_num_ = MID_STREAM_RESET;
+ if (reset_point_ == START_OF_STREAM_RESET) {
decoder_->Reset();
return;
}
- for (int i = 0; i < num_in_flight_decodes_; ++i)
+ for (size_t i = 0; i < config_.num_in_flight_decodes; ++i)
DecodeNextFragment();
- DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
+ DCHECK_EQ(outstanding_decodes_, config_.num_in_flight_decodes);
}
void GLRenderingVDAClient::DeleteDecoder() {
@@ -934,121 +767,37 @@ void GLRenderingVDAClient::DeleteDecoder() {
return;
weak_vda_ptr_factory_->InvalidateWeakPtrs();
decoder_.reset();
- base::STLClearObject(&encoded_data_);
+
active_textures_.clear();
// Set state to CS_DESTROYED after decoder is deleted.
SetState(CS_DESTROYED);
}
-std::string GLRenderingVDAClient::GetBytesForFirstFragment(size_t start_pos,
- size_t* end_pos) {
- if (profile_ < H264PROFILE_MAX) {
- *end_pos = start_pos;
- while (*end_pos + 4 < encoded_data_.size()) {
- if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7) // SPS start frame
- return GetBytesForNextFragment(*end_pos, end_pos);
- GetBytesForNextNALU(*end_pos, end_pos);
- num_skipped_fragments_++;
- }
- *end_pos = start_pos;
- return std::string();
- }
- DCHECK_LE(profile_, VP9PROFILE_MAX);
- return GetBytesForNextFragment(start_pos, end_pos);
-}
-
-std::string GLRenderingVDAClient::GetBytesForNextFragment(size_t start_pos,
- size_t* end_pos) {
- if (profile_ < H264PROFILE_MAX) {
- *end_pos = start_pos;
- GetBytesForNextNALU(*end_pos, end_pos);
- if (start_pos != *end_pos) {
- num_queued_fragments_++;
- }
- return encoded_data_.substr(start_pos, *end_pos - start_pos);
- }
- DCHECK_LE(profile_, VP9PROFILE_MAX);
- return GetBytesForNextFrame(start_pos, end_pos);
-}
-
-void GLRenderingVDAClient::GetBytesForNextNALU(size_t start_pos,
- size_t* end_pos) {
- *end_pos = start_pos;
- if (*end_pos + 4 > encoded_data_.size())
- return;
- LOG_ASSERT(LookingAtNAL(encoded_data_, start_pos));
- *end_pos += 4;
- while (*end_pos + 4 <= encoded_data_.size() &&
- !LookingAtNAL(encoded_data_, *end_pos)) {
- ++*end_pos;
- }
- if (*end_pos + 3 >= encoded_data_.size())
- *end_pos = encoded_data_.size();
-}
-
-std::string GLRenderingVDAClient::GetBytesForNextFrame(size_t start_pos,
- size_t* end_pos) {
- // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
- std::string bytes;
- if (start_pos == 0)
- start_pos = 32; // Skip IVF header.
- *end_pos = start_pos;
- uint32_t frame_size = *reinterpret_cast<uint32_t*>(&encoded_data_[*end_pos]);
- *end_pos += 12; // Skip frame header.
- bytes.append(encoded_data_.substr(*end_pos, frame_size));
- *end_pos += frame_size;
- num_queued_fragments_++;
- return bytes;
-}
-
-static bool FragmentHasConfigInfo(const uint8_t* data,
- size_t size,
- VideoCodecProfile profile) {
- if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX) {
- H264Parser parser;
- parser.SetStream(data, size);
- H264NALU nalu;
- H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
- if (result != H264Parser::kOk) {
- // Let the VDA figure out there's something wrong with the stream.
- return false;
- }
-
- return nalu.nal_unit_type == H264NALU::kSPS;
- } else if (profile >= VP8PROFILE_MIN && profile <= VP9PROFILE_MAX) {
- return (size > 0 && !(data[0] & 0x01));
- }
- // Shouldn't happen at this point.
- LOG(FATAL) << "Invalid profile: " << GetProfileName(profile);
- return false;
-}
-
void GLRenderingVDAClient::DecodeNextFragment() {
if (decoder_deleted())
return;
- if (encoded_data_next_pos_to_decode_ == encoded_data_.size())
+ if (encoded_data_helper_->ReachEndOfStream())
return;
- size_t end_pos;
std::string next_fragment_bytes;
- if (encoded_data_next_pos_to_decode_ == 0) {
- next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
- } else {
- next_fragment_bytes =
- GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
- }
+ next_fragment_bytes = encoded_data_helper_->GetBytesForNextData();
size_t next_fragment_size = next_fragment_bytes.size();
+ if (next_fragment_size == 0)
+ return;
+ num_queued_fragments_++;
// Call Reset() just after Decode() if the fragment contains config info.
// This tests how the VDA behaves when it gets a reset request before it has
// a chance to ProvidePictureBuffers().
bool reset_here = false;
- if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
- reset_here = FragmentHasConfigInfo(
+ if (reset_point_ == RESET_AFTER_FIRST_CONFIG_INFO) {
+ reset_here = media::test::EncodedDataHelper::HasConfigInfo(
reinterpret_cast<const uint8_t*>(next_fragment_bytes.data()),
- next_fragment_size, profile_);
+ next_fragment_size, config_.profile);
+ // Set to DONE_RESET_AFTER_FIRST_CONFIG_INFO, to only Reset() for the first
+ // time.
if (reset_here)
- reset_after_frame_num_ = END_OF_STREAM_RESET;
+ reset_point_ = DONE_RESET_AFTER_FIRST_CONFIG_INFO;
}
// Populate the shared memory buffer w/ the fragment, duplicate its handle,
@@ -1069,24 +818,21 @@ void GLRenderingVDAClient::DecodeNextFragment() {
decoder_->Decode(bitstream_buffer);
++outstanding_decodes_;
if (!remaining_play_throughs_ &&
- -delete_decoder_state_ == next_bitstream_buffer_id_) {
+ -config_.delete_decoder_state == next_bitstream_buffer_id_) {
DeleteDecoder();
}
if (reset_here) {
- reset_after_frame_num_ = MID_STREAM_RESET;
decoder_->Reset();
// Restart from the beginning to re-Decode() the SPS we just sent.
- encoded_data_next_pos_to_decode_ = 0;
- } else {
- encoded_data_next_pos_to_decode_ = end_pos;
+ encoded_data_helper_->Rewind();
}
- if (decode_calls_per_second_ > 0) {
+ if (config_.decode_calls_per_second > 0) {
base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
FROM_HERE,
base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
- base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
+ base::TimeDelta::FromSeconds(1) / config_.decode_calls_per_second);
}
}
@@ -1099,7 +845,7 @@ base::TimeDelta GLRenderingVDAClient::decode_time_median() {
if (decode_time_.size() == 0)
return base::TimeDelta();
std::sort(decode_time_.begin(), decode_time_.end());
- int index = decode_time_.size() / 2;
+ size_t index = decode_time_.size() / 2;
if (decode_time_.size() % 2 != 0)
return decode_time_[index];
@@ -1124,7 +870,7 @@ class VideoDecodeAcceleratorTest : public ::testing::Test {
// |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
// frames should be adjusted if decoder is reset in the middle of the stream.
void UpdateTestVideoFileParams(size_t num_concurrent_decoders,
- int reset_point,
+ ResetPoint reset_point,
TestFilesVector* test_video_files);
void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
@@ -1209,14 +955,19 @@ void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
if (!fields[2].empty())
LOG_ASSERT(base::StringToInt(fields[2], &video_file->height));
if (!fields[3].empty())
- LOG_ASSERT(base::StringToInt(fields[3], &video_file->num_frames));
+ LOG_ASSERT(base::StringToSizeT(fields[3], &video_file->num_frames));
if (!fields[4].empty())
- LOG_ASSERT(base::StringToInt(fields[4], &video_file->num_fragments));
- if (!fields[5].empty())
- LOG_ASSERT(base::StringToInt(fields[5], &video_file->min_fps_render));
- if (!fields[6].empty())
- LOG_ASSERT(base::StringToInt(fields[6], &video_file->min_fps_no_render));
- int profile = -1;
+ LOG_ASSERT(base::StringToSizeT(fields[4], &video_file->num_fragments));
+ if (!fields[5].empty()) {
+ std::string field(fields[5].begin(), fields[5].end());
+ LOG_ASSERT(base::StringToDouble(field, &video_file->min_fps_render));
+ }
+ if (!fields[6].empty()) {
+ std::string field(fields[5].begin(), fields[5].end());
+ LOG_ASSERT(base::StringToDouble(field, &video_file->min_fps_no_render));
+ }
+ // Default to H264 baseline if no profile provided.
+ int profile = static_cast<int>(H264PROFILE_BASELINE);
if (!fields[7].empty())
LOG_ASSERT(base::StringToInt(fields[7], &profile));
video_file->profile = static_cast<VideoCodecProfile>(profile);
@@ -1233,7 +984,7 @@ void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
size_t num_concurrent_decoders,
- int reset_point,
+ ResetPoint reset_point,
TestFilesVector* test_video_files) {
for (size_t i = 0; i < test_video_files->size(); i++) {
TestVideoFile* video_file = (*test_video_files)[i].get();
@@ -1246,7 +997,7 @@ void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
video_file->num_frames += video_file->reset_after_frame_num;
} else {
- video_file->reset_after_frame_num = reset_point;
+ video_file->reset_after_frame_num = kNoMidStreamReset;
}
if (video_file->min_fps_render != -1)
@@ -1316,9 +1067,13 @@ void VideoDecodeAcceleratorTest::OutputLogFile(
// - whether the video frames are rendered as thumbnails.
class VideoDecodeAcceleratorParamTest
: public VideoDecodeAcceleratorTest,
- public ::testing::WithParamInterface<
- std::tuple<int, int, int, ResetPoint, ClientState, bool, bool>> {
-};
+ public ::testing::WithParamInterface<std::tuple<size_t,
+ size_t,
+ size_t,
+ ResetPoint,
+ ClientState,
+ bool,
+ bool>> {};
// Wait for |note| to report a state and if it's not |expected_state| then
// assert |client| has deleted its decoder.
@@ -1337,18 +1092,13 @@ static void AssertWaitForStateOrDeleted(
<< ", instead of " << expected_state;
}
-// We assert a minimal number of concurrent decoders we expect to succeed.
-// Different platforms can support more concurrent decoders, so we don't assert
-// failure above this.
-enum { kMinSupportedNumConcurrentDecoders = 3 };
-
// Test the most straightforward case possible: data is decoded from a single
// chunk and rendered to the screen.
TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
size_t num_concurrent_decoders = std::get<0>(GetParam());
const size_t num_in_flight_decodes = std::get<1>(GetParam());
- int num_play_throughs = std::get<2>(GetParam());
- const int reset_point = std::get<3>(GetParam());
+ size_t num_play_throughs = std::get<2>(GetParam());
+ const ResetPoint reset_point = std::get<3>(GetParam());
const int delete_decoder_state = std::get<4>(GetParam());
bool test_reuse_delay = std::get<5>(GetParam());
const bool render_as_thumbnails = std::get<6>(GetParam());
@@ -1380,21 +1130,28 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
std::make_unique<ClientStateNotification<ClientState>>();
notes_[index] = std::move(note);
- int delay_after_frame_num = std::numeric_limits<int>::max();
+ size_t delay_reuse_after_frame_num = std::numeric_limits<size_t>::max();
if (test_reuse_delay &&
kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
- delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
+ delay_reuse_after_frame_num =
+ video_file->num_frames - kMaxFramesToDelayReuse;
}
-
- std::unique_ptr<GLRenderingVDAClient> client =
- std::make_unique<GLRenderingVDAClient>(
- index, &rendering_helper_, notes_[index].get(),
- video_file->data_str, num_in_flight_decodes, num_play_throughs,
- video_file->reset_after_frame_num, delete_decoder_state,
- video_file->width, video_file->height, video_file->profile,
- g_fake_decoder, delay_after_frame_num, 0, render_as_thumbnails);
-
- clients_[index] = std::move(client);
+ GLRenderingVDAClient::Config config;
+ config.window_id = index;
+ config.num_in_flight_decodes = num_in_flight_decodes;
+ config.num_play_throughs = num_play_throughs;
+ config.reset_point = reset_point;
+ config.reset_after_frame_num = video_file->reset_after_frame_num;
+ config.delete_decoder_state = delete_decoder_state;
+ config.frame_size = gfx::Size(video_file->width, video_file->height);
+ config.profile = video_file->profile;
+ config.fake_decoder = g_fake_decoder;
+ config.delay_reuse_after_frame_num = delay_reuse_after_frame_num;
+ config.render_as_thumbnails = render_as_thumbnails;
+
+ clients_[index] = std::make_unique<GLRenderingVDAClient>(
+ std::move(config), video_file->data_str, &rendering_helper_,
+ notes_[index].get());
}
RenderingHelperParams helper_params;
@@ -1428,7 +1185,7 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
continue;
}
- for (int n = 0; n < num_play_throughs; ++n) {
+ for (size_t n = 0; n < num_play_throughs; ++n) {
// For play-throughs other than the first, we expect initialization to
// succeed unconditionally.
if (n > 0) {
@@ -1465,12 +1222,11 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
if (video_file->num_frames > 0) {
// Expect the decoded frames may be more than the video frames as frames
// could still be returned until resetting done.
- if (video_file->reset_after_frame_num > 0)
+ if (reset_point == MID_STREAM_RESET)
EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
// In ResetBeforeNotifyFlushDone case the decoded frames may be less than
// the video frames because decoder is reset before flush done.
- else if (video_file->reset_after_frame_num !=
- RESET_BEFORE_NOTIFY_FLUSH_DONE)
+ else if (reset_point != RESET_BEFORE_NOTIFY_FLUSH_DONE)
EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
}
if (reset_point == END_OF_STREAM_RESET) {
@@ -1481,8 +1237,8 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
}
LOG(INFO) << "Decoder " << i << " fps: " << client->frames_per_second();
if (!render_as_thumbnails) {
- int min_fps = g_rendering_fps == 0 ? video_file->min_fps_no_render
- : video_file->min_fps_render;
+ double min_fps = g_rendering_fps == 0 ? video_file->min_fps_no_render
+ : video_file->min_fps_render;
if (min_fps > 0 && !test_reuse_delay)
EXPECT_GT(client->frames_per_second(), min_fps);
}
@@ -1499,60 +1255,51 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
done.Wait();
std::vector<unsigned char> rgb;
- size_t num_pixels = rgba.size() / 4;
-
- rgb.resize(num_pixels * 3);
- // Drop the alpha channel, but check as we go that it is all 0xff.
- bool solid = true;
- unsigned char* rgb_ptr = &rgb[0];
- unsigned char* rgba_ptr = &rgba[0];
- for (size_t i = 0; i < num_pixels; i++) {
- *rgb_ptr++ = *rgba_ptr++;
- *rgb_ptr++ = *rgba_ptr++;
- *rgb_ptr++ = *rgba_ptr++;
- solid = solid && (*rgba_ptr == 0xff);
- rgba_ptr++;
- }
-
- EXPECT_EQ(solid, true) << "RGBA frame had incorrect alpha";
+ EXPECT_EQ(media::test::ConvertRGBAToRGB(rgba, &rgb), true)
+ << "RGBA frame had incorrect alpha";
- std::vector<std::string> golden_md5s;
std::string md5_string = base::MD5String(
base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
- ReadGoldenThumbnailMD5s(test_video_files_[0].get(), &golden_md5s);
- std::vector<std::string>::iterator match =
- find(golden_md5s.begin(), golden_md5s.end(), md5_string);
- if (match == golden_md5s.end()) {
- // Convert raw RGBA into PNG for export.
- std::vector<unsigned char> png;
- gfx::PNGCodec::Encode(&rgba[0], gfx::PNGCodec::FORMAT_RGBA,
- kThumbnailsPageSize,
- kThumbnailsPageSize.width() * 4, true,
- std::vector<gfx::PNGCodec::Comment>(), &png);
-
- LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;
-
- base::FilePath filepath(test_video_files_[0]->file_name);
- if (!g_thumbnail_output_dir.empty() &&
- base::DirectoryExists(g_thumbnail_output_dir)) {
- // Write bad thumbnails image to where --thumbnail_output_dir assigned.
- filepath = g_thumbnail_output_dir.Append(filepath.BaseName());
- } else {
- // Fallback to write to test data directory.
- // Note: test data directory is not writable by vda_unittest while
- // running by autotest. It should assign its resultsdir as output
- // directory.
- filepath = GetTestDataFile(filepath);
- }
- filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
- filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
+ base::FilePath filepath(test_video_files_[0]->file_name);
+ auto golden_md5s = media::test::ReadGoldenThumbnailMD5s(
+ filepath.AddExtension(FILE_PATH_LITERAL(".md5")));
+ bool is_valid_thumbnail = base::ContainsValue(golden_md5s, md5_string);
+
+ // Convert raw RGBA into PNG for export.
+ std::vector<unsigned char> png;
+ gfx::PNGCodec::Encode(&rgba[0], gfx::PNGCodec::FORMAT_RGBA,
+ kThumbnailsPageSize, kThumbnailsPageSize.width() * 4,
+ true, std::vector<gfx::PNGCodec::Comment>(), &png);
+
+ if (!g_thumbnail_output_dir.empty() &&
+ base::DirectoryExists(g_thumbnail_output_dir)) {
+ // Write thumbnails image to where --thumbnail_output_dir assigned.
+ filepath = g_thumbnail_output_dir.Append(filepath.BaseName());
+ } else {
+ // Fallback to write to test data directory.
+ // Note: test data directory is not writable by vda_unittest while
+ // running by autotest. It should assign its resultsdir as output
+ // directory.
+ filepath = GetTestDataFile(filepath);
+ }
+
+ if (is_valid_thumbnail) {
+ filepath =
+ filepath.AddExtension(FILE_PATH_LITERAL(".good_thumbnails.png"));
+ LOG(INFO) << "Write good thumbnails image to: "
+ << filepath.value().c_str();
+ } else {
+ filepath =
+ filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails.png"));
LOG(INFO) << "Write bad thumbnails image to: "
<< filepath.value().c_str();
- int num_bytes = base::WriteFile(
- filepath, reinterpret_cast<char*>(&png[0]), png.size());
- EXPECT_EQ(num_bytes, static_cast<int>(png.size()));
}
- EXPECT_NE(match, golden_md5s.end());
+ int num_bytes =
+ base::WriteFile(filepath, reinterpret_cast<char*>(&png[0]), png.size());
+ LOG_ASSERT(num_bytes != -1);
+ EXPECT_EQ(static_cast<size_t>(num_bytes), png.size());
+ EXPECT_EQ(is_valid_thumbnail, true)
+ << "Unknown thumbnails MD5: " << md5_string;
}
// Output the frame delivery time to file
@@ -1703,22 +1450,20 @@ INSTANTIATE_TEST_CASE_P(
INSTANTIATE_TEST_CASE_P(
ResourceExhaustion,
VideoDecodeAcceleratorParamTest,
- ::testing::Values(
- // +0 hack below to promote enum to int.
- std::make_tuple(kMinSupportedNumConcurrentDecoders + 0,
- 1,
- 1,
- END_OF_STREAM_RESET,
- CS_RESET,
- false,
- false),
- std::make_tuple(kMinSupportedNumConcurrentDecoders + 1,
- 1,
- 1,
- END_OF_STREAM_RESET,
- CS_RESET,
- false,
- false)));
+ ::testing::Values(std::make_tuple(kMinSupportedNumConcurrentDecoders,
+ 1,
+ 1,
+ END_OF_STREAM_RESET,
+ CS_RESET,
+ false,
+ false),
+ std::make_tuple(kMinSupportedNumConcurrentDecoders + 1,
+ 1,
+ 1,
+ END_OF_STREAM_RESET,
+ CS_RESET,
+ false,
+ false)));
// Allow MAYBE macro substitution.
#define WRAPPED_INSTANTIATE_TEST_CASE_P(a, b, c) \
@@ -1741,12 +1486,18 @@ WRAPPED_INSTANTIATE_TEST_CASE_P(
// second.
TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
notes_.push_back(std::make_unique<ClientStateNotification<ClientState>>());
+
+ const TestVideoFile* video_file = test_video_files_[0].get();
+ GLRenderingVDAClient::Config config;
+ EXPECT_EQ(video_file->reset_after_frame_num, kNoMidStreamReset);
+ config.frame_size = gfx::Size(video_file->width, video_file->height);
+ config.profile = video_file->profile;
+ config.fake_decoder = g_fake_decoder;
+ config.decode_calls_per_second = kWebRtcDecodeCallsPerSecond;
+
clients_.push_back(std::make_unique<GLRenderingVDAClient>(
- 0, &rendering_helper_, notes_[0].get(), test_video_files_[0]->data_str, 1,
- 1, test_video_files_[0]->reset_after_frame_num, CS_RESET,
- test_video_files_[0]->width, test_video_files_[0]->height,
- test_video_files_[0]->profile, g_fake_decoder,
- std::numeric_limits<int>::max(), kWebRtcDecodeCallsPerSecond, false));
+ std::move(config), video_file->data_str, &rendering_helper_,
+ notes_[0].get()));
RenderingHelperParams helper_params;
helper_params.num_windows = 1;
InitializeRenderingHelper(helper_params);
@@ -1769,12 +1520,17 @@ TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
// corrupted videos.
TEST_F(VideoDecodeAcceleratorTest, NoCrash) {
notes_.push_back(std::make_unique<ClientStateNotification<ClientState>>());
+
+ const TestVideoFile* video_file = test_video_files_[0].get();
+ GLRenderingVDAClient::Config config;
+ EXPECT_EQ(video_file->reset_after_frame_num, kNoMidStreamReset);
+ config.frame_size = gfx::Size(video_file->width, video_file->height);
+ config.profile = video_file->profile;
+ config.fake_decoder = g_fake_decoder;
+
clients_.push_back(std::make_unique<GLRenderingVDAClient>(
- 0, &rendering_helper_, notes_[0].get(), test_video_files_[0]->data_str, 1,
- 1, test_video_files_[0]->reset_after_frame_num, CS_RESET,
- test_video_files_[0]->width, test_video_files_[0]->height,
- test_video_files_[0]->profile, g_fake_decoder,
- std::numeric_limits<int>::max(), 0, false));
+ std::move(config), video_file->data_str, &rendering_helper_,
+ notes_[0].get()));
RenderingHelperParams helper_params;
helper_params.num_windows = 1;
InitializeRenderingHelper(helper_params);
@@ -1805,9 +1561,10 @@ class VDATestSuite : public base::TestSuite {
#endif // OS_WIN || OS_CHROMEOS
media::g_env =
- reinterpret_cast<media::VideoDecodeAcceleratorTestEnvironment*>(
+ reinterpret_cast<media::test::VideoDecodeAcceleratorTestEnvironment*>(
testing::AddGlobalTestEnvironment(
- new media::VideoDecodeAcceleratorTestEnvironment()));
+ new media::test::VideoDecodeAcceleratorTestEnvironment(
+ g_use_gl_renderer)));
#if defined(OS_CHROMEOS)
ui::OzonePlatform::InitParams params;
@@ -1864,11 +1621,11 @@ int main(int argc, char** argv) {
if (it->first == "num_play_throughs") {
std::string input(it->second.begin(), it->second.end());
- LOG_ASSERT(base::StringToInt(input, &media::g_num_play_throughs));
+ LOG_ASSERT(base::StringToSizeT(input, &media::g_num_play_throughs));
continue;
}
if (it->first == "fake_decoder") {
- media::g_fake_decoder = 1;
+ media::g_fake_decoder = true;
continue;
}
if (it->first == "v" || it->first == "vmodule")
diff --git a/chromium/media/gpu/video_encode_accelerator_unittest.cc b/chromium/media/gpu/video_encode_accelerator_unittest.cc
index c3c393e67f9..d76b06d482f 100644
--- a/chromium/media/gpu/video_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/video_encode_accelerator_unittest.cc
@@ -13,6 +13,7 @@
#include "base/at_exit.h"
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/bits.h"
#include "base/cancelable_callback.h"
#include "base/command_line.h"
@@ -22,7 +23,6 @@
#include "base/memory/aligned_memory.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
-#include "base/message_loop/message_loop.h"
#include "base/numerics/safe_conversions.h"
#include "base/process/process_handle.h"
#include "base/single_thread_task_runner.h"
@@ -30,6 +30,9 @@
#include "base/strings/string_split.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
+#include "base/test/launcher/unit_test_launcher.h"
+#include "base/test/scoped_task_environment.h"
+#include "base/test/test_suite.h"
#include "base/threading/thread.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
@@ -50,7 +53,7 @@
#include "media/gpu/gpu_video_encode_accelerator_factory.h"
#include "media/gpu/h264_decoder.h"
#include "media/gpu/h264_dpb.h"
-#include "media/gpu/video_accelerator_unittest_helpers.h"
+#include "media/gpu/test/video_accelerator_unittest_helpers.h"
#include "media/video/fake_video_encode_accelerator.h"
#include "media/video/h264_parser.h"
#include "media/video/video_encode_accelerator.h"
@@ -133,18 +136,38 @@ const unsigned int kFlushTimeoutMs = 2000;
// - |requested_subsequent_framerate| framerate to switch to in the middle
// of the stream.
// Bitrate is only forced for tests that test bitrate.
-const char* g_default_in_filename = "bear_320x192_40frames.yuv";
#if defined(OS_CHROMEOS) || defined(OS_LINUX)
+const char* g_default_in_filename = "bear_320x192_40frames.yuv";
const base::FilePath::CharType* g_default_in_parameters =
FILE_PATH_LITERAL(":320:192:1:out.h264:200000");
-#elif defined(OS_MACOSX) || defined(OS_WIN)
+#elif defined(OS_MACOSX)
+// VideoToolbox falls back to SW encoder with resolutions lower than this.
+const char* g_default_in_filename = "bear_640x384_40frames.yuv";
+const base::FilePath::CharType* g_default_in_parameters =
+ FILE_PATH_LITERAL(":640:384:1:out.h264:200000");
+#elif defined(OS_WIN)
+const char* g_default_in_filename = "bear_320x192_40frames.yuv";
const base::FilePath::CharType* g_default_in_parameters =
FILE_PATH_LITERAL(",320,192,0,out.h264,200000");
-#endif // defined(OS_CHROMEOS)
+#endif // defined(OS_CHROMEOS) || defined(OS_LINUX)
+
+// Default params that can be overriden via command line.
+std::unique_ptr<base::FilePath::StringType> g_test_stream_data(
+ new base::FilePath::StringType(
+ media::GetTestDataFilePath(media::g_default_in_filename).value() +
+ media::g_default_in_parameters));
+
+base::FilePath g_log_path;
+
+base::FilePath g_frame_stats_path;
+
+bool g_run_at_fps = false;
+
+bool g_needs_encode_latency = false;
+
+bool g_verify_all_output = false;
-// Enabled by including a --fake_encoder flag to the command line invoking the
-// test.
bool g_fake_encoder = false;
// Skip checking the flush functionality. Currently only Chrome OS devices
@@ -807,7 +830,7 @@ void VideoFrameQualityValidator::Initialize(const gfx::Size& coded_size,
base::Unretained(this)),
base::BindRepeating(&VideoFrameQualityValidator::VerifyOutputFrame,
base::Unretained(this)),
- VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
}
void VideoFrameQualityValidator::InitializeCB(bool success) {
@@ -2524,20 +2547,37 @@ INSTANTIATE_TEST_CASE_P(
// - multiple encoders + decoders
// - mid-stream encoder_->Destroy()
+class VEATestSuite : public base::TestSuite {
+ public:
+ VEATestSuite(int argc, char** argv) : base::TestSuite(argc, argv) {}
+
+ int Run() {
+ base::test::ScopedTaskEnvironment scoped_task_environment;
+ media::g_env =
+ reinterpret_cast<media::VideoEncodeAcceleratorTestEnvironment*>(
+ testing::AddGlobalTestEnvironment(
+ new media::VideoEncodeAcceleratorTestEnvironment(
+ std::move(media::g_test_stream_data), media::g_log_path,
+ media::g_frame_stats_path, media::g_run_at_fps,
+ media::g_needs_encode_latency,
+ media::g_verify_all_output)));
+
+#if BUILDFLAG(USE_VAAPI)
+ media::VaapiWrapper::PreSandboxInitialization();
+#elif defined(OS_WIN)
+ media::MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization();
+#endif
+ return base::TestSuite::Run();
+ }
+};
+
} // namespace
} // namespace media
int main(int argc, char** argv) {
- testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args.
- base::CommandLine::Init(argc, argv);
+ media::VEATestSuite test_suite(argc, argv);
base::ShadowingAtExitManager at_exit_manager;
- base::MessageLoop main_loop;
-
- std::unique_ptr<base::FilePath::StringType> test_stream_data(
- new base::FilePath::StringType(
- media::GetTestDataFilePath(media::g_default_in_filename).value()));
- test_stream_data->append(media::g_default_in_parameters);
// Needed to enable DVLOG through --vmodule.
logging::LoggingSettings settings;
@@ -2547,28 +2587,23 @@ int main(int argc, char** argv) {
const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
DCHECK(cmd_line);
- bool run_at_fps = false;
#if defined(OS_CHROMEOS)
// Currently only ARC++ uses flush function, we only verify it on Chrome OS.
media::g_disable_flush = false;
#else
media::g_disable_flush = true;
#endif
- bool needs_encode_latency = false;
- bool verify_all_output = false;
- base::FilePath log_path;
- base::FilePath frame_stats_path;
base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
it != switches.end(); ++it) {
if (it->first == "test_stream_data") {
- test_stream_data->assign(it->second.c_str());
+ media::g_test_stream_data->assign(it->second.c_str());
continue;
}
// Output machine-readable logs with fixed formats to a file.
if (it->first == "output_log") {
- log_path = base::FilePath(
+ media::g_log_path = base::FilePath(
base::FilePath::StringType(it->second.begin(), it->second.end()));
continue;
}
@@ -2578,7 +2613,7 @@ int main(int argc, char** argv) {
continue;
}
if (it->first == "measure_latency") {
- needs_encode_latency = true;
+ media::g_needs_encode_latency = true;
continue;
}
if (it->first == "fake_encoder") {
@@ -2586,7 +2621,7 @@ int main(int argc, char** argv) {
continue;
}
if (it->first == "run_at_fps") {
- run_at_fps = true;
+ media::g_run_at_fps = true;
continue;
}
if (it->first == "disable_flush") {
@@ -2594,7 +2629,7 @@ int main(int argc, char** argv) {
continue;
}
if (it->first == "verify_all_output") {
- verify_all_output = true;
+ media::g_verify_all_output = true;
continue;
}
if (it->first == "v" || it->first == "vmodule")
@@ -2604,32 +2639,20 @@ int main(int argc, char** argv) {
// Output per-frame metrics to a csv file.
if (it->first == "frame_stats") {
- frame_stats_path = base::FilePath(
+ media::g_frame_stats_path = base::FilePath(
base::FilePath::StringType(it->second.begin(), it->second.end()));
continue;
}
- LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
}
- if (needs_encode_latency && !run_at_fps) {
+ if (media::g_needs_encode_latency && !media::g_run_at_fps) {
// Encode latency can only be measured with --run_at_fps. Otherwise, we get
// skewed results since it may queue too many frames at once with the same
// encode start time.
LOG(FATAL) << "--measure_latency requires --run_at_fps enabled to work.";
}
-#if BUILDFLAG(USE_VAAPI)
- media::VaapiWrapper::PreSandboxInitialization();
-#elif defined(OS_WIN)
- media::MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization();
-#endif
-
- media::g_env =
- reinterpret_cast<media::VideoEncodeAcceleratorTestEnvironment*>(
- testing::AddGlobalTestEnvironment(
- new media::VideoEncodeAcceleratorTestEnvironment(
- std::move(test_stream_data), log_path, frame_stats_path,
- run_at_fps, needs_encode_latency, verify_all_output)));
-
- return RUN_ALL_TESTS();
+ return base::LaunchUnitTestsSerially(
+ argc, argv,
+ base::BindOnce(&media::VEATestSuite::Run, base::Unretained(&test_suite)));
}
diff --git a/chromium/media/gpu/vp8_decoder.cc b/chromium/media/gpu/vp8_decoder.cc
index 1ae037e8a7b..73800ce1ecc 100644
--- a/chromium/media/gpu/vp8_decoder.cc
+++ b/chromium/media/gpu/vp8_decoder.cc
@@ -27,9 +27,17 @@ bool VP8Decoder::Flush() {
return true;
}
-void VP8Decoder::SetStream(int32_t id, const uint8_t* ptr, size_t size) {
+void VP8Decoder::SetStream(int32_t id,
+ const uint8_t* ptr,
+ size_t size,
+ const DecryptConfig* decrypt_config) {
DCHECK(ptr);
DCHECK(size);
+ if (decrypt_config) {
+ NOTIMPLEMENTED();
+ state_ = kError;
+ return;
+ }
DVLOG(4) << "New input stream id: " << id << " at: " << (void*)ptr
<< " size: " << size;
@@ -39,14 +47,11 @@ void VP8Decoder::SetStream(int32_t id, const uint8_t* ptr, size_t size) {
}
void VP8Decoder::Reset() {
- curr_pic_ = nullptr;
curr_frame_hdr_ = nullptr;
curr_frame_start_ = nullptr;
frame_size_ = 0;
- last_frame_ = nullptr;
- golden_frame_ = nullptr;
- alt_frame_ = nullptr;
+ ref_frames_.Clear();
if (state_ == kDecoding)
state_ = kAfterReset;
@@ -75,10 +80,7 @@ VP8Decoder::DecodeResult VP8Decoder::Decode() {
DVLOG(2) << "New resolution: " << new_pic_size.ToString();
pic_size_ = new_pic_size;
- DCHECK(!curr_pic_);
- last_frame_ = nullptr;
- golden_frame_ = nullptr;
- alt_frame_ = nullptr;
+ ref_frames_.Clear();
return kAllocateNewSurfaces;
}
@@ -87,77 +89,31 @@ VP8Decoder::DecodeResult VP8Decoder::Decode() {
} else {
if (state_ != kDecoding) {
// Need a resume point.
- curr_frame_hdr_.reset();
+ curr_frame_hdr_ = nullptr;
return kRanOutOfStreamData;
}
}
- curr_pic_ = accelerator_->CreateVP8Picture();
- if (!curr_pic_)
+ scoped_refptr<VP8Picture> pic = accelerator_->CreateVP8Picture();
+ if (!pic)
return kRanOutOfSurfaces;
- curr_pic_->set_visible_rect(gfx::Rect(pic_size_));
- curr_pic_->set_bitstream_id(stream_id_);
-
- if (!DecodeAndOutputCurrentFrame())
+ if (!DecodeAndOutputCurrentFrame(std::move(pic))) {
+ state_ = kError;
return kDecodeError;
-
- return kRanOutOfStreamData;
-}
-
-void VP8Decoder::RefreshReferenceFrames() {
- if (curr_frame_hdr_->IsKeyframe()) {
- last_frame_ = curr_pic_;
- golden_frame_ = curr_pic_;
- alt_frame_ = curr_pic_;
- return;
}
- // Save current golden since we overwrite it here,
- // but may have to use it to update alt below.
- scoped_refptr<VP8Picture> curr_golden = golden_frame_;
-
- if (curr_frame_hdr_->refresh_golden_frame) {
- golden_frame_ = curr_pic_;
- } else {
- switch (curr_frame_hdr_->copy_buffer_to_golden) {
- case Vp8FrameHeader::COPY_LAST_TO_GOLDEN:
- DCHECK(last_frame_);
- golden_frame_ = last_frame_;
- break;
-
- case Vp8FrameHeader::COPY_ALT_TO_GOLDEN:
- DCHECK(alt_frame_);
- golden_frame_ = alt_frame_;
- break;
- }
- }
-
- if (curr_frame_hdr_->refresh_alternate_frame) {
- alt_frame_ = curr_pic_;
- } else {
- switch (curr_frame_hdr_->copy_buffer_to_alternate) {
- case Vp8FrameHeader::COPY_LAST_TO_ALT:
- DCHECK(last_frame_);
- alt_frame_ = last_frame_;
- break;
-
- case Vp8FrameHeader::COPY_GOLDEN_TO_ALT:
- DCHECK(curr_golden);
- alt_frame_ = curr_golden;
- break;
- }
- }
-
- if (curr_frame_hdr_->refresh_last)
- last_frame_ = curr_pic_;
+ return kRanOutOfStreamData;
}
-bool VP8Decoder::DecodeAndOutputCurrentFrame() {
+bool VP8Decoder::DecodeAndOutputCurrentFrame(scoped_refptr<VP8Picture> pic) {
+ DCHECK(pic);
DCHECK(!pic_size_.IsEmpty());
- DCHECK(curr_pic_);
DCHECK(curr_frame_hdr_);
+ pic->set_visible_rect(gfx::Rect(pic_size_));
+ pic->set_bitstream_id(stream_id_);
+
if (curr_frame_hdr_->IsKeyframe()) {
horizontal_scale_ = curr_frame_hdr_->horizontal_scale;
vertical_scale_ = curr_frame_hdr_->vertical_scale;
@@ -169,18 +125,17 @@ bool VP8Decoder::DecodeAndOutputCurrentFrame() {
curr_frame_hdr_->vertical_scale = vertical_scale_;
}
- if (!accelerator_->SubmitDecode(curr_pic_, curr_frame_hdr_.get(), last_frame_,
- golden_frame_, alt_frame_))
+ const bool show_frame = curr_frame_hdr_->show_frame;
+ pic->frame_hdr = std::move(curr_frame_hdr_);
+
+ if (!accelerator_->SubmitDecode(pic, ref_frames_))
return false;
- if (curr_frame_hdr_->show_frame)
- if (!accelerator_->OutputPicture(curr_pic_))
- return false;
+ if (show_frame && !accelerator_->OutputPicture(pic))
+ return false;
- RefreshReferenceFrames();
+ ref_frames_.Refresh(pic);
- curr_pic_ = nullptr;
- curr_frame_hdr_ = nullptr;
curr_frame_start_ = nullptr;
frame_size_ = 0;
return true;
diff --git a/chromium/media/gpu/vp8_decoder.h b/chromium/media/gpu/vp8_decoder.h
index 779722d9f02..fd3de24040e 100644
--- a/chromium/media/gpu/vp8_decoder.h
+++ b/chromium/media/gpu/vp8_decoder.h
@@ -15,6 +15,7 @@
#include "media/filters/vp8_parser.h"
#include "media/gpu/accelerated_video_decoder.h"
#include "media/gpu/vp8_picture.h"
+#include "media/gpu/vp8_reference_frame_vector.h"
namespace media {
@@ -40,16 +41,11 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
// this situation as normal and return from Decode() with kRanOutOfSurfaces.
virtual scoped_refptr<VP8Picture> CreateVP8Picture() = 0;
- // Submit decode for |pic|, taking as arguments |frame_hdr| with parsed
- // VP8 frame header information for current frame, and using |last_frame|,
- // |golden_frame| and |alt_frame| as references, as per VP8 specification.
- // Note that this runs the decode in hardware.
- // Return true if successful.
- virtual bool SubmitDecode(const scoped_refptr<VP8Picture>& pic,
- const Vp8FrameHeader* frame_hdr,
- const scoped_refptr<VP8Picture>& last_frame,
- const scoped_refptr<VP8Picture>& golden_frame,
- const scoped_refptr<VP8Picture>& alt_frame) = 0;
+ // Submits decode for |pic|, using |reference_frames| as references, as per
+ // VP8 specification. Returns true if successful.
+ virtual bool SubmitDecode(
+ scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& reference_frames) = 0;
// Schedule output (display) of |pic|. Note that returning from this
// method does not mean that |pic| has already been outputted (displayed),
@@ -67,7 +63,10 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
~VP8Decoder() override;
// AcceleratedVideoDecoder implementation.
- void SetStream(int32_t id, const uint8_t* ptr, size_t size) override;
+ void SetStream(int32_t id,
+ const uint8_t* ptr,
+ size_t size,
+ const DecryptConfig* decrypt_config = nullptr) override;
bool Flush() override WARN_UNUSED_RESULT;
void Reset() override;
DecodeResult Decode() override WARN_UNUSED_RESULT;
@@ -75,8 +74,7 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
size_t GetRequiredNumOfPictures() const override;
private:
- bool DecodeAndOutputCurrentFrame();
- void RefreshReferenceFrames();
+ bool DecodeAndOutputCurrentFrame(scoped_refptr<VP8Picture> pic);
enum State {
kNeedStreamMetadata, // After initialization, need a keyframe.
@@ -90,10 +88,7 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
Vp8Parser parser_;
std::unique_ptr<Vp8FrameHeader> curr_frame_hdr_;
- scoped_refptr<VP8Picture> curr_pic_;
- scoped_refptr<VP8Picture> last_frame_;
- scoped_refptr<VP8Picture> golden_frame_;
- scoped_refptr<VP8Picture> alt_frame_;
+ Vp8ReferenceFrameVector ref_frames_;
// Current stream buffer id; to be assigned to pictures decoded from it.
int32_t stream_id_ = -1;
diff --git a/chromium/media/gpu/vp8_picture.cc b/chromium/media/gpu/vp8_picture.cc
index 26b66f7e278..366ad8354dd 100644
--- a/chromium/media/gpu/vp8_picture.cc
+++ b/chromium/media/gpu/vp8_picture.cc
@@ -6,7 +6,7 @@
namespace media {
-VP8Picture::VP8Picture() = default;
+VP8Picture::VP8Picture() : frame_hdr(new Vp8FrameHeader()) {}
VP8Picture::~VP8Picture() = default;
diff --git a/chromium/media/gpu/vp8_reference_frame_vector.cc b/chromium/media/gpu/vp8_reference_frame_vector.cc
new file mode 100644
index 00000000000..1012a1350c1
--- /dev/null
+++ b/chromium/media/gpu/vp8_reference_frame_vector.cc
@@ -0,0 +1,102 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vp8_reference_frame_vector.h"
+
+#include "media/gpu/vp8_picture.h"
+
+namespace media {
+
+Vp8ReferenceFrameVector::Vp8ReferenceFrameVector() {
+ // TODO(posciak): Remove this once VP8Decoder is created on the same thread
+ // as its methods are called on.
+ DETACH_FROM_SEQUENCE(sequence_checker_);
+}
+
+Vp8ReferenceFrameVector::~Vp8ReferenceFrameVector() {
+ // TODO(posciak): Add this once VP8Decoder is created on the same thread
+ // as its methods are called on.
+ // DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+}
+
+// Based on update_reference_frames() in libvpx: vp8/encoder/onyx_if.c
+void Vp8ReferenceFrameVector::Refresh(scoped_refptr<VP8Picture> pic) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(pic);
+
+ bool keyframe = pic->frame_hdr->IsKeyframe();
+ const auto& frame_hdr = pic->frame_hdr;
+
+ if (keyframe) {
+ reference_frames_[Vp8RefType::VP8_FRAME_LAST] = pic;
+ reference_frames_[Vp8RefType::VP8_FRAME_GOLDEN] = pic;
+ reference_frames_[Vp8RefType::VP8_FRAME_ALTREF] = pic;
+ return;
+ }
+
+ if (frame_hdr->refresh_alternate_frame) {
+ reference_frames_[Vp8RefType::VP8_FRAME_ALTREF] = pic;
+ } else {
+ switch (frame_hdr->copy_buffer_to_alternate) {
+ case Vp8FrameHeader::COPY_LAST_TO_ALT:
+ DCHECK(reference_frames_[Vp8RefType::VP8_FRAME_LAST]);
+ reference_frames_[Vp8RefType::VP8_FRAME_ALTREF] =
+ reference_frames_[Vp8RefType::VP8_FRAME_LAST];
+ break;
+
+ case Vp8FrameHeader::COPY_GOLDEN_TO_ALT:
+ DCHECK(reference_frames_[Vp8RefType::VP8_FRAME_GOLDEN]);
+ reference_frames_[Vp8RefType::VP8_FRAME_ALTREF] =
+ reference_frames_[Vp8RefType::VP8_FRAME_GOLDEN];
+ break;
+
+ case Vp8FrameHeader::NO_ALT_REFRESH:
+ DCHECK(reference_frames_[Vp8RefType::VP8_FRAME_ALTREF]);
+ break;
+ }
+ }
+
+ if (frame_hdr->refresh_golden_frame) {
+ reference_frames_[Vp8RefType::VP8_FRAME_GOLDEN] = pic;
+ } else {
+ switch (frame_hdr->copy_buffer_to_golden) {
+ case Vp8FrameHeader::COPY_LAST_TO_GOLDEN:
+ DCHECK(reference_frames_[Vp8RefType::VP8_FRAME_LAST]);
+ reference_frames_[Vp8RefType::VP8_FRAME_GOLDEN] =
+ reference_frames_[Vp8RefType::VP8_FRAME_LAST];
+ break;
+
+ case Vp8FrameHeader::COPY_ALT_TO_GOLDEN:
+ DCHECK(reference_frames_[Vp8RefType::VP8_FRAME_ALTREF]);
+ reference_frames_[Vp8RefType::VP8_FRAME_GOLDEN] =
+ reference_frames_[Vp8RefType::VP8_FRAME_ALTREF];
+ break;
+
+ case Vp8FrameHeader::NO_GOLDEN_REFRESH:
+ DCHECK(reference_frames_[Vp8RefType::VP8_FRAME_GOLDEN]);
+ break;
+ }
+ }
+
+ if (frame_hdr->refresh_last)
+ reference_frames_[Vp8RefType::VP8_FRAME_LAST] = pic;
+ else
+ DCHECK(reference_frames_[Vp8RefType::VP8_FRAME_LAST]);
+}
+
+void Vp8ReferenceFrameVector::Clear() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ for (auto& f : reference_frames_)
+ f = nullptr;
+}
+
+scoped_refptr<VP8Picture> Vp8ReferenceFrameVector::GetFrame(
+ Vp8RefType type) const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ return reference_frames_[type];
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/vp8_reference_frame_vector.h b/chromium/media/gpu/vp8_reference_frame_vector.h
new file mode 100644
index 00000000000..63c71595693
--- /dev/null
+++ b/chromium/media/gpu/vp8_reference_frame_vector.h
@@ -0,0 +1,38 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_VP8_REFERENCE_FRAME_VECTOR_H_
+#define MEDIA_GPU_VP8_REFERENCE_FRAME_VECTOR_H_
+
+#include <array>
+
+#include "base/memory/ref_counted.h"
+#include "base/sequence_checker.h"
+#include "media/filters/vp8_parser.h"
+
+namespace media {
+
+class VP8Picture;
+
+class Vp8ReferenceFrameVector {
+ public:
+ Vp8ReferenceFrameVector();
+ ~Vp8ReferenceFrameVector();
+
+ void Refresh(scoped_refptr<VP8Picture> pic);
+ void Clear();
+
+ scoped_refptr<VP8Picture> GetFrame(Vp8RefType type) const;
+
+ private:
+ std::array<scoped_refptr<VP8Picture>, kNumVp8ReferenceBuffers>
+ reference_frames_;
+
+ SEQUENCE_CHECKER(sequence_checker_);
+ DISALLOW_COPY_AND_ASSIGN(Vp8ReferenceFrameVector);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_VP8_REFERENCE_FRAME_VECTOR_H_
diff --git a/chromium/media/gpu/vp9_decoder.cc b/chromium/media/gpu/vp9_decoder.cc
index d5053b843c7..83a6041cbeb 100644
--- a/chromium/media/gpu/vp9_decoder.cc
+++ b/chromium/media/gpu/vp9_decoder.cc
@@ -26,10 +26,17 @@ VP9Decoder::VP9Decoder(std::unique_ptr<VP9Accelerator> accelerator)
VP9Decoder::~VP9Decoder() = default;
-void VP9Decoder::SetStream(int32_t id, const uint8_t* ptr, size_t size) {
+void VP9Decoder::SetStream(int32_t id,
+ const uint8_t* ptr,
+ size_t size,
+ const DecryptConfig* decrypt_config) {
DCHECK(ptr);
DCHECK(size);
-
+ if (decrypt_config) {
+ NOTIMPLEMENTED();
+ state_ = kError;
+ return;
+ }
DVLOG(4) << "New input stream id: " << id << " at: " << (void*)ptr
<< " size: " << size;
stream_id_ = id;
diff --git a/chromium/media/gpu/vp9_decoder.h b/chromium/media/gpu/vp9_decoder.h
index 672c816c3b1..5190435a347 100644
--- a/chromium/media/gpu/vp9_decoder.h
+++ b/chromium/media/gpu/vp9_decoder.h
@@ -96,7 +96,10 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
~VP9Decoder() override;
// AcceleratedVideoDecoder implementation.
- void SetStream(int32_t id, const uint8_t* ptr, size_t size) override;
+ void SetStream(int32_t id,
+ const uint8_t* ptr,
+ size_t size,
+ const DecryptConfig* decrypt_config = nullptr) override;
bool Flush() override WARN_UNUSED_RESULT;
void Reset() override;
DecodeResult Decode() override WARN_UNUSED_RESULT;
diff --git a/chromium/media/gpu/vt_video_decode_accelerator_mac.cc b/chromium/media/gpu/vt_video_decode_accelerator_mac.cc
index c6a58f6e08a..db101834837 100644
--- a/chromium/media/gpu/vt_video_decode_accelerator_mac.cc
+++ b/chromium/media/gpu/vt_video_decode_accelerator_mac.cc
@@ -10,6 +10,7 @@
#include <stddef.h>
#include <algorithm>
+#include <iterator>
#include <memory>
#include "base/atomic_sequence_num.h"
@@ -18,11 +19,14 @@
#include "base/mac/mac_logging.h"
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
+#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "base/sys_byteorder.h"
#include "base/sys_info.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "base/trace_event/memory_allocator_dump.h"
#include "base/trace_event/memory_dump_manager.h"
+#include "base/trace_event/process_memory_dump.h"
#include "base/version.h"
#include "media/base/limits.h"
#include "media/gpu/shared_memory_region.h"
@@ -67,11 +71,24 @@ const VideoCodecProfile kSupportedProfiles[] = {
// Size to use for NALU length headers in AVC format (can be 1, 2, or 4).
const int kNALUHeaderLength = 4;
-// We request 5 picture buffers from the client, each of which has a texture ID
-// that we can bind decoded frames to. We need enough to satisfy preroll, and
-// enough to avoid unnecessary stalling, but no more than that. The resource
-// requirements are low, as we don't need the textures to be backed by storage.
-const int kNumPictureBuffers = limits::kMaxVideoFrames + 1;
+// We request 16 picture buffers from the client, each of which has a texture ID
+// that we can bind decoded frames to. The resource requirements are low, as we
+// don't need the textures to be backed by storage.
+//
+// The lower limit is |limits::kMaxVideoFrames + 1|, enough to have one
+// composited frame plus |limits::kMaxVideoFrames| frames to satisfy preroll.
+//
+// However, there can be pathological behavior where VideoRendererImpl will
+// continue to call Decode() as long as it is willing to queue more output
+// frames, which is variable but starts at |limits::kMaxVideoFrames +
+// GetMaxDecodeRequests()|. If we don't have enough picture buffers, it will
+// continue to call Decode() until we stop calling NotifyEndOfBistreamBuffer(),
+// which for VTVDA is when the reorder queue is full. In testing this results in
+// ~20 extra frames held by VTVDA.
+//
+// Allocating more picture buffers than VideoRendererImpl is willing to queue
+// counterintuitively reduces memory usage in this case.
+const int kNumPictureBuffers = limits::kMaxVideoFrames * 4;
// Maximum number of frames to queue for reordering. (Also controls the maximum
// number of in-flight frames, since NotifyEndOfBitstreamBuffer() is called when
@@ -196,7 +213,7 @@ bool InitializeVideoToolboxInternal() {
const uint8_t pps_normal[] = {0x68, 0xe9, 0x7b, 0xcb};
if (!CreateVideoToolboxSession(sps_normal, arraysize(sps_normal), pps_normal,
arraysize(pps_normal), true)) {
- DLOG(WARNING) << "Failed to create hardware VideoToolbox session";
+ DLOG(WARNING) << "Hardware decoding with VideoToolbox is not supported";
return false;
}
@@ -208,7 +225,7 @@ bool InitializeVideoToolboxInternal() {
const uint8_t pps_small[] = {0x68, 0xe9, 0x79, 0x72, 0xc0};
if (!CreateVideoToolboxSession(sps_small, arraysize(sps_small), pps_small,
arraysize(pps_small), false)) {
- DLOG(WARNING) << "Failed to create software VideoToolbox session";
+ DLOG(WARNING) << "Software decoding with VideoToolbox is not supported";
return false;
}
@@ -403,7 +420,8 @@ VTVideoDecodeAccelerator::Frame::~Frame() {}
VTVideoDecodeAccelerator::PictureInfo::PictureInfo(uint32_t client_texture_id,
uint32_t service_texture_id)
- : client_texture_id(client_texture_id),
+ : bitstream_id(0),
+ client_texture_id(client_texture_id),
service_texture_id(service_texture_id) {}
VTVideoDecodeAccelerator::PictureInfo::~PictureInfo() {}
@@ -420,11 +438,15 @@ bool VTVideoDecodeAccelerator::FrameOrder::operator()(
}
VTVideoDecodeAccelerator::VTVideoDecodeAccelerator(
- const BindGLImageCallback& bind_image_cb)
+ const BindGLImageCallback& bind_image_cb,
+ MediaLog* media_log)
: bind_image_cb_(bind_image_cb),
+ media_log_(media_log),
gpu_task_runner_(base::ThreadTaskRunnerHandle::Get()),
decoder_thread_("VTDecoderThread"),
weak_this_factory_(this) {
+ DCHECK(!bind_image_cb_.is_null());
+
callback_.decompressionOutputCallback = OutputThunk;
callback_.decompressionOutputRefCon = this;
weak_this_ = weak_this_factory_.GetWeakPtr();
@@ -445,16 +467,70 @@ VTVideoDecodeAccelerator::~VTVideoDecodeAccelerator() {
bool VTVideoDecodeAccelerator::OnMemoryDump(
const base::trace_event::MemoryDumpArgs& args,
base::trace_event::ProcessMemoryDump* pmd) {
+ DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+
+ // Dump output pictures (decoded frames for which PictureReady() has been
+ // called already).
for (const auto& it : picture_info_map_) {
- int32_t picture_id = it.first;
PictureInfo* picture_info = it.second.get();
if (picture_info->gl_image) {
std::string dump_name =
base::StringPrintf("media/vt_video_decode_accelerator_%d/picture_%d",
- memory_dump_id_, picture_id);
+ memory_dump_id_, picture_info->bitstream_id);
picture_info->gl_image->OnMemoryDump(pmd, 0, dump_name);
}
}
+
+ // Dump the output queue (decoded frames for which
+ // NotifyEndOfBitstreamBuffer() has not been called yet).
+ {
+ uint64_t total_count = 0;
+ uint64_t total_size = 0;
+ for (const auto& it : base::GetUnderlyingContainer(task_queue_)) {
+ if (it.frame.get() && it.frame->image) {
+ IOSurfaceRef io_surface = CVPixelBufferGetIOSurface(it.frame->image);
+ if (io_surface) {
+ ++total_count;
+ total_size += IOSurfaceGetAllocSize(io_surface);
+ }
+ }
+ }
+ base::trace_event::MemoryAllocatorDump* dump = pmd->CreateAllocatorDump(
+ base::StringPrintf("media/vt_video_decode_accelerator_%d/output_queue",
+ memory_dump_id_));
+ dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectCount,
+ base::trace_event::MemoryAllocatorDump::kUnitsObjects,
+ total_count);
+ dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
+ base::trace_event::MemoryAllocatorDump::kUnitsBytes,
+ total_size);
+ }
+
+ // Dump the reorder queue (decoded frames for which
+ // NotifyEndOfBitstreamBuffer() has been called already).
+ {
+ uint64_t total_count = 0;
+ uint64_t total_size = 0;
+ for (const auto& it : base::GetUnderlyingContainer(reorder_queue_)) {
+ if (it.get() && it->image) {
+ IOSurfaceRef io_surface = CVPixelBufferGetIOSurface(it->image);
+ if (io_surface) {
+ ++total_count;
+ total_size += IOSurfaceGetAllocSize(io_surface);
+ }
+ }
+ }
+ base::trace_event::MemoryAllocatorDump* dump = pmd->CreateAllocatorDump(
+ base::StringPrintf("media/vt_video_decode_accelerator_%d/reorder_queue",
+ memory_dump_id_));
+ dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameObjectCount,
+ base::trace_event::MemoryAllocatorDump::kUnitsObjects,
+ total_count);
+ dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
+ base::trace_event::MemoryAllocatorDump::kUnitsBytes,
+ total_size);
+ }
+
return true;
}
@@ -463,39 +539,36 @@ bool VTVideoDecodeAccelerator::Initialize(const Config& config,
DVLOG(1) << __func__;
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
- if (bind_image_cb_.is_null()) {
- NOTREACHED() << "GL callbacks are required for this VDA";
+ // All of these checks should be handled by the caller inspecting
+ // SupportedProfiles(). PPAPI does not do that, however.
+ if (config.output_mode != Config::OutputMode::ALLOCATE) {
+ DVLOG(2) << "Output mode must be ALLOCATE";
return false;
}
if (config.is_encrypted()) {
- NOTREACHED() << "Encrypted streams are not supported for this VDA";
+ DVLOG(2) << "Encrypted streams are not supported";
return false;
}
- if (config.output_mode != Config::OutputMode::ALLOCATE) {
- NOTREACHED() << "Only ALLOCATE OutputMode is supported by this VDA";
+ if (std::find(std::begin(kSupportedProfiles), std::end(kSupportedProfiles),
+ config.profile) == std::end(kSupportedProfiles)) {
+ DVLOG(2) << "Unsupported profile";
return false;
}
- client_ = client;
-
- if (!InitializeVideoToolbox())
+ if (!InitializeVideoToolbox()) {
+ DVLOG(2) << "VideoToolbox is unavailable";
return false;
-
- bool profile_supported = false;
- for (const auto& supported_profile : kSupportedProfiles) {
- if (config.profile == supported_profile) {
- profile_supported = true;
- break;
- }
}
- if (!profile_supported)
- return false;
+
+ client_ = client;
// Spawn a thread to handle parsing and calling VideoToolbox.
- if (!decoder_thread_.Start())
+ if (!decoder_thread_.Start()) {
+ DLOG(ERROR) << "Failed to start decoder thread";
return false;
+ }
// Count the session as successfully initialized.
UMA_HISTOGRAM_ENUMERATION("Media.VTVDA.SessionFailureReason",
@@ -623,20 +696,11 @@ bool VTVideoDecodeAccelerator::ConfigureDecoder() {
return true;
}
-void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
+void VTVideoDecodeAccelerator::DecodeTask(scoped_refptr<DecoderBuffer> buffer,
Frame* frame) {
DVLOG(2) << __func__ << "(" << frame->bitstream_id << ")";
DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
- // Map the bitstream buffer.
- SharedMemoryRegion memory(bitstream, true);
- if (!memory.Map()) {
- DLOG(ERROR) << "Failed to map bitstream buffer";
- NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
- return;
- }
- const uint8_t* buf = static_cast<uint8_t*>(memory.memory());
-
// NALUs are stored with Annex B format in the bitstream buffer (start codes),
// but VideoToolbox expects AVC format (length headers), so we must rewrite
// the data.
@@ -645,19 +709,19 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
// record parameter sets for VideoToolbox initialization.
size_t data_size = 0;
std::vector<H264NALU> nalus;
- parser_.SetStream(buf, memory.size());
+ parser_.SetStream(buffer->data(), buffer->data_size());
H264NALU nalu;
while (true) {
H264Parser::Result result = parser_.AdvanceToNextNALU(&nalu);
if (result == H264Parser::kEOStream)
break;
if (result == H264Parser::kUnsupportedStream) {
- DLOG(ERROR) << "Unsupported H.264 stream";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR, "Unsupported H.264 stream");
NotifyError(PLATFORM_FAILURE, SFT_UNSUPPORTED_STREAM);
return;
}
if (result != H264Parser::kOk) {
- DLOG(ERROR) << "Failed to parse H.264 stream";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR, "Failed to parse H.264 stream");
NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
return;
}
@@ -665,12 +729,12 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
case H264NALU::kSPS:
result = parser_.ParseSPS(&last_sps_id_);
if (result == H264Parser::kUnsupportedStream) {
- DLOG(ERROR) << "Unsupported SPS";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR, "Unsupported SPS");
NotifyError(PLATFORM_FAILURE, SFT_UNSUPPORTED_STREAM);
return;
}
if (result != H264Parser::kOk) {
- DLOG(ERROR) << "Could not parse SPS";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR, "Could not parse SPS");
NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
return;
}
@@ -685,12 +749,12 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
case H264NALU::kPPS:
result = parser_.ParsePPS(&last_pps_id_);
if (result == H264Parser::kUnsupportedStream) {
- DLOG(ERROR) << "Unsupported PPS";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR, "Unsupported PPS");
NotifyError(PLATFORM_FAILURE, SFT_UNSUPPORTED_STREAM);
return;
}
if (result != H264Parser::kOk) {
- DLOG(ERROR) << "Could not parse PPS";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR, "Could not parse PPS");
NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
return;
}
@@ -709,12 +773,14 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
H264SliceHeader slice_hdr;
result = parser_.ParseSliceHeader(nalu, &slice_hdr);
if (result == H264Parser::kUnsupportedStream) {
- DLOG(ERROR) << "Unsupported slice header";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR,
+ "Unsupported slice header");
NotifyError(PLATFORM_FAILURE, SFT_UNSUPPORTED_STREAM);
return;
}
if (result != H264Parser::kOk) {
- DLOG(ERROR) << "Could not parse slice header";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR,
+ "Could not parse slice header");
NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
return;
}
@@ -723,7 +789,8 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
DCHECK_EQ(slice_hdr.pic_parameter_set_id, last_pps_id_);
const H264PPS* pps = parser_.GetPPS(slice_hdr.pic_parameter_set_id);
if (!pps) {
- DLOG(ERROR) << "Mising PPS referenced by slice";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR,
+ "Missing PPS referenced by slice");
NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
return;
}
@@ -731,7 +798,8 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
DCHECK_EQ(pps->seq_parameter_set_id, last_sps_id_);
const H264SPS* sps = parser_.GetSPS(pps->seq_parameter_set_id);
if (!sps) {
- DLOG(ERROR) << "Mising SPS referenced by PPS";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR,
+ "Missing SPS referenced by PPS");
NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
return;
}
@@ -748,7 +816,7 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
base::Optional<int32_t> pic_order_cnt =
poc_.ComputePicOrderCnt(sps, slice_hdr);
if (!pic_order_cnt.has_value()) {
- DLOG(ERROR) << "Unable to compute POC";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR, "Unable to compute POC");
NotifyError(UNREADABLE_INPUT, SFT_INVALID_STREAM);
return;
}
@@ -776,8 +844,9 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
// error messages for those.
if (frame->has_slice && waiting_for_idr_) {
if (!missing_idr_logged_) {
- LOG(ERROR) << "Illegal attempt to decode without IDR. "
- << "Discarding decode requests until next IDR.";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR,
+ ("Illegal attempt to decode without IDR. "
+ "Discarding decode requests until the next IDR."));
missing_idr_logged_ = true;
}
frame->has_slice = false;
@@ -798,12 +867,14 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
(configured_sps_ != active_sps_ || configured_spsext_ != active_spsext_ ||
configured_pps_ != active_pps_)) {
if (active_sps_.empty()) {
- DLOG(ERROR) << "Invalid configuration; no SPS";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR,
+ "Invalid configuration (no SPS)");
NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM);
return;
}
if (active_pps_.empty()) {
- DLOG(ERROR) << "Invalid configuration; no PPS";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR,
+ "Invalid configuration (no PPS)");
NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM);
return;
}
@@ -815,7 +886,8 @@ void VTVideoDecodeAccelerator::DecodeTask(const BitstreamBuffer& bitstream,
// If the session is not configured by this point, fail.
if (!session_) {
- DLOG(ERROR) << "Cannot decode without configuration";
+ WriteToMediaLog(MediaLog::MEDIALOG_ERROR,
+ "Cannot decode without configuration");
NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM);
return;
}
@@ -998,25 +1070,33 @@ void VTVideoDecodeAccelerator::FlushDone(TaskType type) {
}
void VTVideoDecodeAccelerator::Decode(const BitstreamBuffer& bitstream) {
- DVLOG(2) << __func__ << "(" << bitstream.id() << ")";
+ Decode(bitstream.ToDecoderBuffer(), bitstream.id());
+}
+
+void VTVideoDecodeAccelerator::Decode(scoped_refptr<DecoderBuffer> buffer,
+ int32_t bitstream_id) {
+ DVLOG(2) << __func__ << "(" << bitstream_id << ")";
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
- if (bitstream.id() < 0) {
- DLOG(ERROR) << "Invalid bitstream, id: " << bitstream.id();
- if (base::SharedMemory::IsHandleValid(bitstream.handle()))
- base::SharedMemory::CloseHandle(bitstream.handle());
+ if (bitstream_id < 0) {
+ DLOG(ERROR) << "Invalid bitstream, id: " << bitstream_id;
NotifyError(INVALID_ARGUMENT, SFT_INVALID_STREAM);
return;
}
- DCHECK_EQ(0u, assigned_bitstream_ids_.count(bitstream.id()));
- assigned_bitstream_ids_.insert(bitstream.id());
+ if (!buffer) {
+ client_->NotifyEndOfBitstreamBuffer(bitstream_id);
+ return;
+ }
+
+ DCHECK_EQ(0u, assigned_bitstream_ids_.count(bitstream_id));
+ assigned_bitstream_ids_.insert(bitstream_id);
- Frame* frame = new Frame(bitstream.id());
- pending_frames_[frame->bitstream_id] = make_linked_ptr(frame);
+ Frame* frame = new Frame(bitstream_id);
+ pending_frames_[bitstream_id] = make_linked_ptr(frame);
decoder_thread_.task_runner()->PostTask(
FROM_HERE, base::Bind(&VTVideoDecodeAccelerator::DecodeTask,
- base::Unretained(this), bitstream, frame));
+ base::Unretained(this), std::move(buffer), frame));
}
void VTVideoDecodeAccelerator::AssignPictureBuffers(
@@ -1049,20 +1129,24 @@ void VTVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_id) {
DVLOG(2) << __func__ << "(" << picture_id << ")";
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
- auto it = picture_info_map_.find(picture_id);
- if (it != picture_info_map_.end()) {
- PictureInfo* picture_info = it->second.get();
- picture_info->cv_image.reset();
- picture_info->gl_image = nullptr;
- }
-
// It's possible there was a ReusePictureBuffer() request in flight when we
// called DismissPictureBuffer(), in which case we won't find it. In that case
// we should just drop the ReusePictureBuffer() request.
- if (assigned_picture_ids_.count(picture_id)) {
- available_picture_ids_.push_back(picture_id);
- ProcessWorkQueues();
- }
+ auto it = picture_info_map_.find(picture_id);
+ if (it == picture_info_map_.end())
+ return;
+
+ // Drop references to allow the underlying buffer to be released.
+ PictureInfo* picture_info = it->second.get();
+ bind_image_cb_.Run(picture_info->client_texture_id, GL_TEXTURE_RECTANGLE_ARB,
+ nullptr, false);
+ picture_info->gl_image = nullptr;
+ picture_info->bitstream_id = 0;
+
+ // Mark the picture as available and try to complete pending output work.
+ DCHECK(assigned_picture_ids_.count(picture_id));
+ available_picture_ids_.push_back(picture_id);
+ ProcessWorkQueues();
}
void VTVideoDecodeAccelerator::ProcessWorkQueues() {
@@ -1203,6 +1287,7 @@ bool VTVideoDecodeAccelerator::ProcessFrame(const Frame& frame) {
client_->DismissPictureBuffer(picture_id);
}
assigned_picture_ids_.clear();
+ picture_info_map_.clear();
available_picture_ids_.clear();
// Request new pictures.
@@ -1234,7 +1319,6 @@ bool VTVideoDecodeAccelerator::SendFrame(const Frame& frame) {
auto it = picture_info_map_.find(picture_id);
DCHECK(it != picture_info_map_.end());
PictureInfo* picture_info = it->second.get();
- DCHECK(!picture_info->cv_image);
DCHECK(!picture_info->gl_image);
scoped_refptr<gl::GLImageIOSurface> gl_image(
@@ -1246,6 +1330,8 @@ bool VTVideoDecodeAccelerator::SendFrame(const Frame& frame) {
NOTIFY_STATUS("Failed to initialize GLImageIOSurface", PLATFORM_FAILURE,
SFT_PLATFORM_ERROR);
}
+ gfx::ColorSpace color_space = GetImageBufferColorSpace(frame.image);
+ gl_image->SetColorSpaceForYUVToRGBConversion(color_space);
if (!bind_image_cb_.Run(picture_info->client_texture_id,
GL_TEXTURE_RECTANGLE_ARB, gl_image, false)) {
@@ -1253,12 +1339,8 @@ bool VTVideoDecodeAccelerator::SendFrame(const Frame& frame) {
NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
return false;
}
- gfx::ColorSpace color_space = GetImageBufferColorSpace(frame.image);
- gl_image->SetColorSpaceForYUVToRGBConversion(color_space);
-
- // Assign the new image(s) to the the picture info.
picture_info->gl_image = gl_image;
- picture_info->cv_image = frame.image;
+ picture_info->bitstream_id = frame.bitstream_id;
available_picture_ids_.pop_back();
DVLOG(3) << "PictureReady(picture_id=" << picture_id << ", "
@@ -1286,6 +1368,21 @@ void VTVideoDecodeAccelerator::NotifyError(
}
}
+void VTVideoDecodeAccelerator::WriteToMediaLog(MediaLog::MediaLogLevel level,
+ const std::string& message) {
+ if (!gpu_task_runner_->BelongsToCurrentThread()) {
+ gpu_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VTVideoDecodeAccelerator::WriteToMediaLog,
+ weak_this_, level, message));
+ return;
+ }
+
+ DVLOG(1) << __func__ << "(" << level << ") " << message;
+
+ if (media_log_)
+ media_log_->AddLogEvent(level, message);
+}
+
void VTVideoDecodeAccelerator::QueueFlush(TaskType type) {
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
pending_flush_tasks_.push(type);
@@ -1327,6 +1424,9 @@ void VTVideoDecodeAccelerator::Destroy() {
assigned_bitstream_ids_.clear();
state_ = STATE_DESTROYING;
QueueFlush(TASK_DESTROY);
+
+ // Prevent calling into a deleted MediaLog.
+ media_log_ = nullptr;
}
bool VTVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread(
@@ -1339,6 +1439,9 @@ bool VTVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread(
VideoDecodeAccelerator::SupportedProfiles
VTVideoDecodeAccelerator::GetSupportedProfiles() {
SupportedProfiles profiles;
+ if (!InitializeVideoToolbox())
+ return profiles;
+
for (const auto& supported_profile : kSupportedProfiles) {
SupportedProfile profile;
profile.profile = supported_profile;
diff --git a/chromium/media/gpu/vt_video_decode_accelerator_mac.h b/chromium/media/gpu/vt_video_decode_accelerator_mac.h
index d363468a3a9..aa3953b6509 100644
--- a/chromium/media/gpu/vt_video_decode_accelerator_mac.h
+++ b/chromium/media/gpu/vt_video_decode_accelerator_mac.h
@@ -17,10 +17,10 @@
#include "base/macros.h"
#include "base/memory/linked_ptr.h"
#include "base/memory/weak_ptr.h"
-#include "base/message_loop/message_loop.h"
#include "base/threading/thread.h"
#include "base/threading/thread_checker.h"
#include "base/trace_event/memory_dump_provider.h"
+#include "media/base/media_log.h"
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
#include "media/gpu/media_gpu_export.h"
#include "media/video/h264_parser.h"
@@ -39,13 +39,16 @@ MEDIA_GPU_EXPORT bool InitializeVideoToolbox();
class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
public base::trace_event::MemoryDumpProvider {
public:
- explicit VTVideoDecodeAccelerator(const BindGLImageCallback& bind_image_cb);
+ VTVideoDecodeAccelerator(const BindGLImageCallback& bind_image_cb,
+ MediaLog* media_log);
~VTVideoDecodeAccelerator() override;
// VideoDecodeAccelerator implementation.
bool Initialize(const Config& config, Client* client) override;
void Decode(const BitstreamBuffer& bitstream) override;
+ void Decode(scoped_refptr<DecoderBuffer> buffer,
+ int32_t bitstream_id) override;
void AssignPictureBuffers(
const std::vector<PictureBuffer>& pictures) override;
void ReusePictureBuffer(int32_t picture_id) override;
@@ -129,13 +132,9 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
PictureInfo(uint32_t client_texture_id, uint32_t service_texture_id);
~PictureInfo();
- // Image buffer, kept alive while they are bound to pictures.
- base::ScopedCFTypeRef<CVImageBufferRef> cv_image;
-
- // The GLImage representation of |cv_image|. This is kept around to ensure
- // that Destroy is called on it before it hits its destructor (there is a
- // DCHECK that requires this).
+ // Information about the currently bound image, for OnMemoryDump().
scoped_refptr<gl::GLImageIOSurface> gl_image;
+ int32_t bitstream_id;
// Texture IDs for the image buffer.
const uint32_t client_texture_id;
@@ -163,7 +162,7 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
bool FinishDelayedFrames();
// |frame| is owned by |pending_frames_|.
- void DecodeTask(const BitstreamBuffer&, Frame* frame);
+ void DecodeTask(scoped_refptr<DecoderBuffer> buffer, Frame* frame);
void DecodeDone(Frame* frame);
//
@@ -172,6 +171,12 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
void NotifyError(Error vda_error_type,
VTVDASessionFailureType session_failure_type);
+ // Since |media_log_| is invalidated in Destroy() on the GPU thread, the easy
+ // thing to do is post to the GPU thread to use it. This helper handles the
+ // thread hop if necessary.
+ void WriteToMediaLog(MediaLog::MediaLogLevel level,
+ const std::string& message);
+
// |type| is the type of task that the flush will complete, one of TASK_FLUSH,
// TASK_RESET, or TASK_DESTROY.
void QueueFlush(TaskType type);
@@ -192,6 +197,7 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
// GPU thread state.
//
BindGLImageCallback bind_image_cb_;
+ MediaLog* media_log_;
VideoDecodeAccelerator::Client* client_ = nullptr;
State state_ = STATE_DECODING;
diff --git a/chromium/media/gpu/vt_video_encode_accelerator_mac.cc b/chromium/media/gpu/vt_video_encode_accelerator_mac.cc
index 6a6e741a2d6..29f733e3ba9 100644
--- a/chromium/media/gpu/vt_video_encode_accelerator_mac.cc
+++ b/chromium/media/gpu/vt_video_encode_accelerator_mac.cc
@@ -8,7 +8,6 @@
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/mac/video_frame_mac.h"
-#include "third_party/webrtc/system_wrappers/include/clock.h"
namespace media {
@@ -93,7 +92,7 @@ struct VTVideoEncodeAccelerator::BitstreamBufferRef {
VTVideoEncodeAccelerator::VTVideoEncodeAccelerator()
: target_bitrate_(0),
h264_profile_(H264PROFILE_BASELINE),
- bitrate_adjuster_(webrtc::Clock::GetRealTimeClock(), .5, .95),
+ bitrate_adjuster_(.5, .95),
client_task_runner_(base::ThreadTaskRunnerHandle::Get()),
encoder_thread_("VTEncoderThread"),
encoder_task_weak_factory_(this) {
diff --git a/chromium/media/gpu/windows/d3d11_cdm_proxy.cc b/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
index ab68be5cf7a..bf92dd0e9b2 100644
--- a/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
+++ b/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
@@ -9,8 +9,10 @@
#include "base/bind.h"
#include "base/logging.h"
+#include "media/base/callback_registry.h"
#include "media/base/cdm_context.h"
#include "media/base/cdm_proxy_context.h"
+#include "media/gpu/windows/d3d11_decryptor.h"
namespace media {
@@ -94,6 +96,7 @@ class D3D11CdmContext : public CdmContext {
const std::vector<uint8_t>& key_id,
const std::vector<uint8_t>& key_blob) {
cdm_proxy_context_.SetKey(crypto_session, key_id, key_blob);
+ new_key_callbacks_.Notify();
}
void RemoveKey(ID3D11CryptoSession* crypto_session,
const std::vector<uint8_t>& key_id) {
@@ -105,11 +108,26 @@ class D3D11CdmContext : public CdmContext {
}
// CdmContext implementation.
+ std::unique_ptr<CallbackRegistration> RegisterNewKeyCB(
+ base::RepeatingClosure new_key_cb) override {
+ return new_key_callbacks_.Register(std::move(new_key_cb));
+ }
CdmProxyContext* GetCdmProxyContext() override { return &cdm_proxy_context_; }
+ Decryptor* GetDecryptor() override {
+ if (!decryptor_)
+ decryptor_.reset(new D3D11Decryptor(&cdm_proxy_context_));
+
+ return decryptor_.get();
+ }
+
private:
D3D11CdmProxyContext cdm_proxy_context_;
+ std::unique_ptr<D3D11Decryptor> decryptor_;
+
+ ClosureRegistry new_key_callbacks_;
+
base::WeakPtrFactory<D3D11CdmContext> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(D3D11CdmContext);
diff --git a/chromium/media/gpu/windows/d3d11_decryptor.cc b/chromium/media/gpu/windows/d3d11_decryptor.cc
new file mode 100644
index 00000000000..87b064ca314
--- /dev/null
+++ b/chromium/media/gpu/windows/d3d11_decryptor.cc
@@ -0,0 +1,71 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/windows/d3d11_decryptor.h"
+
+#include "base/logging.h"
+#include "media/base/decoder_buffer.h"
+
+namespace media {
+
+D3D11Decryptor::D3D11Decryptor(CdmProxyContext* cdm_proxy_context)
+ : cdm_proxy_context_(cdm_proxy_context), weak_factory_(this) {
+ DCHECK(cdm_proxy_context_);
+}
+
+D3D11Decryptor::~D3D11Decryptor() {}
+
+void D3D11Decryptor::RegisterNewKeyCB(StreamType stream_type,
+ const NewKeyCB& new_key_cb) {
+ // TODO(xhwang): Use RegisterNewKeyCB() on CdmContext, and remove
+ // RegisterNewKeyCB from Decryptor interface.
+ NOTREACHED();
+}
+
+void D3D11Decryptor::Decrypt(StreamType stream_type,
+ scoped_refptr<DecoderBuffer> encrypted,
+ const DecryptCB& decrypt_cb) {
+  // TODO(rkuroiwa): Implement this function using |cdm_proxy_context_|.
+ NOTIMPLEMENTED();
+}
+
+void D3D11Decryptor::CancelDecrypt(StreamType stream_type) {
+ // Decrypt() calls the DecryptCB synchronously so there's nothing to cancel.
+}
+
+void D3D11Decryptor::InitializeAudioDecoder(const AudioDecoderConfig& config,
+ const DecoderInitCB& init_cb) {
+ // D3D11Decryptor does not support audio decoding.
+ init_cb.Run(false);
+}
+
+void D3D11Decryptor::InitializeVideoDecoder(const VideoDecoderConfig& config,
+ const DecoderInitCB& init_cb) {
+ // D3D11Decryptor does not support video decoding.
+ init_cb.Run(false);
+}
+
+void D3D11Decryptor::DecryptAndDecodeAudio(
+ scoped_refptr<DecoderBuffer> encrypted,
+ const AudioDecodeCB& audio_decode_cb) {
+ NOTREACHED() << "D3D11Decryptor does not support audio decoding";
+}
+
+void D3D11Decryptor::DecryptAndDecodeVideo(
+ scoped_refptr<DecoderBuffer> encrypted,
+ const VideoDecodeCB& video_decode_cb) {
+ NOTREACHED() << "D3D11Decryptor does not support video decoding";
+}
+
+void D3D11Decryptor::ResetDecoder(StreamType stream_type) {
+ NOTREACHED() << "D3D11Decryptor does not support audio/video decoding";
+}
+
+void D3D11Decryptor::DeinitializeDecoder(StreamType stream_type) {
+ // D3D11Decryptor does not support audio/video decoding, but since this can be
+ // called any time after InitializeAudioDecoder/InitializeVideoDecoder,
+ // nothing to be done here.
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_decryptor.h b/chromium/media/gpu/windows/d3d11_decryptor.h
new file mode 100644
index 00000000000..ef4d5ae30f6
--- /dev/null
+++ b/chromium/media/gpu/windows/d3d11_decryptor.h
@@ -0,0 +1,50 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_WINDOWS_D3D11_DECRYPTOR_H_
+#define MEDIA_GPU_WINDOWS_D3D11_DECRYPTOR_H_
+
+#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "media/base/decryptor.h"
+#include "media/gpu/media_gpu_export.h"
+
+namespace media {
+
+class CdmProxyContext;
+
+class MEDIA_GPU_EXPORT D3D11Decryptor : public Decryptor {
+ public:
+ explicit D3D11Decryptor(CdmProxyContext* cdm_proxy_context);
+ ~D3D11Decryptor() final;
+
+ // Decryptor implementation.
+ void RegisterNewKeyCB(StreamType stream_type,
+ const NewKeyCB& key_added_cb) final;
+ void Decrypt(StreamType stream_type,
+ scoped_refptr<DecoderBuffer> encrypted,
+ const DecryptCB& decrypt_cb) final;
+ void CancelDecrypt(StreamType stream_type) final;
+ void InitializeAudioDecoder(const AudioDecoderConfig& config,
+ const DecoderInitCB& init_cb) final;
+ void InitializeVideoDecoder(const VideoDecoderConfig& config,
+ const DecoderInitCB& init_cb) final;
+ void DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
+ const AudioDecodeCB& audio_decode_cb) final;
+ void DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
+ const VideoDecodeCB& video_decode_cb) final;
+ void ResetDecoder(StreamType stream_type) final;
+ void DeinitializeDecoder(StreamType stream_type) final;
+
+ private:
+ CdmProxyContext* cdm_proxy_context_;
+
+ base::WeakPtrFactory<D3D11Decryptor> weak_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(D3D11Decryptor);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_WINDOWS_D3D11_DECRYPTOR_H_
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.cc b/chromium/media/gpu/windows/d3d11_video_decoder.cc
index 826a99119fa..ab4a0512f8e 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.cc
@@ -45,15 +45,32 @@ base::Callback<void(Args...)> BindToCurrentThreadIfWeakPtr(
namespace media {
-D3D11VideoDecoder::D3D11VideoDecoder(
+std::unique_ptr<VideoDecoder> D3D11VideoDecoder::Create(
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
- base::RepeatingCallback<gpu::CommandBufferStub*()> get_stub_cb)
- : impl_task_runner_(std::move(gpu_task_runner)), weak_factory_(this) {
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
+ base::RepeatingCallback<gpu::CommandBufferStub*()> get_stub_cb) {
// We create |impl_| on the wrong thread, but we never use it here.
// Note that the output callback will hop to our thread, post the video
// frame, and along with a callback that will hop back to the impl thread
// when it's released.
- impl_ = std::make_unique<D3D11VideoDecoderImpl>(get_stub_cb);
+ // Note that we WrapUnique<VideoDecoder> rather than D3D11VideoDecoder to make
+ // this castable; the deleters have to match.
+ return base::WrapUnique<VideoDecoder>(new D3D11VideoDecoder(
+ std::move(gpu_task_runner), gpu_preferences, gpu_workarounds,
+ std::make_unique<D3D11VideoDecoderImpl>(get_stub_cb)));
+}
+
+D3D11VideoDecoder::D3D11VideoDecoder(
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
+ std::unique_ptr<D3D11VideoDecoderImpl> impl)
+ : impl_(std::move(impl)),
+ impl_task_runner_(std::move(gpu_task_runner)),
+ gpu_preferences_(gpu_preferences),
+ gpu_workarounds_(gpu_workarounds),
+ weak_factory_(this) {
impl_weak_ = impl_->GetWeakPtr();
}
@@ -75,15 +92,16 @@ void D3D11VideoDecoder::Initialize(
const InitCB& init_cb,
const OutputCB& output_cb,
const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb) {
- bool is_h264 = config.profile() >= H264PROFILE_MIN &&
- config.profile() <= H264PROFILE_MAX;
- if (!is_h264) {
+ if (!IsPotentiallySupported(config)) {
+ DVLOG(3) << "D3D11 video decoder not supported for the config.";
init_cb.Run(false);
return;
}
// Bind our own init / output cb that hop to this thread, so we don't call the
// originals on some other thread.
+ // Important but subtle note: base::Bind will copy |config_| since it's a
+ // const ref.
// TODO(liberato): what's the lifetime of |cdm_context|?
impl_task_runner_->PostTask(
FROM_HERE,
@@ -126,4 +144,48 @@ int D3D11VideoDecoder::GetMaxDecodeRequests() const {
return impl_->GetMaxDecodeRequests();
}
+bool D3D11VideoDecoder::IsPotentiallySupported(
+ const VideoDecoderConfig& config) {
+ // TODO(liberato): All of this could be moved into MojoVideoDecoder, so that
+ // it could run on the client side and save the IPC hop.
+
+ // Must be H264.
+ const bool is_h264 = config.profile() >= H264PROFILE_MIN &&
+ config.profile() <= H264PROFILE_MAX;
+
+ if (!is_h264) {
+ DVLOG(2) << "Profile is not H264.";
+ return false;
+ }
+
+ // Must use NV12, which excludes HDR.
+ if (config.profile() == H264PROFILE_HIGH10PROFILE) {
+ DVLOG(2) << "High 10 profile is not supported.";
+ return false;
+ }
+
+ // TODO(liberato): dxva checks IsHDR() in the target colorspace, but we don't
+ // have the target colorspace. It's commented as being for vpx, though, so
+ // we skip it here for now.
+
+ // Must use the validating decoder.
+ if (gpu_preferences_.use_passthrough_cmd_decoder) {
+ DVLOG(2) << "Must use validating decoder.";
+ return false;
+ }
+
+ // Must allow zero-copy of nv12 textures.
+ if (!gpu_preferences_.enable_zero_copy_dxgi_video) {
+ DVLOG(2) << "Must allow zero-copy NV12.";
+ return false;
+ }
+
+ if (gpu_workarounds_.disable_dxgi_zero_copy_video) {
+ DVLOG(2) << "Must allow zero-copy video.";
+ return false;
+ }
+
+ return true;
+}
+
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.h b/chromium/media/gpu/windows/d3d11_video_decoder.h
index 7bff20d955f..8f37514dbc9 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.h
@@ -11,6 +11,9 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/sequenced_task_runner.h"
+#include "base/single_thread_task_runner.h"
+#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/ipc/service/command_buffer_stub.h"
#include "media/base/video_decoder.h"
#include "media/gpu/media_gpu_export.h"
@@ -18,6 +21,7 @@
namespace media {
class D3D11VideoDecoderImpl;
+class D3D11VideoDecoderTest;
// Thread-hopping implementation of D3D11VideoDecoder. It's meant to run on
// a random thread, and hop to the gpu main thread. It does this so that it
@@ -27,10 +31,11 @@ class D3D11VideoDecoderImpl;
// now, it's easier to hop threads.
class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder {
public:
- D3D11VideoDecoder(
+ static std::unique_ptr<VideoDecoder> Create(
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
base::RepeatingCallback<gpu::CommandBufferStub*()> get_stub_cb);
- ~D3D11VideoDecoder() override;
// VideoDecoder implementation:
std::string GetDisplayName() const override;
@@ -48,7 +53,24 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder {
bool CanReadWithoutStalling() const override;
int GetMaxDecodeRequests() const override;
+  // Return false if |config| definitely isn't going to work, so that we can fail
+ // init without bothering with a thread hop.
+ bool IsPotentiallySupported(const VideoDecoderConfig& config);
+
+ protected:
+ // Owners should call Destroy(). This is automatic via
+ // std::default_delete<media::VideoDecoder> when held by a
+ // std::unique_ptr<media::VideoDecoder>.
+ ~D3D11VideoDecoder() override;
+
private:
+ friend class D3D11VideoDecoderTest;
+
+ D3D11VideoDecoder(scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
+ std::unique_ptr<D3D11VideoDecoderImpl> impl);
+
// The implementation, which we trampoline to the impl thread.
// This must be freed on the impl thread.
std::unique_ptr<D3D11VideoDecoderImpl> impl_;
@@ -59,6 +81,9 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder {
// Task runner for |impl_|. This must be the GPU main thread.
scoped_refptr<base::SequencedTaskRunner> impl_task_runner_;
+ gpu::GpuPreferences gpu_preferences_;
+ gpu::GpuDriverBugWorkarounds gpu_workarounds_;
+
base::WeakPtrFactory<D3D11VideoDecoder> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(D3D11VideoDecoder);
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc b/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
index a9db6b10d03..9597ab64254 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
@@ -8,11 +8,15 @@
#include "base/threading/sequenced_task_runner_handle.h"
#include "gpu/command_buffer/service/mailbox_manager.h"
+#include "gpu/command_buffer/service/scheduler.h"
#include "gpu/command_buffer/service/texture_manager.h"
+#include "gpu/ipc/service/gpu_channel.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/cdm_context.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
#include "media/gpu/windows/d3d11_picture_buffer.h"
#include "ui/gl/gl_angle_util_win.h"
#include "ui/gl/gl_bindings.h"
@@ -34,6 +38,9 @@ D3D11VideoDecoderImpl::D3D11VideoDecoderImpl(
D3D11VideoDecoderImpl::~D3D11VideoDecoderImpl() {
// TODO(liberato): be sure to clear |picture_buffers_| on the main thread.
// For now, we always run on the main thread anyway.
+
+ if (stub_ && !wait_sequence_id_.is_null())
+ stub_->channel()->scheduler()->DestroySequence(wait_sequence_id_);
}
std::string D3D11VideoDecoderImpl::GetDisplayName() const {
@@ -50,6 +57,7 @@ void D3D11VideoDecoderImpl::Initialize(
const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb) {
init_cb_ = init_cb;
output_cb_ = output_cb;
+ is_encrypted_ = config.is_encrypted();
stub_ = get_stub_cb_.Run();
if (!MakeContextCurrent(stub_)) {
@@ -58,6 +66,8 @@ void D3D11VideoDecoderImpl::Initialize(
}
// TODO(liberato): see GpuVideoFrameFactory.
// stub_->AddDestructionObserver(this);
+ wait_sequence_id_ = stub_->channel()->scheduler()->CreateSequence(
+ gpu::SchedulingPriority::kNormal);
// Use the ANGLE device, rather than create our own. It would be nice if we
// could use our own device, and run on the mojo thread, but texture sharing
@@ -67,7 +77,8 @@ void D3D11VideoDecoderImpl::Initialize(
HRESULT hr;
- // TODO(liberato): Handle cleanup better.
+ // TODO(liberato): Handle cleanup better. Also consider being less chatty in
+ // the logs, since this will fall back.
hr = device_context_.CopyTo(video_context_.GetAddressOf());
if (!SUCCEEDED(hr)) {
NotifyError("Failed to get device context");
@@ -145,6 +156,9 @@ void D3D11VideoDecoderImpl::Initialize(
return;
}
+ if (is_encrypted_)
+ dec_config.guidConfigBitstreamEncryption = D3D11_DECODER_ENCRYPTION_HW_CENC;
+
memcpy(&decoder_guid_, &decoder_guid, sizeof decoder_guid_);
Microsoft::WRL::ComPtr<ID3D11VideoDecoder> video_decoder;
@@ -159,6 +173,13 @@ void D3D11VideoDecoderImpl::Initialize(
std::make_unique<H264Decoder>(std::make_unique<D3D11H264Accelerator>(
this, video_decoder, video_device_, video_context_));
+ // |cdm_context| could be null for clear playback.
+ if (cdm_context) {
+ new_key_callback_registration_ =
+ cdm_context->RegisterNewKeyCB(base::BindRepeating(
+ &D3D11VideoDecoderImpl::NotifyNewKey, weak_factory_.GetWeakPtr()));
+ }
+
state_ = State::kRunning;
std::move(init_cb_).Run(true);
}
@@ -189,7 +210,6 @@ void D3D11VideoDecoderImpl::DoDecode() {
}
current_buffer_ = std::move(input_buffer_queue_.front().first);
current_decode_cb_ = input_buffer_queue_.front().second;
- current_timestamp_ = current_buffer_->timestamp();
input_buffer_queue_.pop_front();
if (current_buffer_->end_of_stream()) {
// Flush, then signal the decode cb once all pictures have been output.
@@ -204,9 +224,13 @@ void D3D11VideoDecoderImpl::DoDecode() {
std::move(current_decode_cb_).Run(DecodeStatus::OK);
return;
}
- accelerated_video_decoder_->SetStream(
- -1, (const uint8_t*)current_buffer_->data(),
- current_buffer_->data_size());
+ // This must be after checking for EOS because there is no timestamp for an
+ // EOS buffer.
+ current_timestamp_ = current_buffer_->timestamp();
+
+ accelerated_video_decoder_->SetStream(-1, current_buffer_->data(),
+ current_buffer_->data_size(),
+ current_buffer_->decrypt_config());
}
while (true) {
@@ -230,6 +254,10 @@ void D3D11VideoDecoderImpl::DoDecode() {
CreatePictureBuffers();
} else if (result == media::AcceleratedVideoDecoder::kAllocateNewSurfaces) {
CreatePictureBuffers();
+ } else if (result == media::AcceleratedVideoDecoder::kNoKey) {
+ state_ = State::kWaitingForNewKey;
+ // Note that another DoDecode() task would be posted in NotifyNewKey().
+ return;
} else {
LOG(ERROR) << "VDA Error " << result;
NotifyError("Accelerated decode failed");
@@ -289,6 +317,8 @@ void D3D11VideoDecoderImpl::CreatePictureBuffers() {
texture_desc.Usage = D3D11_USAGE_DEFAULT;
texture_desc.BindFlags = D3D11_BIND_DECODER | D3D11_BIND_SHADER_RESOURCE;
texture_desc.MiscFlags = D3D11_RESOURCE_MISC_SHARED;
+ if (is_encrypted_)
+ texture_desc.MiscFlags |= D3D11_RESOURCE_MISC_HW_PROTECTED;
Microsoft::WRL::ComPtr<ID3D11Texture2D> out_texture;
HRESULT hr = device_->CreateTexture2D(&texture_desc, nullptr,
@@ -332,18 +362,27 @@ void D3D11VideoDecoderImpl::OutputResult(D3D11PictureBuffer* buffer) {
// Note: The pixel format doesn't matter.
gfx::Rect visible_rect(buffer->size());
- gfx::Size natural_size = buffer->size();
+ // TODO(liberato): Pixel aspect ratio should come from the VideoDecoderConfig
+ // (except when it should come from the SPS).
+ // https://crbug.com/837337
+ double pixel_aspect_ratio = 1.0;
base::TimeDelta timestamp = buffer->timestamp_;
auto frame = VideoFrame::WrapNativeTextures(
PIXEL_FORMAT_NV12, buffer->mailbox_holders(),
- VideoFrame::ReleaseMailboxCB(), buffer->size(), visible_rect,
- natural_size, timestamp);
+ VideoFrame::ReleaseMailboxCB(), visible_rect.size(), visible_rect,
+ GetNaturalSize(visible_rect, pixel_aspect_ratio), timestamp);
frame->SetReleaseMailboxCB(media::BindToCurrentLoop(base::BindOnce(
&D3D11VideoDecoderImpl::OnMailboxReleased, weak_factory_.GetWeakPtr(),
scoped_refptr<D3D11PictureBuffer>(buffer))));
frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT, true);
+ // For NV12, overlay is allowed by default. If the decoder is going to support
+ // non-NV12 textures, then this may have to be conditionally set. Also note
+ // that ALLOW_OVERLAY is required for encrypted video path.
+ frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true);
+ if (is_encrypted_)
+ frame->metadata()->SetBoolean(VideoFrameMetadata::PROTECTED_VIDEO, true);
output_cb_.Run(frame);
}
@@ -353,7 +392,16 @@ void D3D11VideoDecoderImpl::OnMailboxReleased(
// Note that |buffer| might no longer be in |picture_buffers_| if we've
// replaced them. That's okay.
- // TODO(liberato): what about the sync token?
+ stub_->channel()->scheduler()->ScheduleTask(gpu::Scheduler::Task(
+ wait_sequence_id_,
+ base::BindOnce(&D3D11VideoDecoderImpl::OnSyncTokenReleased, GetWeakPtr(),
+ std::move(buffer)),
+ std::vector<gpu::SyncToken>({sync_token})));
+}
+
+void D3D11VideoDecoderImpl::OnSyncTokenReleased(
+ scoped_refptr<D3D11PictureBuffer> buffer) {
+ // Note that |buffer| might no longer be in |picture_buffers_|.
buffer->set_in_client_use(false);
// Also re-start decoding in case it was waiting for more pictures.
@@ -361,13 +409,28 @@ void D3D11VideoDecoderImpl::OnMailboxReleased(
// probably check.
base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
- base::Bind(&D3D11VideoDecoderImpl::DoDecode, weak_factory_.GetWeakPtr()));
+ base::BindOnce(&D3D11VideoDecoderImpl::DoDecode, GetWeakPtr()));
}
base::WeakPtr<D3D11VideoDecoderImpl> D3D11VideoDecoderImpl::GetWeakPtr() {
return weak_factory_.GetWeakPtr();
}
+void D3D11VideoDecoderImpl::NotifyNewKey() {
+ if (state_ != State::kWaitingForNewKey) {
+ // Note that this method may be called before DoDecode() because the key
+ // acquisition stack may be running independently of the media decoding
+ // stack. So if this isn't in kWaitingForNewKey state no "resuming" is
+ // required therefore no special action taken here.
+ return;
+ }
+
+ state_ = State::kRunning;
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE, base::BindOnce(&D3D11VideoDecoderImpl::DoDecode,
+ weak_factory_.GetWeakPtr()));
+}
+
void D3D11VideoDecoderImpl::NotifyError(const char* reason) {
state_ = State::kError;
DLOG(ERROR) << reason;
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_impl.h b/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
index b280503037d..e7e34de38d8 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
@@ -15,7 +15,9 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
+#include "gpu/command_buffer/service/sequence_id.h"
#include "gpu/ipc/service/command_buffer_stub.h"
+#include "media/base/callback_registry.h"
#include "media/base/video_decoder.h"
#include "media/gpu/gles2_decoder_helper.h"
#include "media/gpu/media_gpu_export.h"
@@ -27,7 +29,7 @@ namespace media {
class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl : public VideoDecoder,
public D3D11VideoDecoderClient {
public:
- D3D11VideoDecoderImpl(
+ explicit D3D11VideoDecoderImpl(
base::RepeatingCallback<gpu::CommandBufferStub*()> get_stub_cb);
~D3D11VideoDecoderImpl() override;
@@ -62,8 +64,13 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl : public VideoDecoder,
// in which |codec_| might be non-null. If |codec_| is null, a codec
// creation is pending.
kRunning,
+ // The decoder cannot make progress because it doesn't have the key to
+ // decrypt the buffer. Waiting for a new key to be available.
+ // This should only be transitioned from kRunning, and should only
+ // transition to kRunning.
+ kWaitingForNewKey,
// A fatal error occurred. A terminal state.
- kError
+ kError,
};
void DoDecode();
@@ -71,6 +78,10 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl : public VideoDecoder,
void OnMailboxReleased(scoped_refptr<D3D11PictureBuffer> buffer,
const gpu::SyncToken& sync_token);
+ void OnSyncTokenReleased(scoped_refptr<D3D11PictureBuffer> buffer);
+
+ // Callback to notify that new usable key is available.
+ void NotifyNewKey();
// Enter the kError state. This will fail any pending |init_cb_| and / or
// pending decode as well.
@@ -97,6 +108,7 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl : public VideoDecoder,
// During init, these will be set.
InitCB init_cb_;
OutputCB output_cb_;
+ bool is_encrypted_ = false;
// It would be nice to unique_ptr these, but we give a ref to the VideoFrame
// so that the texture is retained until the mailbox is opened.
@@ -104,6 +116,12 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl : public VideoDecoder,
State state_ = State::kInitializing;
+ // Callback registration to keep the new key callback registered.
+ std::unique_ptr<CallbackRegistration> new_key_callback_registration_;
+
+ // Wait sequence for sync points.
+ gpu::SequenceId wait_sequence_id_;
+
base::WeakPtrFactory<D3D11VideoDecoderImpl> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(D3D11VideoDecoderImpl);
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
new file mode 100644
index 00000000000..dc0f2f44796
--- /dev/null
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
@@ -0,0 +1,147 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/windows/d3d11_video_decoder.h"
+
+#include <d3d11.h>
+#include <d3d11_1.h>
+#include <initguid.h>
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/run_loop.h"
+#include "base/single_thread_task_runner.h"
+#include "base/test/scoped_task_environment.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/test_helpers.h"
+#include "media/gpu/windows/d3d11_mocks.h"
+#include "media/gpu/windows/d3d11_video_decoder_impl.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+
+namespace media {
+
+class MockD3D11VideoDecoderImpl : public D3D11VideoDecoderImpl {
+ public:
+ MockD3D11VideoDecoderImpl()
+ : D3D11VideoDecoderImpl(
+ base::RepeatingCallback<gpu::CommandBufferStub*()>()) {}
+
+ MOCK_METHOD6(
+ Initialize,
+ void(const VideoDecoderConfig& config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ const InitCB& init_cb,
+ const OutputCB& output_cb,
+ const WaitingForDecryptionKeyCB& waiting_for_decryption_key_cb));
+
+ MOCK_METHOD2(Decode,
+ void(scoped_refptr<DecoderBuffer> buffer,
+ const DecodeCB& decode_cb));
+ MOCK_METHOD1(Reset, void(const base::RepeatingClosure& closure));
+};
+
+class D3D11VideoDecoderTest : public ::testing::Test {
+ public:
+ void SetUp() override {
+ // Set up some sane defaults.
+ gpu_preferences_.enable_zero_copy_dxgi_video = true;
+ gpu_preferences_.use_passthrough_cmd_decoder = false;
+ gpu_workarounds_.disable_dxgi_zero_copy_video = false;
+ supported_config_ = TestVideoConfig::NormalH264(H264PROFILE_MAIN);
+ }
+
+ void TearDown() override {
+ decoder_.reset();
+ // Run the gpu thread runner to tear down |impl_|.
+ base::RunLoop().RunUntilIdle();
+ }
+
+ void CreateDecoder() {
+ std::unique_ptr<MockD3D11VideoDecoderImpl> impl =
+ std::make_unique<MockD3D11VideoDecoderImpl>();
+ impl_ = impl.get();
+
+ gpu_task_runner_ = base::ThreadTaskRunnerHandle::Get();
+
+ decoder_ = base::WrapUnique<VideoDecoder>(new D3D11VideoDecoder(
+ gpu_task_runner_, gpu_preferences_, gpu_workarounds_, std::move(impl)));
+ }
+
+ enum InitExpectation {
+ kExpectFailure = false,
+ kExpectSuccess = true,
+ };
+
+ void InitializeDecoder(const VideoDecoderConfig& config,
+ InitExpectation expectation) {
+ const bool low_delay = false;
+ CdmContext* cdm_context = nullptr;
+
+ if (expectation == kExpectSuccess) {
+ EXPECT_CALL(*this, MockInitCB(_)).Times(0);
+ EXPECT_CALL(*impl_, Initialize(_, low_delay, cdm_context, _, _, _));
+ } else {
+ EXPECT_CALL(*this, MockInitCB(false));
+ }
+
+ decoder_->Initialize(config, low_delay, cdm_context,
+ base::BindRepeating(&D3D11VideoDecoderTest::MockInitCB,
+ base::Unretained(this)),
+ VideoDecoder::OutputCB(), base::NullCallback());
+ base::RunLoop().RunUntilIdle();
+ }
+
+ base::test::ScopedTaskEnvironment env_;
+
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
+
+ std::unique_ptr<VideoDecoder> decoder_;
+ gpu::GpuPreferences gpu_preferences_;
+ gpu::GpuDriverBugWorkarounds gpu_workarounds_;
+ MockD3D11VideoDecoderImpl* impl_ = nullptr;
+ VideoDecoderConfig supported_config_;
+
+ MOCK_METHOD1(MockInitCB, void(bool));
+};
+
+TEST_F(D3D11VideoDecoderTest, SupportsH264) {
+ CreateDecoder();
+ // Make sure that we're testing H264.
+ ASSERT_EQ(supported_config_.profile(), H264PROFILE_MAIN);
+ InitializeDecoder(supported_config_, kExpectSuccess);
+}
+
+TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP8) {
+ CreateDecoder();
+ InitializeDecoder(TestVideoConfig::Normal(kCodecVP8), kExpectFailure);
+}
+
+TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9) {
+ CreateDecoder();
+ InitializeDecoder(TestVideoConfig::Normal(kCodecVP9), kExpectFailure);
+}
+
+TEST_F(D3D11VideoDecoderTest, RequiresZeroCopyPreference) {
+ gpu_preferences_.enable_zero_copy_dxgi_video = false;
+ CreateDecoder();
+ InitializeDecoder(supported_config_, kExpectFailure);
+}
+
+TEST_F(D3D11VideoDecoderTest, FailsIfUsingPassthroughDecoder) {
+ gpu_preferences_.use_passthrough_cmd_decoder = true;
+ CreateDecoder();
+ InitializeDecoder(supported_config_, kExpectFailure);
+}
+
+TEST_F(D3D11VideoDecoderTest, FailsIfZeroCopyWorkaround) {
+ gpu_workarounds_.disable_dxgi_zero_copy_video = true;
+ CreateDecoder();
+ InitializeDecoder(supported_config_, kExpectFailure);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/windows/dxva_picture_buffer_win.cc b/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
index 4f867dc849d..0a3075e49d8 100644
--- a/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
+++ b/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
@@ -49,7 +49,8 @@ class DummyGLImage : public gl::GLImage {
gfx::OverlayTransform transform,
const gfx::Rect& bounds_rect,
const gfx::RectF& crop_rect,
- bool enable_blend) override {
+ bool enable_blend,
+ gfx::GpuFence* gpu_fence) override {
return false;
}
void SetColorSpace(const gfx::ColorSpace& color_space) override {}
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
index d8f1898a085..93e65ae651f 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
@@ -46,6 +46,7 @@
#include "build/build_config.h"
#include "gpu/command_buffer/service/gpu_preferences.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/win/mf_helpers.h"
#include "media/base/win/mf_initializer.h"
@@ -477,9 +478,7 @@ class H264ConfigChangeDetector : public ConfigChangeDetector {
};
H264ConfigChangeDetector::H264ConfigChangeDetector()
- : last_sps_id_(0),
- last_pps_id_(0),
- pending_config_changed_(false) {}
+ : last_sps_id_(0), last_pps_id_(0), pending_config_changed_(false) {}
H264ConfigChangeDetector::~H264ConfigChangeDetector() {}
@@ -700,7 +699,8 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
const MakeGLContextCurrentCallback& make_context_current_cb,
const BindGLImageCallback& bind_image_cb,
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences)
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log)
: client_(NULL),
dev_manager_reset_token_(0),
dx11_dev_manager_reset_token_(0),
@@ -712,6 +712,7 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
get_gl_context_cb_(get_gl_context_cb),
make_context_current_cb_(make_context_current_cb),
bind_image_cb_(bind_image_cb),
+ media_log_(media_log),
codec_(kUnknownVideoCodec),
decoder_thread_("DXVAVideoDecoderThread"),
pending_flush_(false),
@@ -892,6 +893,9 @@ bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
if (d3d9_.Get())
return true;
+ if (media_log_)
+ MEDIA_LOG(INFO, media_log_) << __func__ << ": Creating D3D9 device.";
+
HRESULT hr = E_FAIL;
hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.GetAddressOf());
@@ -1039,6 +1043,10 @@ bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
// The device may exist if the last state was a config change.
if (D3D11Device())
return true;
+
+ if (media_log_)
+ MEDIA_LOG(INFO, media_log_) << __func__ << ": Creating D3D11 device.";
+
HRESULT hr = create_dxgi_device_manager_(
&dx11_dev_manager_reset_token_, d3d11_device_manager_.GetAddressOf());
RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false);
@@ -1146,44 +1154,40 @@ bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
return true;
}
-void DXVAVideoDecodeAccelerator::Decode(
- const BitstreamBuffer& bitstream_buffer) {
+void DXVAVideoDecodeAccelerator::Decode(const BitstreamBuffer& bitstream) {
+ Decode(bitstream.ToDecoderBuffer(), bitstream.id());
+}
+
+void DXVAVideoDecodeAccelerator::Decode(scoped_refptr<DecoderBuffer> buffer,
+ int32_t bitstream_id) {
TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::Decode");
DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
- // SharedMemory will take over the ownership of handle.
- base::SharedMemory shm(bitstream_buffer.handle(), true);
+ RETURN_AND_NOTIFY_ON_FAILURE(bitstream_id >= 0,
+ "Invalid bitstream, id: " << bitstream_id,
+ INVALID_ARGUMENT, );
+
+ if (!buffer) {
+ if (client_)
+ client_->NotifyEndOfBitstreamBuffer(bitstream_id);
+ return;
+ }
State state = GetState();
RETURN_AND_NOTIFY_ON_FAILURE(
(state == kNormal || state == kStopped || state == kFlushing),
"Invalid state: " << state, ILLEGAL_STATE, );
- if (bitstream_buffer.id() < 0) {
- RETURN_AND_NOTIFY_ON_FAILURE(
- false, "Invalid bitstream_buffer, id: " << bitstream_buffer.id(),
- INVALID_ARGUMENT, );
- }
-
- if (bitstream_buffer.size() == 0) {
- if (client_)
- client_->NotifyEndOfBitstreamBuffer(bitstream_buffer.id());
- return;
- }
Microsoft::WRL::ComPtr<IMFSample> sample;
- RETURN_AND_NOTIFY_ON_FAILURE(shm.Map(bitstream_buffer.size()),
- "Failed in base::SharedMemory::Map",
- PLATFORM_FAILURE, );
-
sample = CreateInputSample(
- reinterpret_cast<const uint8_t*>(shm.memory()), bitstream_buffer.size(),
- std::min<uint32_t>(bitstream_buffer.size(), input_stream_info_.cbSize),
+ buffer->data(), buffer->data_size(),
+ std::min<uint32_t>(buffer->data_size(), input_stream_info_.cbSize),
input_stream_info_.cbAlignment);
RETURN_AND_NOTIFY_ON_FAILURE(sample.Get(), "Failed to create input sample",
PLATFORM_FAILURE, );
RETURN_AND_NOTIFY_ON_HR_FAILURE(
- sample->SetSampleTime(bitstream_buffer.id()),
+ sample->SetSampleTime(bitstream_id),
"Failed to associate input buffer id with sample", PLATFORM_FAILURE, );
decoder_thread_task_runner_->PostTask(
@@ -1618,7 +1622,7 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(VideoCodecProfile profile) {
enable_accelerated_vpx_decode_ & gpu::GpuPreferences::VPX_VENDOR_AMD &&
profile == VP9PROFILE_PROFILE0) {
base::FilePath dll_path;
- if (PathService::Get(program_files_key, &dll_path)) {
+ if (base::PathService::Get(program_files_key, &dll_path)) {
codec_ = media::kCodecVP9;
dll_path = dll_path.Append(kAMDVPXDecoderDLLPath);
dll_path = dll_path.Append(kAMDVP9DecoderDLLName);
@@ -1633,8 +1637,8 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(VideoCodecProfile profile) {
RETURN_ON_FAILURE(false, "Unsupported codec.", false);
}
- HRESULT hr = CreateCOMObjectFromDll(decoder_dll, clsid,
- IID_PPV_ARGS(&decoder_));
+ HRESULT hr =
+ CreateCOMObjectFromDll(decoder_dll, clsid, IID_PPV_ARGS(&decoder_));
RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);
RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
@@ -1761,6 +1765,8 @@ bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
UINT32 dx11_aware = 0;
attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware);
use_dx11_ = !!dx11_aware;
+ if (media_log_)
+ MEDIA_LOG(INFO, media_log_) << __func__ << ": Using DX11? " << use_dx11_;
}
use_keyed_mutex_ =
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
index 8c4f4891b3c..c5998d033a3 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
@@ -47,7 +47,7 @@ class GLContext;
namespace gpu {
class GpuDriverBugWorkarounds;
struct GpuPreferences;
-}
+} // namespace gpu
typedef HRESULT(WINAPI* CreateDXGIDeviceManager)(
UINT* reset_token,
@@ -57,6 +57,7 @@ namespace media {
class DXVAPictureBuffer;
class EGLStreamCopyPictureBuffer;
class EGLStreamPictureBuffer;
+class MediaLog;
class PbufferPictureBuffer;
class ConfigChangeDetector {
@@ -94,12 +95,15 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
const MakeGLContextCurrentCallback& make_context_current_cb,
const BindGLImageCallback& bind_image_cb,
const gpu::GpuDriverBugWorkarounds& workarounds,
- const gpu::GpuPreferences& gpu_preferences);
+ const gpu::GpuPreferences& gpu_preferences,
+ MediaLog* media_log);
~DXVAVideoDecodeAccelerator() override;
// VideoDecodeAccelerator implementation.
bool Initialize(const Config& config, Client* client) override;
- void Decode(const BitstreamBuffer& bitstream_buffer) override;
+ void Decode(const BitstreamBuffer& bitstream) override;
+ void Decode(scoped_refptr<DecoderBuffer> buffer,
+ int32_t bitstream_id) override;
void AssignPictureBuffers(const std::vector<PictureBuffer>& buffers) override;
void ReusePictureBuffer(int32_t picture_buffer_id) override;
void Flush() override;
@@ -479,6 +483,9 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
MakeGLContextCurrentCallback make_context_current_cb_;
BindGLImageCallback bind_image_cb_;
+ // This may be null, e.g. when not using MojoVideoDecoder.
+ MediaLog* const media_log_;
+
// Which codec we are decoding with hardware acceleration.
VideoCodec codec_;
// Thread on which the decoder operations like passing input frames,
diff --git a/chromium/media/media_options.gni b/chromium/media/media_options.gni
index 3ea9aa20fbc..0279aa5c0e5 100644
--- a/chromium/media/media_options.gni
+++ b/chromium/media/media_options.gni
@@ -55,12 +55,14 @@ declare_args() {
# pass-through to HDMI sink on Chromecast.
enable_ac3_eac3_audio_demuxing = proprietary_codecs && is_chromecast
+ enable_mpeg_h_audio_demuxing = proprietary_codecs && is_chromecast
+
enable_mse_mpeg2ts_stream_parser =
(proprietary_codecs && is_chromecast) || use_fuzzing_engine
- # Enable support for the 'cbcs' encryption scheme added by MPEG Common
+ # Enable parsing for the 'cbcs' encryption scheme added by MPEG Common
# Encryption 3rd Edition (ISO/IEC 23001-7), published 02/15/2016.
- enable_cbcs_encryption_scheme = is_chromecast
+ enable_cbcs_encryption_scheme = is_chromecast || is_mac || is_win || is_linux
# Enable HEVC/H265 demuxing. Actual decoding must be provided by the
# platform. Enable by default for Chromecast.
@@ -71,8 +73,6 @@ declare_args() {
# which are encoded using HEVC require |enable_hevc_demuxing| to be enabled.
enable_dolby_vision_demuxing = proprietary_codecs && is_chromecast
- enable_webrtc = !is_cast_audio_only
-
# Enable HLS with SAMPLE-AES decryption.
enable_hls_sample_aes = proprietary_codecs && is_chromecast
@@ -129,7 +129,7 @@ declare_args() {
# the media pipeline and corresponding services will hosted in the selected
# remote process (e.g. "utility" process, see |mojo_media_host|).
enable_mojo_media =
- is_android || is_chromecast || enable_library_cdms || is_win
+ is_android || is_chromecast || is_mac || enable_library_cdms || is_win
# Enable the TestMojoMediaClient to be used in mojo MediaService. This is for
# testing only and will override the default platform MojoMediaClient, if any.
@@ -216,7 +216,7 @@ if (enable_mojo_media) {
"video_decoder",
]
mojo_media_host = "gpu"
- } else if (is_win) {
+ } else if (is_mac || is_win) {
mojo_media_services += [ "video_decoder" ]
mojo_media_host = "gpu"
}
diff --git a/chromium/media/midi/midi_manager_alsa.cc b/chromium/media/midi/midi_manager_alsa.cc
index 629fe1634f5..9d322cb67e5 100644
--- a/chromium/media/midi/midi_manager_alsa.cc
+++ b/chromium/media/midi/midi_manager_alsa.cc
@@ -17,7 +17,6 @@
#include "base/json/json_string_value_serializer.h"
#include "base/logging.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
#include "base/posix/eintr_wrapper.h"
#include "base/posix/safe_strerror.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/midi/midi_manager_mac.cc b/chromium/media/midi/midi_manager_mac.cc
index 1039cd5cc79..ff6ebc51ef1 100644
--- a/chromium/media/midi/midi_manager_mac.cc
+++ b/chromium/media/midi/midi_manager_mac.cc
@@ -13,7 +13,6 @@
#include <CoreAudio/HostTime.h>
#include "base/bind.h"
-#include "base/message_loop/message_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/sys_string_conversions.h"
diff --git a/chromium/media/midi/midi_manager_unittest.cc b/chromium/media/midi/midi_manager_unittest.cc
index 4a645a6541b..691d70cdce1 100644
--- a/chromium/media/midi/midi_manager_unittest.cc
+++ b/chromium/media/midi/midi_manager_unittest.cc
@@ -325,7 +325,13 @@ TEST_F(MidiManagerTest, AbortSession) {
run_loop.RunUntilIdle();
}
-TEST_F(MidiManagerTest, CreatePlatformMidiManager) {
+#if defined(OS_ANDROID)
+// The test sometimes fails on Android. https://crbug.com/844027
+#define MAYBE_CreatePlatformMidiManager DISABLED_CreatePlatformMidiManager
+#else
+#define MAYBE_CreatePlatformMidiManager CreatePlatformMidiManager
+#endif
+TEST_F(MidiManagerTest, MAYBE_CreatePlatformMidiManager) {
// SystemMonitor is needed on Windows.
base::SystemMonitor system_monitor;
diff --git a/chromium/media/midi/midi_manager_usb.cc b/chromium/media/midi/midi_manager_usb.cc
index 60128e34568..ba5ad73bfaa 100644
--- a/chromium/media/midi/midi_manager_usb.cc
+++ b/chromium/media/midi/midi_manager_usb.cc
@@ -8,7 +8,6 @@
#include <utility>
#include "base/logging.h"
-#include "base/message_loop/message_loop.h"
#include "base/strings/stringprintf.h"
#include "media/midi/midi_service.h"
#include "media/midi/task_service.h"
diff --git a/chromium/media/midi/usb_midi_device_factory_android.cc b/chromium/media/midi/usb_midi_device_factory_android.cc
index faf5cbf5e46..9c9eb391465 100644
--- a/chromium/media/midi/usb_midi_device_factory_android.cc
+++ b/chromium/media/midi/usb_midi_device_factory_android.cc
@@ -9,7 +9,6 @@
#include "base/bind.h"
#include "base/containers/hash_tables.h"
-#include "base/message_loop/message_loop.h"
#include "base/synchronization/lock.h"
#include "jni/UsbMidiDeviceFactoryAndroid_jni.h"
#include "media/midi/usb_midi_device_android.h"
diff --git a/chromium/media/mojo/DEPS b/chromium/media/mojo/DEPS
index 285448dff79..7216ea6714a 100644
--- a/chromium/media/mojo/DEPS
+++ b/chromium/media/mojo/DEPS
@@ -1,7 +1,5 @@
include_rules = [
"+components/ukm/test_ukm_recorder.h",
-
- "+mojo/common",
"+mojo/converters",
"+mojo/logging",
"+mojo/public",
diff --git a/chromium/media/mojo/clients/BUILD.gn b/chromium/media/mojo/clients/BUILD.gn
index 92cdfd0fafe..5100e5c976a 100644
--- a/chromium/media/mojo/clients/BUILD.gn
+++ b/chromium/media/mojo/clients/BUILD.gn
@@ -67,7 +67,6 @@ source_set("clients") {
"//media/gpu",
"//media/mojo/common",
"//media/mojo/common:mojo_shared_buffer_video_frame",
- "//mojo/common",
"//services/service_manager/public/cpp",
]
@@ -90,7 +89,6 @@ source_set("jpeg_decode_accelerator") {
deps = [
"//base",
"//media/mojo/interfaces",
- "//mojo/common",
]
}
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc b/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
index 2991646507f..44319b12fd1 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
@@ -5,6 +5,7 @@
#include <memory>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/macros.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
@@ -65,7 +66,7 @@ class MojoAudioDecoderTest : public ::testing::Test {
message_loop_.task_runner(), std::move(remote_audio_decoder)));
}
- virtual ~MojoAudioDecoderTest() {
+ ~MojoAudioDecoderTest() override {
// Destroy |mojo_audio_decoder_| first so that the service will be
// destructed. Then stop the service thread. Otherwise we'll leak memory.
mojo_audio_decoder_.reset();
@@ -140,7 +141,7 @@ class MojoAudioDecoderTest : public ::testing::Test {
base::Bind(&MojoAudioDecoderTest::OnInitialized,
base::Unretained(this)),
base::Bind(&MojoAudioDecoderTest::OnOutput, base::Unretained(this)),
- AudioDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
RunLoop();
}
diff --git a/chromium/media/mojo/clients/mojo_cdm_unittest.cc b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
index aea9c8a536e..3fc9bc586cd 100644
--- a/chromium/media/mojo/clients/mojo_cdm_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
@@ -42,14 +42,6 @@ ACTION_P2(CdmCreated, cdm, error_message) {
arg0.Run(cdm, error_message);
}
-ACTION_P3(InvokeFunction, classPointer, memberFunc, p1) {
- (classPointer->*memberFunc)(arg0, p1);
-}
-
-ACTION_P4(InvokeFunction2, classPointer, memberFunc, p1, p2) {
- (classPointer->*memberFunc)(arg0, p1, p2);
-}
-
namespace media {
namespace {
@@ -82,7 +74,7 @@ class MojoCdmTest : public ::testing::Test {
&mojo_cdm_service_context_)),
cdm_binding_(mojo_cdm_service_.get()) {}
- virtual ~MojoCdmTest() = default;
+ ~MojoCdmTest() override = default;
void Initialize(ExpectedResult expected_result) {
// TODO(xhwang): Add pending init support.
@@ -153,9 +145,10 @@ class MojoCdmTest : public ::testing::Test {
// never called.
ForceConnectionError();
} else {
- EXPECT_CALL(*remote_cdm_, OnSetServerCertificate(certificate, _))
- .WillOnce(WithArg<1>(InvokeFunction(this, &MojoCdmTest::HandlePromise,
- expected_result)));
+ EXPECT_CALL(*remote_cdm_, SetServerCertificate(certificate, _))
+ .WillOnce([&](const auto& certificate, auto promise) {
+ HandlePromise(std::move(promise), expected_result);
+ });
}
mojo_cdm_->SetServerCertificate(
@@ -178,11 +171,13 @@ class MojoCdmTest : public ::testing::Test {
// CreateSessionAndGenerateRequest() is never called.
ForceConnectionError();
} else {
- EXPECT_CALL(*remote_cdm_, OnCreateSessionAndGenerateRequest(
+ EXPECT_CALL(*remote_cdm_, CreateSessionAndGenerateRequest(
session_type, data_type, key_id, _))
- .WillOnce(WithArg<3>(
- InvokeFunction2(this, &MojoCdmTest::HandleSessionPromise,
- session_id, expected_result)));
+ .WillOnce([&](auto session_type, auto init_data_type,
+ const auto& init_data, auto promise) {
+ HandleSessionPromise(std::move(promise), session_id,
+ expected_result);
+ });
}
// Note that although it's called CreateSessionAndGenerateRequest, no
@@ -211,10 +206,11 @@ class MojoCdmTest : public ::testing::Test {
// Break the connection before the call, so LoadSession() is never called.
ForceConnectionError();
} else {
- EXPECT_CALL(*remote_cdm_, OnLoadSession(session_type, session_id, _))
- .WillOnce(WithArg<2>(
- InvokeFunction2(this, &MojoCdmTest::HandleSessionPromise,
- session_id, expected_result)));
+ EXPECT_CALL(*remote_cdm_, LoadSession(session_type, session_id, _))
+ .WillOnce([&](auto session_type, auto session_id, auto promise) {
+ HandleSessionPromise(std::move(promise), session_id,
+ expected_result);
+ });
}
mojo_cdm_->LoadSession(session_type, session_id,
@@ -246,9 +242,10 @@ class MojoCdmTest : public ::testing::Test {
// called.
ForceConnectionError();
} else {
- EXPECT_CALL(*remote_cdm_, OnUpdateSession(session_id, response, _))
- .WillOnce(WithArg<2>(InvokeFunction(this, &MojoCdmTest::HandlePromise,
- expected_result)));
+ EXPECT_CALL(*remote_cdm_, UpdateSession(session_id, response, _))
+ .WillOnce([&](auto session_id, auto response, auto promise) {
+ HandlePromise(std::move(promise), expected_result);
+ });
}
mojo_cdm_->UpdateSession(
@@ -264,9 +261,10 @@ class MojoCdmTest : public ::testing::Test {
// called.
ForceConnectionError();
} else {
- EXPECT_CALL(*remote_cdm_, OnCloseSession(session_id, _))
- .WillOnce(WithArg<1>(InvokeFunction(this, &MojoCdmTest::HandlePromise,
- expected_result)));
+ EXPECT_CALL(*remote_cdm_, CloseSession(session_id, _))
+ .WillOnce([&](auto session_id, auto promise) {
+ HandlePromise(std::move(promise), expected_result);
+ });
}
mojo_cdm_->CloseSession(session_id, std::make_unique<MockCdmPromise>(
@@ -281,9 +279,10 @@ class MojoCdmTest : public ::testing::Test {
// called.
ForceConnectionError();
} else {
- EXPECT_CALL(*remote_cdm_, OnRemoveSession(session_id, _))
- .WillOnce(WithArg<1>(InvokeFunction(this, &MojoCdmTest::HandlePromise,
- expected_result)));
+ EXPECT_CALL(*remote_cdm_, RemoveSession(session_id, _))
+ .WillOnce([&](auto session_id, auto promise) {
+ HandlePromise(std::move(promise), expected_result);
+ });
}
mojo_cdm_->RemoveSession(session_id, std::make_unique<MockCdmPromise>(
@@ -291,7 +290,7 @@ class MojoCdmTest : public ::testing::Test {
base::RunLoop().RunUntilIdle();
}
- void HandlePromise(std::unique_ptr<SimpleCdmPromise>& promise,
+ void HandlePromise(std::unique_ptr<SimpleCdmPromise> promise,
ExpectedResult expected_result) {
switch (expected_result) {
case SUCCESS:
@@ -326,7 +325,7 @@ class MojoCdmTest : public ::testing::Test {
}
}
- void HandleSessionPromise(std::unique_ptr<NewSessionCdmPromise>& promise,
+ void HandleSessionPromise(std::unique_ptr<NewSessionCdmPromise> promise,
const std::string& session_id,
ExpectedResult expected_result) {
switch (expected_result) {
diff --git a/chromium/media/mojo/clients/mojo_decoder_factory.cc b/chromium/media/mojo/clients/mojo_decoder_factory.cc
index db90344e76c..4f694e05055 100644
--- a/chromium/media/mojo/clients/mojo_decoder_factory.cc
+++ b/chromium/media/mojo/clients/mojo_decoder_factory.cc
@@ -29,6 +29,7 @@ MojoDecoderFactory::~MojoDecoderFactory() = default;
void MojoDecoderFactory::CreateAudioDecoders(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) {
#if BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
mojom::AudioDecoderPtr audio_decoder_ptr;
@@ -44,6 +45,7 @@ void MojoDecoderFactory::CreateVideoDecoders(
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space,
std::vector<std::unique_ptr<VideoDecoder>>* video_decoders) {
#if BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
// If MojoVideoDecoder is not enabled, then return without adding anything.
@@ -54,7 +56,7 @@ void MojoDecoderFactory::CreateVideoDecoders(
video_decoders->push_back(std::make_unique<MojoVideoDecoder>(
task_runner, gpu_factories, media_log, std::move(video_decoder_ptr),
- request_overlay_info_cb));
+ request_overlay_info_cb, target_color_space));
#endif
}
diff --git a/chromium/media/mojo/clients/mojo_decoder_factory.h b/chromium/media/mojo/clients/mojo_decoder_factory.h
index 11d0c64c6a9..c884dd65b83 100644
--- a/chromium/media/mojo/clients/mojo_decoder_factory.h
+++ b/chromium/media/mojo/clients/mojo_decoder_factory.h
@@ -23,6 +23,7 @@ class MojoDecoderFactory : public DecoderFactory {
void CreateAudioDecoders(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) final;
void CreateVideoDecoders(
@@ -30,6 +31,7 @@ class MojoDecoderFactory : public DecoderFactory {
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space,
std::vector<std::unique_ptr<VideoDecoder>>* video_decoders) final;
private:
diff --git a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
index 007e01ac916..634c989857a 100644
--- a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
@@ -20,6 +20,7 @@
#include "media/mojo/common/mojo_shared_buffer_video_frame.h"
#include "media/mojo/interfaces/decryptor.mojom.h"
#include "media/mojo/services/mojo_decryptor_service.h"
+#include "mojo/public/cpp/bindings/binding.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -44,10 +45,12 @@ class MojoDecryptorTest : public ::testing::Test {
decryptor_.reset(new StrictMock<MockDecryptor>());
mojom::DecryptorPtr remote_decryptor;
- mojo_decryptor_service_.reset(new MojoDecryptorService(
- decryptor_.get(), mojo::MakeRequest(&remote_decryptor),
- base::Bind(&MojoDecryptorTest::OnConnectionClosed,
- base::Unretained(this))));
+ mojo_decryptor_service_.reset(
+ new MojoDecryptorService(decryptor_.get(), nullptr));
+ binding_ = std::make_unique<mojo::Binding<mojom::Decryptor>>(
+ mojo_decryptor_service_.get(), MakeRequest(&remote_decryptor));
+ binding_->set_connection_error_handler(base::BindOnce(
+ &MojoDecryptorTest::OnConnectionClosed, base::Unretained(this)));
mojo_decryptor_.reset(
new MojoDecryptor(std::move(remote_decryptor), writer_capacity_));
@@ -61,6 +64,7 @@ class MojoDecryptorTest : public ::testing::Test {
void DestroyService() {
// MojoDecryptor has no way to notify callers that the connection is closed.
// TODO(jrummell): Determine if notification is needed.
+ binding_.reset();
mojo_decryptor_service_.reset();
}
@@ -119,6 +123,7 @@ class MojoDecryptorTest : public ::testing::Test {
// The matching MojoDecryptorService for |mojo_decryptor_|.
std::unique_ptr<MojoDecryptorService> mojo_decryptor_service_;
+ std::unique_ptr<mojo::Binding<mojom::Decryptor>> binding_;
// The actual Decryptor object used by |mojo_decryptor_service_|.
std::unique_ptr<StrictMock<MockDecryptor>> decryptor_;
diff --git a/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc b/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc
index 8783eb41407..88d3908eb98 100644
--- a/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc
+++ b/chromium/media/mojo/clients/mojo_jpeg_decode_accelerator.cc
@@ -10,6 +10,7 @@
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/shared_memory_handle.h"
+#include "base/single_thread_task_runner.h"
#include "build/build_config.h"
#include "mojo/public/cpp/system/platform_handle.h"
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.cc b/chromium/media/mojo/clients/mojo_renderer_factory.cc
index 2601efdebd0..4a2096a0615 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.cc
@@ -27,6 +27,11 @@ MojoRendererFactory::MojoRendererFactory(
MojoRendererFactory::~MojoRendererFactory() = default;
+void MojoRendererFactory::SetGetTypeSpecificIdCB(
+ const GetTypeSpecificIdCB& get_type_specific_id) {
+ get_type_specific_id_ = get_type_specific_id;
+}
+
std::unique_ptr<Renderer> MojoRendererFactory::CreateRenderer(
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
const scoped_refptr<base::TaskRunner>& /* worker_task_runner */,
@@ -52,7 +57,10 @@ mojom::RendererPtr MojoRendererFactory::GetRendererPtr() {
mojom::RendererPtr renderer_ptr;
if (interface_factory_) {
- interface_factory_->CreateRenderer(hosted_renderer_type_, std::string(),
+ interface_factory_->CreateRenderer(hosted_renderer_type_,
+ get_type_specific_id_.is_null()
+ ? std::string()
+ : get_type_specific_id_.Run(),
mojo::MakeRequest(&renderer_ptr));
} else {
NOTREACHED();
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.h b/chromium/media/mojo/clients/mojo_renderer_factory.h
index f0fa1f33a23..5096df53eec 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.h
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.h
@@ -24,6 +24,7 @@ class GpuVideoAcceleratorFactories;
class MojoRendererFactory : public RendererFactory {
public:
using GetGpuFactoriesCB = base::Callback<GpuVideoAcceleratorFactories*()>;
+ using GetTypeSpecificIdCB = base::Callback<std::string()>;
MojoRendererFactory(mojom::HostedRendererType type,
const GetGpuFactoriesCB& get_gpu_factories_cb,
@@ -39,10 +40,19 @@ class MojoRendererFactory : public RendererFactory {
const RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) final;
+ // Sets the callback that will fetch the TypeSpecificId when
+ // InterfaceFactory::CreateRenderer() is called. What the string represents
+ // depends on the value of |hosted_renderer_type_|. Currently, we only use it
+ // with mojom::HostedRendererType::kFlinging, in which case
+ // |get_type_specific_id| should return the presentation ID to be given to the
+ // FlingingRenderer in the browser process.
+ void SetGetTypeSpecificIdCB(const GetTypeSpecificIdCB& get_type_specific_id);
+
private:
mojom::RendererPtr GetRendererPtr();
GetGpuFactoriesCB get_gpu_factories_cb_;
+ GetTypeSpecificIdCB get_type_specific_id_;
// InterfaceFactory or InterfaceProvider used to create or connect to remote
// renderer.
diff --git a/chromium/media/mojo/clients/mojo_renderer_unittest.cc b/chromium/media/mojo/clients/mojo_renderer_unittest.cc
index fc2b93daafa..fb66d05c113 100644
--- a/chromium/media/mojo/clients/mojo_renderer_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_unittest.cc
@@ -12,13 +12,11 @@
#include "base/threading/platform_thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/timer/elapsed_timer.h"
-#include "media/base/audio_renderer_sink.h"
#include "media/base/cdm_config.h"
#include "media/base/cdm_context.h"
#include "media/base/gmock_callback_support.h"
#include "media/base/mock_filters.h"
#include "media/base/test_helpers.h"
-#include "media/base/video_renderer_sink.h"
#include "media/cdm/default_cdm_factory.h"
#include "media/mojo/clients/mojo_renderer.h"
#include "media/mojo/common/media_type_converters.h"
@@ -69,7 +67,7 @@ class MojoRendererTest : public ::testing::Test {
mojom::RendererPtr remote_renderer;
renderer_binding_ = MojoRendererService::Create(
- &mojo_cdm_service_context_, nullptr, nullptr, std::move(mock_renderer),
+ &mojo_cdm_service_context_, std::move(mock_renderer),
MojoRendererService::InitiateSurfaceRequestCB(),
mojo::MakeRequest(&remote_renderer));
@@ -86,7 +84,7 @@ class MojoRendererTest : public ::testing::Test {
.WillRepeatedly(Return(base::TimeDelta()));
}
- virtual ~MojoRendererTest() = default;
+ ~MojoRendererTest() override = default;
void Destroy() {
mojo_renderer_.reset();
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.cc b/chromium/media/mojo/clients/mojo_video_decoder.cc
index 21f62d57460..7d84d38c81c 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_video_decoder.cc
@@ -7,25 +7,53 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
+#include "base/feature_list.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/single_thread_task_runner.h"
#include "base/unguessable_token.h"
+#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
+#include "media/base/media_switches.h"
#include "media/base/overlay_info.h"
#include "media/base/video_frame.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/mojo/interfaces/media_types.mojom.h"
#include "media/video/gpu_video_accelerator_factories.h"
+#include "media/video/video_decode_accelerator.h"
#include "mojo/public/cpp/bindings/interface_request.h"
namespace media {
+namespace {
+
+bool IsSupportedConfig(
+ const VideoDecodeAccelerator::SupportedProfiles& supported_profiles,
+ const VideoDecoderConfig& config) {
+ for (const auto& supported_profile : supported_profiles) {
+ if (config.profile() == supported_profile.profile &&
+ (!supported_profile.encrypted_only || config.is_encrypted()) &&
+ config.coded_size().width() >=
+ supported_profile.min_resolution.width() &&
+ config.coded_size().width() <=
+ supported_profile.max_resolution.width() &&
+ config.coded_size().height() >=
+ supported_profile.min_resolution.height() &&
+ config.coded_size().height() <=
+ supported_profile.max_resolution.height()) {
+ return true;
+ }
+ }
+ return false;
+}
+
+} // namespace
+
// Provides a thread-safe channel for VideoFrame destruction events.
class MojoVideoFrameHandleReleaser
: public base::RefCountedThreadSafe<MojoVideoFrameHandleReleaser> {
@@ -76,7 +104,8 @@ MojoVideoDecoder::MojoVideoDecoder(
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
mojom::VideoDecoderPtr remote_decoder,
- const RequestOverlayInfoCB& request_overlay_info_cb)
+ const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space)
: task_runner_(task_runner),
remote_decoder_info_(remote_decoder.PassInterface()),
gpu_factories_(gpu_factories),
@@ -86,8 +115,10 @@ MojoVideoDecoder::MojoVideoDecoder(
media_log_service_(media_log),
media_log_binding_(&media_log_service_),
request_overlay_info_cb_(request_overlay_info_cb),
+ target_color_space_(target_color_space),
weak_factory_(this) {
DVLOG(1) << __func__;
+ weak_this_ = weak_factory_.GetWeakPtr();
}
MojoVideoDecoder::~MojoVideoDecoder() {
@@ -110,25 +141,39 @@ void MojoVideoDecoder::Initialize(
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
- if (!weak_this_)
- weak_this_ = weak_factory_.GetWeakPtr();
+ // Fail immediately if we know that the remote side cannot support |config|.
+ //
+ // TODO(sandersd): Implement a generic mechanism for communicating supported
+ // profiles. https://crbug.com/839951
+ if (gpu_factories_) {
+ VideoDecodeAccelerator::Capabilities capabilities =
+ gpu_factories_->GetVideoDecodeAcceleratorCapabilities();
+ if (!base::FeatureList::IsEnabled(kD3D11VideoDecoder) &&
+ !IsSupportedConfig(capabilities.supported_profiles, config)) {
+ task_runner_->PostTask(FROM_HERE, base::BindRepeating(init_cb, false));
+ return;
+ }
+ }
- if (!remote_decoder_bound_)
- BindRemoteDecoder();
+ int cdm_id =
+ cdm_context ? cdm_context->GetCdmId() : CdmContext::kInvalidCdmId;
- if (has_connection_error_) {
+ // Fail immediately if the stream is encrypted but |cdm_id| is invalid.
+ // This check is needed to avoid unnecessary IPC to the remote process.
+ // Note that we do not support unsetting a CDM, so it should never happen
+ that a valid CDM ID is available on first initialization but an invalid one
+ // is passed for reinitialization.
+ if (config.is_encrypted() && CdmContext::kInvalidCdmId == cdm_id) {
+ DVLOG(1) << __func__ << ": Invalid CdmContext.";
task_runner_->PostTask(FROM_HERE, base::Bind(init_cb, false));
return;
}
- // Fail immediately if the stream is encrypted but |cdm_context| is invalid.
- int cdm_id = (config.is_encrypted() && cdm_context)
- ? cdm_context->GetCdmId()
- : CdmContext::kInvalidCdmId;
+ if (!remote_decoder_bound_)
+ BindRemoteDecoder();
- if (config.is_encrypted() && CdmContext::kInvalidCdmId == cdm_id) {
- DVLOG(1) << __func__ << ": Invalid CdmContext.";
- task_runner_->PostTask(FROM_HERE, base::Bind(init_cb, false));
+ if (has_connection_error_) {
+ task_runner_->PostTask(FROM_HERE, base::BindRepeating(init_cb, false));
return;
}
@@ -143,7 +188,7 @@ void MojoVideoDecoder::Initialize(
void MojoVideoDecoder::OnInitializeDone(bool status,
bool needs_bitstream_conversion,
int32_t max_decode_requests) {
- DVLOG(1) << __func__;
+ DVLOG(1) << __func__ << ": status = " << status;
DCHECK(task_runner_->BelongsToCurrentThread());
initialized_ = status;
needs_bitstream_conversion_ = needs_bitstream_conversion;
@@ -153,7 +198,7 @@ void MojoVideoDecoder::OnInitializeDone(bool status,
void MojoVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
const DecodeCB& decode_cb) {
- DVLOG(2) << __func__;
+ DVLOG(3) << __func__ << ": " << buffer->AsHumanReadableString();
DCHECK(task_runner_->BelongsToCurrentThread());
if (has_connection_error_) {
@@ -181,7 +226,7 @@ void MojoVideoDecoder::OnVideoFrameDecoded(
const scoped_refptr<VideoFrame>& frame,
bool can_read_without_stalling,
const base::Optional<base::UnguessableToken>& release_token) {
- DVLOG(2) << __func__;
+ DVLOG(3) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
// TODO(sandersd): Prove that all paths read this value again after running
@@ -199,7 +244,7 @@ void MojoVideoDecoder::OnVideoFrameDecoded(
}
void MojoVideoDecoder::OnDecodeDone(uint64_t decode_id, DecodeStatus status) {
- DVLOG(2) << __func__;
+ DVLOG(3) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
auto it = pending_decodes_.find(decode_id);
@@ -214,7 +259,7 @@ void MojoVideoDecoder::OnDecodeDone(uint64_t decode_id, DecodeStatus status) {
}
void MojoVideoDecoder::Reset(const base::Closure& reset_cb) {
- DVLOG(1) << __func__;
+ DVLOG(2) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
if (has_connection_error_) {
@@ -228,7 +273,7 @@ void MojoVideoDecoder::Reset(const base::Closure& reset_cb) {
}
void MojoVideoDecoder::OnResetDone() {
- DVLOG(1) << __func__;
+ DVLOG(2) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
base::ResetAndReturn(&reset_cb_).Run();
}
@@ -294,10 +339,11 @@ void MojoVideoDecoder::BindRemoteDecoder() {
}
}
- remote_decoder_->Construct(
- std::move(client_ptr_info), std::move(media_log_ptr_info),
- std::move(video_frame_handle_releaser_request),
- std::move(remote_consumer_handle), std::move(command_buffer_id));
+ remote_decoder_->Construct(std::move(client_ptr_info),
+ std::move(media_log_ptr_info),
+ std::move(video_frame_handle_releaser_request),
+ std::move(remote_consumer_handle),
+ std::move(command_buffer_id), target_color_space_);
}
void MojoVideoDecoder::RequestOverlayInfo(bool restart_for_transitions) {
@@ -306,8 +352,8 @@ void MojoVideoDecoder::RequestOverlayInfo(bool restart_for_transitions) {
overlay_info_requested_ = true;
request_overlay_info_cb_.Run(
restart_for_transitions,
- BindToCurrentLoop(base::Bind(&MojoVideoDecoder::OnOverlayInfoChanged,
- weak_factory_.GetWeakPtr())));
+ BindToCurrentLoop(base::BindRepeating(
+ &MojoVideoDecoder::OnOverlayInfoChanged, weak_this_)));
}
void MojoVideoDecoder::OnOverlayInfoChanged(const OverlayInfo& overlay_info) {
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.h b/chromium/media/mojo/clients/mojo_video_decoder.h
index 5f0d556af64..f3fe82ae196 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.h
+++ b/chromium/media/mojo/clients/mojo_video_decoder.h
@@ -14,6 +14,7 @@
#include "media/mojo/interfaces/video_decoder.mojom.h"
#include "media/video/video_decode_accelerator.h"
#include "mojo/public/cpp/bindings/associated_binding.h"
+#include "ui/gfx/color_space.h"
namespace base {
class SingleThreadTaskRunner;
@@ -37,7 +38,8 @@ class MojoVideoDecoder final : public VideoDecoder,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
mojom::VideoDecoderPtr remote_decoder,
- const RequestOverlayInfoCB& request_overlay_info_cb);
+ const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space);
~MojoVideoDecoder() final;
// VideoDecoder implementation.
@@ -112,6 +114,7 @@ class MojoVideoDecoder final : public VideoDecoder,
mojo::AssociatedBinding<mojom::MediaLog> media_log_binding_;
RequestOverlayInfoCB request_overlay_info_cb_;
bool overlay_info_requested_ = false;
+ gfx::ColorSpace target_color_space_;
bool initialized_ = false;
bool needs_bitstream_conversion_ = false;
diff --git a/chromium/media/mojo/common/BUILD.gn b/chromium/media/mojo/common/BUILD.gn
index 72d6a25dc94..23dfe6c13aa 100644
--- a/chromium/media/mojo/common/BUILD.gn
+++ b/chromium/media/mojo/common/BUILD.gn
@@ -18,7 +18,6 @@ source_set("common") {
"//gpu/ipc/common:struct_traits",
"//media",
"//media/mojo/interfaces",
- "//mojo/common",
"//mojo/public/cpp/bindings",
"//mojo/public/cpp/system",
"//ui/gfx/geometry",
diff --git a/chromium/media/mojo/common/media_type_converters.cc b/chromium/media/mojo/common/media_type_converters.cc
index b876be08bb8..3841de42cbf 100644
--- a/chromium/media/mojo/common/media_type_converters.cc
+++ b/chromium/media/mojo/common/media_type_converters.cc
@@ -8,31 +8,18 @@
#include <stdint.h>
#include <memory>
+#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "media/base/audio_buffer.h"
-#include "media/base/audio_decoder_config.h"
#include "media/base/cdm_key_information.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
-#include "media/base/encryption_pattern.h"
-#include "media/base/encryption_scheme.h"
#include "media/base/subsample_entry.h"
#include "mojo/public/cpp/system/buffer.h"
namespace mojo {
-template <>
-struct TypeConverter<media::mojom::EncryptionPatternPtr,
- media::EncryptionPattern> {
- static media::mojom::EncryptionPatternPtr Convert(
- const media::EncryptionPattern& input);
-};
-template <>
-struct TypeConverter<media::EncryptionPattern,
- media::mojom::EncryptionPatternPtr> {
- static media::EncryptionPattern Convert(
- const media::mojom::EncryptionPatternPtr& input);
-};
+// TODO(crbug.com/611224): Stop using TypeConverters.
// static
media::mojom::DecryptConfigPtr
@@ -43,6 +30,8 @@ TypeConverter<media::mojom::DecryptConfigPtr, media::DecryptConfig>::Convert(
mojo_decrypt_config->key_id = input.key_id();
mojo_decrypt_config->iv = input.iv();
mojo_decrypt_config->subsamples = input.subsamples();
+ mojo_decrypt_config->encryption_mode = input.encryption_mode();
+ mojo_decrypt_config->encryption_pattern = input.encryption_pattern();
return mojo_decrypt_config;
}
@@ -52,8 +41,9 @@ std::unique_ptr<media::DecryptConfig>
TypeConverter<std::unique_ptr<media::DecryptConfig>,
media::mojom::DecryptConfigPtr>::
Convert(const media::mojom::DecryptConfigPtr& input) {
- return std::make_unique<media::DecryptConfig>(input->key_id, input->iv,
- input->subsamples);
+ return std::make_unique<media::DecryptConfig>(
+ input->encryption_mode, input->key_id, input->iv, input->subsamples,
+ input->encryption_pattern);
}
// static
@@ -129,35 +119,6 @@ TypeConverter<scoped_refptr<media::DecoderBuffer>,
}
// static
-media::mojom::AudioDecoderConfigPtr
-TypeConverter<media::mojom::AudioDecoderConfigPtr, media::AudioDecoderConfig>::
- Convert(const media::AudioDecoderConfig& input) {
- media::mojom::AudioDecoderConfigPtr config(
- media::mojom::AudioDecoderConfig::New());
- config->codec = input.codec();
- config->sample_format = input.sample_format();
- config->channel_layout = input.channel_layout();
- config->samples_per_second = input.samples_per_second();
- config->extra_data = input.extra_data();
- config->seek_preroll = input.seek_preroll();
- config->codec_delay = input.codec_delay();
- config->encryption_scheme = input.encryption_scheme();
- return config;
-}
-
-// static
-media::AudioDecoderConfig
-TypeConverter<media::AudioDecoderConfig, media::mojom::AudioDecoderConfigPtr>::
- Convert(const media::mojom::AudioDecoderConfigPtr& input) {
- media::AudioDecoderConfig config;
- config.Initialize(input->codec, input->sample_format, input->channel_layout,
- input->samples_per_second, input->extra_data,
- input->encryption_scheme, input->seek_preroll,
- input->codec_delay);
- return config;
-}
-
-// static
media::mojom::CdmKeyInformationPtr TypeConverter<
media::mojom::CdmKeyInformationPtr,
media::CdmKeyInformation>::Convert(const media::CdmKeyInformation& input) {
diff --git a/chromium/media/mojo/common/media_type_converters.h b/chromium/media/mojo/common/media_type_converters.h
index abc6929ed94..3a8d219c0b0 100644
--- a/chromium/media/mojo/common/media_type_converters.h
+++ b/chromium/media/mojo/common/media_type_converters.h
@@ -14,7 +14,6 @@
namespace media {
class AudioBuffer;
-class AudioDecoderConfig;
class DecoderBuffer;
class DecryptConfig;
struct CdmKeyInformation;
@@ -49,19 +48,6 @@ struct TypeConverter<scoped_refptr<media::DecoderBuffer>,
};
template <>
-struct TypeConverter<media::mojom::AudioDecoderConfigPtr,
- media::AudioDecoderConfig> {
- static media::mojom::AudioDecoderConfigPtr Convert(
- const media::AudioDecoderConfig& input);
-};
-template <>
-struct TypeConverter<media::AudioDecoderConfig,
- media::mojom::AudioDecoderConfigPtr> {
- static media::AudioDecoderConfig Convert(
- const media::mojom::AudioDecoderConfigPtr& input);
-};
-
-template <>
struct TypeConverter<media::mojom::CdmKeyInformationPtr,
media::CdmKeyInformation> {
static media::mojom::CdmKeyInformationPtr Convert(
diff --git a/chromium/media/mojo/common/media_type_converters_unittest.cc b/chromium/media/mojo/common/media_type_converters_unittest.cc
index a7163ea92a6..336b549a7ac 100644
--- a/chromium/media/mojo/common/media_type_converters_unittest.cc
+++ b/chromium/media/mojo/common/media_type_converters_unittest.cc
@@ -13,8 +13,9 @@
#include "media/base/audio_buffer.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
+#include "media/base/decrypt_config.h"
+#include "media/base/encryption_pattern.h"
#include "media/base/encryption_scheme.h"
-#include "media/base/media_util.h"
#include "media/base/sample_format.h"
#include "media/base/test_helpers.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -126,7 +127,7 @@ TEST(MediaTypeConvertersTest, ConvertDecoderBuffer_KeyFrame) {
EXPECT_TRUE(result->is_key_frame());
}
-TEST(MediaTypeConvertersTest, ConvertDecoderBuffer_EncryptedBuffer) {
+TEST(MediaTypeConvertersTest, ConvertDecoderBuffer_CencEncryptedBuffer) {
const uint8_t kData[] = "hello, world";
const size_t kDataSize = arraysize(kData);
const char kKeyId[] = "00112233445566778899aabbccddeeff";
@@ -141,7 +142,7 @@ TEST(MediaTypeConvertersTest, ConvertDecoderBuffer_EncryptedBuffer) {
scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CopyFrom(
reinterpret_cast<const uint8_t*>(&kData), kDataSize));
buffer->set_decrypt_config(
- std::make_unique<DecryptConfig>(kKeyId, kIv, subsamples));
+ DecryptConfig::CreateCencConfig(kKeyId, kIv, subsamples));
// Convert from and back.
mojom::DecoderBufferPtr ptr(mojom::DecoderBuffer::From(*buffer));
@@ -153,13 +154,51 @@ TEST(MediaTypeConvertersTest, ConvertDecoderBuffer_EncryptedBuffer) {
EXPECT_EQ(kDataSize, result->data_size());
EXPECT_TRUE(buffer->decrypt_config()->Matches(*result->decrypt_config()));
- // Test empty IV. This is used for clear buffer in an encrypted stream.
- buffer->set_decrypt_config(std::make_unique<DecryptConfig>(
- kKeyId, "", std::vector<SubsampleEntry>()));
+ // Test without DecryptConfig. This is used for clear buffer in an
+ // encrypted stream.
+ buffer->set_decrypt_config(nullptr);
+ EXPECT_FALSE(buffer->decrypt_config());
result =
mojom::DecoderBuffer::From(*buffer).To<scoped_refptr<DecoderBuffer>>();
+ EXPECT_FALSE(result->decrypt_config());
+}
+
+TEST(MediaTypeConvertersTest, ConvertDecoderBuffer_CbcsEncryptedBuffer) {
+ const uint8_t kData[] = "hello, world";
+ const size_t kDataSize = arraysize(kData);
+ const char kKeyId[] = "00112233445566778899aabbccddeeff";
+ const char kIv[] = "0123456789abcdef";
+
+ std::vector<SubsampleEntry> subsamples;
+ subsamples.push_back(SubsampleEntry(10, 20));
+ subsamples.push_back(SubsampleEntry(30, 40));
+ subsamples.push_back(SubsampleEntry(50, 60));
+
+ EncryptionPattern pattern{1, 2};
+
+ // Original.
+ scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CopyFrom(
+ reinterpret_cast<const uint8_t*>(&kData), kDataSize));
+ buffer->set_decrypt_config(
+ DecryptConfig::CreateCbcsConfig(kKeyId, kIv, subsamples, pattern));
+
+ // Convert from and back.
+ mojom::DecoderBufferPtr ptr(mojom::DecoderBuffer::From(*buffer));
+ scoped_refptr<DecoderBuffer> result(ptr.To<scoped_refptr<DecoderBuffer>>());
+
+ // Compare.
+ // Note: We intentionally do not serialize the data section of the
+ // DecoderBuffer; no need to check the data here.
+ EXPECT_EQ(kDataSize, result->data_size());
EXPECT_TRUE(buffer->decrypt_config()->Matches(*result->decrypt_config()));
- EXPECT_TRUE(buffer->decrypt_config()->iv().empty());
+
+ // Test without DecryptConfig. This is used for clear buffer in an
+ // encrypted stream.
+ buffer->set_decrypt_config(nullptr);
+ EXPECT_FALSE(buffer->decrypt_config());
+ result =
+ mojom::DecoderBuffer::From(*buffer).To<scoped_refptr<DecoderBuffer>>();
+ EXPECT_FALSE(result->decrypt_config());
}
TEST(MediaTypeConvertersTest, ConvertAudioBuffer_EOS) {
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc b/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
index 840b1f9e600..2a798e8e490 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
@@ -13,6 +13,7 @@
#include "base/run_loop.h"
#include "base/test/mock_callback.h"
#include "media/base/decoder_buffer.h"
+#include "media/base/decrypt_config.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -124,15 +125,22 @@ TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_EncryptedBuffer) {
scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CopyFrom(
reinterpret_cast<const uint8_t*>(&kData), kDataSize));
buffer->set_decrypt_config(
- std::make_unique<DecryptConfig>(kKeyId, kIv, subsamples));
+ DecryptConfig::CreateCencConfig(kKeyId, kIv, subsamples));
{
MojoDecoderBufferConverter converter;
converter.ConvertAndVerify(buffer);
}
- // Test empty IV. This is used for clear buffer in an encrypted stream.
- buffer->set_decrypt_config(std::make_unique<DecryptConfig>(
- kKeyId, "", std::vector<SubsampleEntry>()));
+ // Test 'cbcs'.
+ buffer->set_decrypt_config(DecryptConfig::CreateCbcsConfig(
+ kKeyId, kIv, subsamples, EncryptionPattern(5, 6)));
+ {
+ MojoDecoderBufferConverter converter;
+ converter.ConvertAndVerify(buffer);
+ }
+
+ // Test unencrypted. This is used for clear buffer in an encrypted stream.
+ buffer->set_decrypt_config(nullptr);
{
MojoDecoderBufferConverter converter;
converter.ConvertAndVerify(buffer);
diff --git a/chromium/media/mojo/interfaces/BUILD.gn b/chromium/media/mojo/interfaces/BUILD.gn
index 16595ed7fb0..0b6fa72f858 100644
--- a/chromium/media/mojo/interfaces/BUILD.gn
+++ b/chromium/media/mojo/interfaces/BUILD.gn
@@ -52,6 +52,7 @@ mojom("interfaces") {
"//mojo/public/mojom/base",
"//services/service_manager/public/mojom",
"//ui/gfx/geometry/mojo",
+ "//ui/gfx/mojo",
"//url/mojom:url_mojom_gurl",
"//url/mojom:url_mojom_origin",
]
diff --git a/chromium/media/mojo/interfaces/audio_input_stream.mojom b/chromium/media/mojo/interfaces/audio_input_stream.mojom
index ff1ec0c01a6..fd5df22c263 100644
--- a/chromium/media/mojo/interfaces/audio_input_stream.mojom
+++ b/chromium/media/mojo/interfaces/audio_input_stream.mojom
@@ -29,6 +29,21 @@ interface AudioInputStreamClient {
// AudioInputStream. DidStartRecording() is invoked when the stream starts
// recording. Stream destruction is notified through binding connection error.
interface AudioInputStreamObserver {
+ // These values are persisted to logs. Entries should not be renumbered and
+ // numeric values should never be reused.
+ enum DisconnectReason {
+ // The Disconnect reason wasn't given explicitly. This probably means that
+ // the audio service crashed.
+ kDefault = 0,
+ // This code is used as disconnect reason when stream ended or failed to
+ // start due to an unrecoverable platform error, e.g. the hardware device is
+ // busy or disconnected.
+ kPlatformError = 1,
+ kTerminatedByClient = 2,
+ kStreamCreationFailed = 3,
+ kDocumentDestroyed = 4,
+ };
+
// It will be called only once when input stream starts recording.
DidStartRecording();
};
diff --git a/chromium/media/mojo/interfaces/audio_output_stream.mojom b/chromium/media/mojo/interfaces/audio_output_stream.mojom
index 2ccf4b752d9..7fa1fe4c934 100644
--- a/chromium/media/mojo/interfaces/audio_output_stream.mojom
+++ b/chromium/media/mojo/interfaces/audio_output_stream.mojom
@@ -25,21 +25,6 @@ interface AudioOutputStream {
SetVolume(double volume);
};
-interface AudioOutputStreamClient {
- // Called if the stream has an error such as failing to open/losing access to
- // a device. This renders the stream unusable.
- OnError();
-};
-
-interface AudioOutputStreamProvider {
- // Creates a new AudioOutputStream using |params|. |data_pipe| is used to
- // transfer audio data.
- // Can only be called once.
- Acquire(AudioOutputStream& output_stream, AudioOutputStreamClient client,
- AudioParameters params)
- => (AudioDataPipe data_pipe);
-};
-
// An AudioOutputStreamObserver gets notifications about events related to an
// AudioOutputStream. DidStartPlaying() is invoked when the stream starts
// playing and it is eventually followed by a DidStopPlaying() call. A stream
@@ -48,6 +33,21 @@ interface AudioOutputStreamProvider {
// Note: It is possible that DidStopPlaying() is not called in shutdown
// situations.
interface AudioOutputStreamObserver {
+ // These values are persisted to logs. Entries should not be renumbered and
+ // numeric values should never be reused.
+ enum DisconnectReason {
+ // The Disconnect reason wasn't given explicitly. This probably means that
+ // the audio service crashed.
+ kDefault = 0,
+ // This code is used as disconnect reason when stream ended or failed to
+ // start due to an unrecoverable platform error, e.g. the hardware device is
+ // busy or disconnected.
+ kPlatformError = 1,
+ kTerminatedByClient = 2,
+ kStreamCreationFailed = 3,
+ kDocumentDestroyed = 4,
+ };
+
// This notification indicates that the stream started playing. The stream
// should be considered non-audible until a DidChangeAudibleState() call says
// otherwise.
@@ -63,3 +63,20 @@ interface AudioOutputStreamObserver {
// DidStartPlaying() and before DidStopPlaying().
DidChangeAudibleState(bool is_audible);
};
+
+interface AudioOutputStreamProvider {
+ // Creates a new AudioOutputStream using |params|. |client| is notified when
+ // the stream is ready. The stream lifetime is bound by the lifetime of
+ // |client|. On error, the |client| will have a disconnect reason among the
+ // specified ones in AudioOutputStreamProviderClient.
+ // Can only be called once.
+ Acquire(AudioParameters params, AudioOutputStreamProviderClient client);
+};
+
+interface AudioOutputStreamProviderClient {
+ // |stream| is used to pass commands to the stream, and |data_pipe| is used
+ // to transfer the audio data.
+ // TODO(https://crbug.com/787806): Currently, this will be called at most
+ // once. In the future, it may be called several times.
+ Created(AudioOutputStream stream, AudioDataPipe data_pipe);
+};
diff --git a/chromium/media/mojo/interfaces/interface_factory.mojom b/chromium/media/mojo/interfaces/interface_factory.mojom
index b3101de054c..73b1e687e3e 100644
--- a/chromium/media/mojo/interfaces/interface_factory.mojom
+++ b/chromium/media/mojo/interfaces/interface_factory.mojom
@@ -6,6 +6,7 @@ module media.mojom;
import "media/mojo/interfaces/audio_decoder.mojom";
import "media/mojo/interfaces/cdm_proxy.mojom";
+import "media/mojo/interfaces/decryptor.mojom";
import "media/mojo/interfaces/content_decryption_module.mojom";
import "media/mojo/interfaces/renderer.mojom";
import "media/mojo/interfaces/video_decoder.mojom";
@@ -21,6 +22,14 @@ enum HostedRendererType {
// are better off using the native Android MediaPlayer.
[EnableIf=is_android]
kMediaPlayer,
+
+ // content::FlingingRenderer: Used to control a CastSession, in the context
+ // of RemotePlayback. The CastSession must have already been started via the
+ // PresentationService. This renderer does not actually render anything on the
+ // local device, but instead serves as a link to/from media content playing on
+ // a cast device.
+ [EnableIf=is_android]
+ kFlinging,
};
// A factory for creating media mojo interfaces. Renderers can only access
@@ -39,6 +48,9 @@ interface InterfaceFactory {
// media/audio/audio_device_description.h.
// If |type_specific_id| is empty, kDefaultDeviceId will be used.
// - kMediaPlayer: unused.
+ // - kFlinging: represents a PresentationID for a session that has already
+ // been started. If the ID cannot be found (e.g. the session has already
+ // ended), CreateRenderer will be a no-op.
CreateRenderer(HostedRendererType type, string type_specific_id,
Renderer& renderer);
@@ -49,6 +61,9 @@ interface InterfaceFactory {
// implementation must fully validate |key_system| before creating the CDM.
CreateCdm(string key_system, ContentDecryptionModule& cdm);
+ // Creates a Decryptor associated with the |cdm_id|.
+ CreateDecryptor(int32 cdm_id, Decryptor& decryptor);
+
// Creates a CdmProxy that proxies part of CDM functionalities to a different
// entity, e.g. hardware CDM modules. The created |cdm_proxy| must match the
// type of the CDM, identified by |cdm_guid|.
diff --git a/chromium/media/mojo/interfaces/jpeg_decode_accelerator_typemap_traits.cc b/chromium/media/mojo/interfaces/jpeg_decode_accelerator_typemap_traits.cc
index e1fd29b7d5f..3297e8fcb79 100644
--- a/chromium/media/mojo/interfaces/jpeg_decode_accelerator_typemap_traits.cc
+++ b/chromium/media/mojo/interfaces/jpeg_decode_accelerator_typemap_traits.cc
@@ -116,8 +116,9 @@ bool StructTraits<
input.id(), memory_handle, input.size(),
base::checked_cast<off_t>(input.offset()), timestamp);
if (key_id.size()) {
- bitstream_buffer.SetDecryptConfig(
- media::DecryptConfig(key_id, iv, subsamples));
+ // Note that BitstreamBuffer currently ignores how each buffer is
+ // encrypted and uses the settings from the Audio/VideoDecoderConfig.
+ bitstream_buffer.SetDecryptionSettings(key_id, iv, subsamples);
}
*output = bitstream_buffer;
diff --git a/chromium/media/mojo/interfaces/key_system_support.mojom b/chromium/media/mojo/interfaces/key_system_support.mojom
index ac46e558d7d..628c4bdf6e0 100644
--- a/chromium/media/mojo/interfaces/key_system_support.mojom
+++ b/chromium/media/mojo/interfaces/key_system_support.mojom
@@ -9,14 +9,16 @@ import "media/mojo/interfaces/media_types.mojom";
interface KeySystemSupport {
// Query to determine if the browser supports the key system |key_system|.
// If supported, |is_supported| = true and the remaining properties indicate
- // the codecs supported and if the key system supports persistent licenses.
- // KeySystemSupport implementation is in the browser process, as it maintains
- // the list of installed key systems. Clients run in the renderer process.
+ // the codecs supported, if the key system supports persistent licenses, and
+ // the set of encryption schemes supported. KeySystemSupport implementation
+ // is in the browser process, as it maintains the list of installed key
+ // systems. Clients run in the renderer process.
// TODO(crbug.com/796725) Find a way to include profiles and levels for
// |supported_video_codecs|.
[Sync]
IsKeySystemSupported(string key_system)
=> (bool is_supported,
array<VideoCodec> supported_video_codecs,
- bool supports_persistent_license);
+ bool supports_persistent_license,
+ array<EncryptionMode> supported_encryption_schemes);
};
diff --git a/chromium/media/mojo/interfaces/media_types.mojom b/chromium/media/mojo/interfaces/media_types.mojom
index 4231856098e..66df670a578 100644
--- a/chromium/media/mojo/interfaces/media_types.mojom
+++ b/chromium/media/mojo/interfaces/media_types.mojom
@@ -65,6 +65,10 @@ enum VideoRotation;
[Native]
enum WatchTimeKey;
+// See media/base/decrypt_config.h for descriptions.
+[Native]
+enum EncryptionMode;
+
// This defines a mojo transport format for media::EncryptionPattern
// See media/base/encryption_pattern.h for description.
struct EncryptionPattern {
@@ -157,9 +161,11 @@ struct SubsampleEntry;
// This defines a mojo transport format for media::DecryptConfig.
// See media/base/decrypt_config.h for descriptions.
struct DecryptConfig {
+ EncryptionMode encryption_mode;
string key_id;
string iv;
array<SubsampleEntry> subsamples;
+ EncryptionPattern? encryption_pattern;
};
// This defines a mojo transport format for media::DecoderBuffer.
diff --git a/chromium/media/mojo/interfaces/media_types.typemap b/chromium/media/mojo/interfaces/media_types.typemap
index e8b0967e9fa..42b4d2198bb 100644
--- a/chromium/media/mojo/interfaces/media_types.typemap
+++ b/chromium/media/mojo/interfaces/media_types.typemap
@@ -9,6 +9,8 @@ public_headers = [
"//media/base/buffering_state.h",
"//media/base/channel_layout.h",
"//media/base/decode_status.h",
+ "//media/base/decrypt_config.h",
+ "//media/base/encryption_pattern.h",
"//media/base/encryption_scheme.h",
"//media/base/hdr_metadata.h",
"//media/base/media_log_event.h",
@@ -38,6 +40,7 @@ type_mappings = [
"media.mojom.ChannelLayout=media::ChannelLayout",
"media.mojom.ColorSpace=media::ColorSpace",
"media.mojom.DecodeStatus=media::DecodeStatus",
+ "media.mojom.EncryptionMode=media::EncryptionMode",
"media.mojom.EncryptionScheme.CipherMode=media::EncryptionScheme::CipherMode",
"media.mojom.MediaLogEvent=media::MediaLogEvent",
"media.mojom.OutputDeviceStatus=media::OutputDeviceStatus",
@@ -49,4 +52,5 @@ type_mappings = [
"media.mojom.VideoPixelFormat=media::VideoPixelFormat",
"media.mojom.VideoRotation=media::VideoRotation",
"media.mojom.WatchTimeKey=media::WatchTimeKey",
+ "media.mojom.EncryptionPattern=media::EncryptionPattern",
]
diff --git a/chromium/media/mojo/interfaces/video_decoder.mojom b/chromium/media/mojo/interfaces/video_decoder.mojom
index 07bf9a13cca..4763625e163 100644
--- a/chromium/media/mojo/interfaces/video_decoder.mojom
+++ b/chromium/media/mojo/interfaces/video_decoder.mojom
@@ -8,6 +8,7 @@ import "gpu/ipc/common/sync_token.mojom";
import "media/mojo/interfaces/media_log.mojom";
import "media/mojo/interfaces/media_types.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
+import "ui/gfx/mojo/color_space.mojom";
// Identifies a CommandBufferStub. MediaGpuChannelManager is responsible
// for minting |channel_token| values.
@@ -60,7 +61,8 @@ interface VideoDecoder {
associated MediaLog media_log,
VideoFrameHandleReleaser& video_frame_handle_releaser,
handle<data_pipe_consumer> decoder_buffer_pipe,
- CommandBufferId? command_buffer_id);
+ CommandBufferId? command_buffer_id,
+ gfx.mojom.ColorSpace target_color_space);
// Configure (or reconfigure) the decoder. This must be called before decoding
// any frames, and must not be called while there are pending Initialize(),
diff --git a/chromium/media/mojo/interfaces/watch_time_recorder.mojom b/chromium/media/mojo/interfaces/watch_time_recorder.mojom
index e0258df9b1c..9326c013837 100644
--- a/chromium/media/mojo/interfaces/watch_time_recorder.mojom
+++ b/chromium/media/mojo/interfaces/watch_time_recorder.mojom
@@ -17,6 +17,7 @@ struct PlaybackProperties {
bool has_audio; // Note: Due to the above, we also need these
bool has_video; // booleans for audio and video presence.
bool is_background; // Is report for playback in the background?
+ bool is_muted; // Is report for muted playback?
bool is_mse;
bool is_eme;
bool is_embedded_media_experience; // Playback from 'Downloads' on Android.
diff --git a/chromium/media/mojo/services/BUILD.gn b/chromium/media/mojo/services/BUILD.gn
index 4d87502453f..938b4c6a003 100644
--- a/chromium/media/mojo/services/BUILD.gn
+++ b/chromium/media/mojo/services/BUILD.gn
@@ -32,8 +32,6 @@ component("services") {
"mojo_audio_decoder_service.h",
"mojo_audio_input_stream.cc",
"mojo_audio_input_stream.h",
- "mojo_audio_input_stream_observer.cc",
- "mojo_audio_input_stream_observer.h",
"mojo_audio_output_stream.cc",
"mojo_audio_output_stream.h",
"mojo_audio_output_stream_provider.cc",
@@ -103,7 +101,6 @@ component("services") {
"//media/gpu/ipc/service",
"//media/mojo/common",
"//media/mojo/common:mojo_shared_buffer_video_frame",
- "//mojo/common",
"//services/metrics/public/cpp:metrics_cpp",
"//services/metrics/public/cpp:ukm_builders",
"//services/service_manager/public/mojom",
@@ -133,7 +130,11 @@ component("services") {
"mojo_cdm_proxy_service.cc",
"mojo_cdm_proxy_service.h",
]
- deps += [ "//media/cdm:cdm_api" ]
+ deps += [
+ "//media/cdm:cdm_api",
+ "//media/cdm:cdm_paths",
+ "//media/cdm/library_cdm/clear_key_cdm:clear_key_cdm_proxy",
+ ]
# TODO(xhwang): Ideally media should not worry about sandbox. Find a way to
# remove this dependency.
@@ -161,7 +162,6 @@ source_set("unit_tests") {
sources = [
"deferred_destroy_strong_binding_set_unittest.cc",
"media_metrics_provider_unittest.cc",
- "mojo_audio_input_stream_observer_unittest.cc",
"mojo_audio_input_stream_unittest.cc",
"mojo_audio_output_stream_provider_unittest.cc",
"mojo_audio_output_stream_unittest.cc",
@@ -238,6 +238,7 @@ service_test("media_service_unittests") {
":services",
"//base",
"//media:test_support",
+ "//media/cdm:cdm_paths",
"//media/mojo/clients",
"//media/mojo/common",
"//media/mojo/interfaces",
diff --git a/chromium/media/mojo/services/OWNERS b/chromium/media/mojo/services/OWNERS
index fce6c7aef0b..da9d4b5c016 100644
--- a/chromium/media/mojo/services/OWNERS
+++ b/chromium/media/mojo/services/OWNERS
@@ -15,3 +15,6 @@ per-file test_manifest.json=file://ipc/SECURITY_OWNERS
per-file pipeline_apptest_manifest.json=set noparent
per-file pipeline_apptest_manifest.json=file://ipc/SECURITY_OWNERS
+
+per-file mojo_audio_output*=file://media/audio/OWNERS
+per-file mojo_audio_input*=file://media/audio/OWNERS
diff --git a/chromium/media/mojo/services/cdm_manifest.json b/chromium/media/mojo/services/cdm_manifest.json
index 96a06dd68f2..78ea3fccf42 100644
--- a/chromium/media/mojo/services/cdm_manifest.json
+++ b/chromium/media/mojo/services/cdm_manifest.json
@@ -5,7 +5,7 @@
"interface_provider_specs": {
"service_manager:connector": {
"provides": {
- "media:cdm": [ "media::mojom::CdmService" ]
+ "media:cdm": [ "media.mojom.CdmService" ]
},
"requires": {
"*": [ "app" ]
diff --git a/chromium/media/mojo/services/cdm_service.cc b/chromium/media/mojo/services/cdm_service.cc
index 678476e763b..1fdc540dd75 100644
--- a/chromium/media/mojo/services/cdm_service.cc
+++ b/chromium/media/mojo/services/cdm_service.cc
@@ -22,44 +22,49 @@ namespace media {
namespace {
+using service_manager::ServiceContextRef;
+
constexpr base::TimeDelta kServiceContextRefReleaseDelay =
base::TimeDelta::FromSeconds(5);
-void DeleteServiceContextRef(service_manager::ServiceContextRef* ref) {
+void DeleteServiceContextRef(ServiceContextRef* ref) {
delete ref;
}
// Starting a new process and loading the library CDM could be expensive. This
-// class helps delay the release of service_manager::ServiceContextRef by
+// class helps delay the release of ServiceContextRef by
// |kServiceContextRefReleaseDelay|, which will ultimately delay CdmService
// destruction by the same delay as well. This helps reduce the chance of
// destroying the CdmService and immediately creates it (in another process) in
// cases like navigation, which could cause long service connection delays.
-class DelayedReleaseServiceContextRef
- : public service_manager::ServiceContextRef {
+class DelayedReleaseServiceContextRef : public ServiceContextRef {
public:
- explicit DelayedReleaseServiceContextRef(
- std::unique_ptr<service_manager::ServiceContextRef> ref)
+ DelayedReleaseServiceContextRef(std::unique_ptr<ServiceContextRef> ref,
+ base::TimeDelta delay)
: ref_(std::move(ref)),
- task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
+ delay_(delay),
+ task_runner_(base::ThreadTaskRunnerHandle::Get()) {
+ DCHECK_GT(delay_, base::TimeDelta());
+ }
~DelayedReleaseServiceContextRef() override {
service_manager::ServiceContextRef* ref_ptr = ref_.release();
if (!task_runner_->PostNonNestableDelayedTask(
FROM_HERE, base::BindOnce(&DeleteServiceContextRef, ref_ptr),
- kServiceContextRefReleaseDelay)) {
+ delay_)) {
DeleteServiceContextRef(ref_ptr);
}
}
- // service_manager::ServiceContextRef implementation.
+ // ServiceContextRef implementation.
std::unique_ptr<ServiceContextRef> Clone() override {
NOTIMPLEMENTED();
return nullptr;
}
private:
- std::unique_ptr<service_manager::ServiceContextRef> ref_;
+ std::unique_ptr<ServiceContextRef> ref_;
+ base::TimeDelta delay_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
DISALLOW_COPY_AND_ASSIGN(DelayedReleaseServiceContextRef);
@@ -85,10 +90,9 @@ class DelayedReleaseServiceContextRef
// details.
class CdmFactoryImpl : public DeferredDestroy<mojom::CdmFactory> {
public:
- CdmFactoryImpl(
- CdmService::Client* client,
- service_manager::mojom::InterfaceProviderPtr interfaces,
- std::unique_ptr<service_manager::ServiceContextRef> service_context_ref)
+ CdmFactoryImpl(CdmService::Client* client,
+ service_manager::mojom::InterfaceProviderPtr interfaces,
+ std::unique_ptr<ServiceContextRef> service_context_ref)
: client_(client),
interfaces_(std::move(interfaces)),
service_context_ref_(std::move(service_context_ref)) {
@@ -147,7 +151,7 @@ class CdmFactoryImpl : public DeferredDestroy<mojom::CdmFactory> {
CdmService::Client* client_;
service_manager::mojom::InterfaceProviderPtr interfaces_;
mojo::StrongBindingSet<mojom::ContentDecryptionModule> cdm_bindings_;
- std::unique_ptr<service_manager::ServiceContextRef> service_context_ref_;
+ std::unique_ptr<ServiceContextRef> service_context_ref_;
std::unique_ptr<media::CdmFactory> cdm_factory_;
base::OnceClosure destroy_cb_;
@@ -157,7 +161,8 @@ class CdmFactoryImpl : public DeferredDestroy<mojom::CdmFactory> {
} // namespace
CdmService::CdmService(std::unique_ptr<Client> client)
- : client_(std::move(client)) {
+ : client_(std::move(client)),
+ service_release_delay_(kServiceContextRefReleaseDelay) {
DVLOG(1) << __func__;
DCHECK(client_);
registry_.AddInterface<mojom::CdmService>(
@@ -260,10 +265,10 @@ void CdmService::CreateCdmFactory(
if (!client_)
return;
- std::unique_ptr<service_manager::ServiceContextRef> service_context_ref =
- is_delayed_service_release_enabled
+ std::unique_ptr<ServiceContextRef> service_context_ref =
+ service_release_delay_ > base::TimeDelta()
? std::make_unique<DelayedReleaseServiceContextRef>(
- ref_factory_->CreateRef())
+ ref_factory_->CreateRef(), service_release_delay_)
: ref_factory_->CreateRef();
cdm_factory_bindings_.AddBinding(
diff --git a/chromium/media/mojo/services/cdm_service.h b/chromium/media/mojo/services/cdm_service.h
index 22f418c4bfe..67771567730 100644
--- a/chromium/media/mojo/services/cdm_service.h
+++ b/chromium/media/mojo/services/cdm_service.h
@@ -53,8 +53,10 @@ class MEDIA_MOJO_EXPORT CdmService : public service_manager::Service,
explicit CdmService(std::unique_ptr<Client> client);
~CdmService() final;
- void DisableDelayedServiceReleaseForTesting() {
- is_delayed_service_release_enabled = false;
+ // By default CdmService release is delayed. Overrides the delay with |delay|.
+ // If |delay| is 0, delayed service release will be disabled.
+ void SetServiceReleaseDelayForTesting(base::TimeDelta delay) {
+ service_release_delay_ = delay;
}
size_t BoundCdmFactorySizeForTesting() const {
@@ -92,7 +94,7 @@ class MEDIA_MOJO_EXPORT CdmService : public service_manager::Service,
DeferredDestroyStrongBindingSet<mojom::CdmFactory> cdm_factory_bindings_;
service_manager::BinderRegistry registry_;
mojo::BindingSet<mojom::CdmService> bindings_;
- bool is_delayed_service_release_enabled = true;
+ base::TimeDelta service_release_delay_;
};
} // namespace media
diff --git a/chromium/media/mojo/services/cdm_service_unittest.cc b/chromium/media/mojo/services/cdm_service_unittest.cc
index b0d39879bc4..90a9e99f5d2 100644
--- a/chromium/media/mojo/services/cdm_service_unittest.cc
+++ b/chromium/media/mojo/services/cdm_service_unittest.cc
@@ -25,13 +25,18 @@
#include "url/gurl.h"
#include "url/origin.h"
-using testing::Invoke;
-using testing::InvokeWithoutArgs;
-
namespace media {
namespace {
+using testing::_;
+using testing::Invoke;
+using testing::InvokeWithoutArgs;
+
+MATCHER_P(MatchesResult, success, "") {
+ return arg->success == success;
+}
+
const char kClearKeyKeySystem[] = "org.w3.clearkey";
const char kInvalidKeySystem[] = "invalid.key.system";
const char kSecurityOrigin[] = "https://foo.com";
@@ -87,12 +92,16 @@ class ServiceTestClient : public service_manager::test::ServiceTestClient,
std::make_unique<CdmService>(std::move(mock_cdm_service_client));
cdm_service_ = cdm_service.get();
- // Delayed service release involves a posted delayed task which will not
- // block *.RunUntilIdle() and hence cause a memory leak in the test.
- cdm_service_->DisableDelayedServiceReleaseForTesting();
+ cdm_service_->SetServiceReleaseDelayForTesting(service_release_delay_);
service_context_ = std::make_unique<service_manager::ServiceContext>(
std::move(cdm_service), std::move(request));
+ service_context_->SetQuitClosure(base::BindRepeating(
+ &ServiceTestClient::DestroyService, base::Unretained(this)));
+ }
+
+ void SetServiceReleaseDelay(base::TimeDelta delay) {
+ service_release_delay_ = delay;
}
void DestroyService() { service_context_.reset(); }
@@ -108,6 +117,11 @@ class ServiceTestClient : public service_manager::test::ServiceTestClient,
service_factory_bindings_.AddBinding(this, std::move(request));
}
+ // Delayed service release involves a posted delayed task which will not
+ // block *.RunUntilIdle() and hence cause a memory leak in the test. So by
+ // default use a zero value delay to disable the delay.
+ base::TimeDelta service_release_delay_;
+
service_manager::BinderRegistry registry_;
mojo::BindingSet<service_manager::mojom::ServiceFactory>
service_factory_bindings_;
@@ -116,22 +130,20 @@ class ServiceTestClient : public service_manager::test::ServiceTestClient,
MockCdmServiceClient* mock_cdm_service_client_ = nullptr;
};
-} // namespace
-
class CdmServiceTest : public service_manager::test::ServiceTest {
public:
CdmServiceTest() : ServiceTest("cdm_service_unittest") {}
~CdmServiceTest() override {}
+ MOCK_METHOD0(CdmServiceConnectionClosed, void());
MOCK_METHOD0(CdmFactoryConnectionClosed, void());
MOCK_METHOD0(CdmConnectionClosed, void());
- // service_manager::test::ServiceTest:
- void SetUp() override {
- ServiceTest::SetUp();
-
+ void Initialize() {
connector()->BindInterface(media::mojom::kCdmServiceName,
&cdm_service_ptr_);
+ cdm_service_ptr_.set_connection_error_handler(base::BindRepeating(
+ &CdmServiceTest::CdmServiceConnectionClosed, base::Unretained(this)));
service_manager::mojom::InterfaceProviderPtr interfaces;
auto provider = std::make_unique<MediaInterfaceProvider>(
@@ -146,21 +158,22 @@ class CdmServiceTest : public service_manager::test::ServiceTest {
&CdmServiceTest::CdmFactoryConnectionClosed, base::Unretained(this)));
}
- // MOCK_METHOD* doesn't support move-only types. Work around this by having
- // an extra method.
- MOCK_METHOD1(OnCdmInitializedInternal, void(bool result));
- void OnCdmInitialized(mojom::CdmPromiseResultPtr result,
- int cdm_id,
- mojom::DecryptorPtr decryptor) {
- OnCdmInitializedInternal(result->success);
+ void InitializeWithServiceReleaseDelay(base::TimeDelta delay) {
+ service_test_client_->SetServiceReleaseDelay(delay);
+ Initialize();
}
+ MOCK_METHOD3(OnCdmInitialized,
+ void(mojom::CdmPromiseResultPtr result,
+ int cdm_id,
+ mojom::DecryptorPtr decryptor));
+
void InitializeCdm(const std::string& key_system, bool expected_result) {
base::RunLoop run_loop;
cdm_factory_ptr_->CreateCdm(key_system, mojo::MakeRequest(&cdm_ptr_));
cdm_ptr_.set_connection_error_handler(base::BindRepeating(
&CdmServiceTest::CdmConnectionClosed, base::Unretained(this)));
- EXPECT_CALL(*this, OnCdmInitializedInternal(expected_result))
+ EXPECT_CALL(*this, OnCdmInitialized(MatchesResult(expected_result), _, _))
.WillOnce(InvokeWithoutArgs(&run_loop, &base::RunLoop::Quit));
cdm_ptr_->Initialize(key_system, url::Origin::Create(GURL(kSecurityOrigin)),
CdmConfig(),
@@ -169,6 +182,7 @@ class CdmServiceTest : public service_manager::test::ServiceTest {
run_loop.Run();
}
+ // service_manager::test::ServiceTest implementation.
std::unique_ptr<service_manager::Service> CreateService() override {
auto service_test_client = std::make_unique<ServiceTestClient>(this);
service_test_client_ = service_test_client.get();
@@ -184,14 +198,17 @@ class CdmServiceTest : public service_manager::test::ServiceTest {
DISALLOW_COPY_AND_ASSIGN(CdmServiceTest);
};
+} // namespace
+
TEST_F(CdmServiceTest, LoadCdm) {
- base::FilePath cdm_path(FILE_PATH_LITERAL("dummy path"));
+ Initialize();
// Even with a dummy path where the CDM cannot be loaded, EnsureSandboxed()
// should still be called to ensure the process is sandboxed.
EXPECT_CALL(*service_test_client_->mock_cdm_service_client(),
EnsureSandboxed());
+ base::FilePath cdm_path(FILE_PATH_LITERAL("dummy path"));
#if defined(OS_MACOSX)
// Token provider will not be used since the path is a dummy path.
cdm_service_ptr_->LoadCdm(cdm_path, nullptr);
@@ -203,22 +220,26 @@ TEST_F(CdmServiceTest, LoadCdm) {
}
TEST_F(CdmServiceTest, InitializeCdm_Success) {
+ Initialize();
InitializeCdm(kClearKeyKeySystem, true);
}
TEST_F(CdmServiceTest, InitializeCdm_InvalidKeySystem) {
+ Initialize();
InitializeCdm(kInvalidKeySystem, false);
}
TEST_F(CdmServiceTest, DestroyAndRecreateCdm) {
+ Initialize();
InitializeCdm(kClearKeyKeySystem, true);
cdm_ptr_.reset();
InitializeCdm(kClearKeyKeySystem, true);
}
// CdmFactory connection error will NOT destroy CDMs. Instead, it will only be
-// destroyed after |cdm_| is reset.
+// destroyed after |cdm_ptr_| is reset.
TEST_F(CdmServiceTest, DestroyCdmFactory) {
+ Initialize();
auto* service = service_test_client_->cdm_service();
InitializeCdm(kClearKeyKeySystem, true);
@@ -236,13 +257,39 @@ TEST_F(CdmServiceTest, DestroyCdmFactory) {
EXPECT_EQ(service->UnboundCdmFactorySizeForTesting(), 0u);
}
+// Same as DestroyCdmFactory test, but do not disable delayed service release.
+// TODO(xhwang): Use ScopedTaskEnvironment::MainThreadType::MOCK_TIME and
+// ScopedTaskEnvironment::FastForwardBy() so we don't have to really wait for
+// the delay in the test. But currently FastForwardBy() doesn't support delayed
+// task yet.
+TEST_F(CdmServiceTest, DestroyCdmFactory_DelayedServiceRelease) {
+ constexpr base::TimeDelta kServiceContextRefReleaseDelay =
+ base::TimeDelta::FromSeconds(1);
+ InitializeWithServiceReleaseDelay(kServiceContextRefReleaseDelay);
+
+ InitializeCdm(kClearKeyKeySystem, true);
+ cdm_factory_ptr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ base::RunLoop run_loop;
+ auto start_time = base::Time::Now();
+ cdm_ptr_.reset();
+ EXPECT_CALL(*this, CdmServiceConnectionClosed())
+ .WillOnce(Invoke(&run_loop, &base::RunLoop::Quit));
+ run_loop.Run();
+ auto time_passed = base::Time::Now() - start_time;
+ EXPECT_GE(time_passed, kServiceContextRefReleaseDelay);
+}
+
// Destroy service will destroy the CdmFactory and all CDMs.
TEST_F(CdmServiceTest, DestroyCdmService) {
+ Initialize();
InitializeCdm(kClearKeyKeySystem, true);
base::RunLoop run_loop;
// Ideally we should not care about order, and should only quit the loop when
// both connections are closed.
+ EXPECT_CALL(*this, CdmServiceConnectionClosed());
EXPECT_CALL(*this, CdmFactoryConnectionClosed());
EXPECT_CALL(*this, CdmConnectionClosed())
.WillOnce(Invoke(&run_loop, &base::RunLoop::Quit));
diff --git a/chromium/media/mojo/services/cdm_service_unittest_manifest.json b/chromium/media/mojo/services/cdm_service_unittest_manifest.json
index 1d35efbb1a7..9440910d412 100644
--- a/chromium/media/mojo/services/cdm_service_unittest_manifest.json
+++ b/chromium/media/mojo/services/cdm_service_unittest_manifest.json
@@ -5,7 +5,7 @@
"service_manager:connector": {
"provides": {
"service_manager:service_factory": [
- "service_manager::mojom::ServiceFactory"
+ "service_manager.mojom.ServiceFactory"
]
},
"requires": {
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.cc b/chromium/media/mojo/services/gpu_mojo_media_client.cc
index fb112f24420..dd6d7ba0912 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.cc
@@ -7,13 +7,16 @@
#include <utility>
#include "base/bind.h"
+#include "base/feature_list.h"
#include "build/build_config.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "media/base/audio_decoder.h"
#include "media/base/cdm_factory.h"
+#include "media/base/media_switches.h"
#include "media/base/video_decoder.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/ipc/service/media_gpu_channel_manager.h"
+#include "media/gpu/ipc/service/vda_video_decoder.h"
#if defined(OS_ANDROID)
#include "base/memory/ptr_util.h"
@@ -61,7 +64,7 @@ std::unique_ptr<MediaDrmStorage> CreateMediaDrmStorage(
}
#endif // defined(OS_ANDROID)
-#if defined(OS_ANDROID) || defined(OS_WIN)
+#if defined(OS_ANDROID) || defined(OS_MACOSX) || defined(OS_WIN)
gpu::CommandBufferStub* GetCommandBufferStub(
base::WeakPtr<MediaGpuChannelManager> media_gpu_channel_manager,
base::UnguessableToken channel_token,
@@ -82,11 +85,13 @@ gpu::CommandBufferStub* GetCommandBufferStub(
GpuMojoMediaClient::GpuMojoMediaClient(
const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
base::WeakPtr<MediaGpuChannelManager> media_gpu_channel_manager,
AndroidOverlayMojoFactoryCB android_overlay_factory_cb,
CdmProxyFactoryCB cdm_proxy_factory_cb)
: gpu_preferences_(gpu_preferences),
+ gpu_workarounds_(gpu_workarounds),
gpu_task_runner_(std::move(gpu_task_runner)),
media_gpu_channel_manager_(std::move(media_gpu_channel_manager)),
android_overlay_factory_cb_(std::move(android_overlay_factory_cb)),
@@ -109,7 +114,8 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
mojom::CommandBufferIdPtr command_buffer_id,
- RequestOverlayInfoCB request_overlay_info_cb) {
+ RequestOverlayInfoCB request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space) {
// Both MCVD and D3D11 VideoDecoders need a command buffer.
if (!command_buffer_id)
return nullptr;
@@ -126,9 +132,19 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
android_overlay_factory_cb_, std::move(request_overlay_info_cb),
std::make_unique<VideoFrameFactoryImpl>(gpu_task_runner_,
std::move(get_stub_cb)));
-#elif defined(OS_WIN)
- return std::make_unique<D3D11VideoDecoder>(
- gpu_task_runner_,
+#elif defined(OS_MACOSX) || defined(OS_WIN)
+#if defined(OS_WIN)
+ if (base::FeatureList::IsEnabled(kD3D11VideoDecoder)) {
+ return D3D11VideoDecoder::Create(
+ gpu_task_runner_, gpu_preferences_, gpu_workarounds_,
+ base::BindRepeating(&GetCommandBufferStub, media_gpu_channel_manager_,
+ command_buffer_id->channel_token,
+ command_buffer_id->route_id));
+ }
+#endif // defined(OS_WIN)
+ return VdaVideoDecoder::Create(
+ task_runner, gpu_task_runner_, media_log, target_color_space,
+ gpu_preferences_, gpu_workarounds_,
base::BindRepeating(&GetCommandBufferStub, media_gpu_channel_manager_,
command_buffer_id->channel_token,
command_buffer_id->route_id));
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.h b/chromium/media/mojo/services/gpu_mojo_media_client.h
index eda075db512..37acd5ab568 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.h
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.h
@@ -12,6 +12,7 @@
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "media/base/android_overlay_mojo_factory.h"
#include "media/cdm/cdm_proxy.h"
#include "media/mojo/services/mojo_media_client.h"
@@ -28,6 +29,7 @@ class GpuMojoMediaClient : public MojoMediaClient {
// CdmProxy is not supported on the platform.
GpuMojoMediaClient(
const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
base::WeakPtr<MediaGpuChannelManager> media_gpu_channel_manager,
AndroidOverlayMojoFactoryCB android_overlay_factory_cb,
@@ -42,7 +44,8 @@ class GpuMojoMediaClient : public MojoMediaClient {
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
mojom::CommandBufferIdPtr command_buffer_id,
- RequestOverlayInfoCB request_overlay_info_cb) final;
+ RequestOverlayInfoCB request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space) final;
std::unique_ptr<CdmFactory> CreateCdmFactory(
service_manager::mojom::InterfaceProvider* interface_provider) final;
#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
@@ -51,6 +54,7 @@ class GpuMojoMediaClient : public MojoMediaClient {
private:
gpu::GpuPreferences gpu_preferences_;
+ gpu::GpuDriverBugWorkarounds gpu_workarounds_;
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
base::WeakPtr<MediaGpuChannelManager> media_gpu_channel_manager_;
AndroidOverlayMojoFactoryCB android_overlay_factory_cb_;
diff --git a/chromium/media/mojo/services/interface_factory_impl.cc b/chromium/media/mojo/services/interface_factory_impl.cc
index 760568207a6..14df0447a7d 100644
--- a/chromium/media/mojo/services/interface_factory_impl.cc
+++ b/chromium/media/mojo/services/interface_factory_impl.cc
@@ -11,6 +11,7 @@
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/media_log.h"
+#include "media/mojo/services/mojo_decryptor_service.h"
#include "media/mojo/services/mojo_media_client.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "services/service_manager/public/mojom/interface_provider.mojom.h"
@@ -25,9 +26,7 @@
#if BUILDFLAG(ENABLE_MOJO_RENDERER)
#include "base/bind_helpers.h"
-#include "media/base/audio_renderer_sink.h"
-#include "media/base/renderer_factory.h"
-#include "media/base/video_renderer_sink.h"
+#include "media/base/renderer.h"
#include "media/mojo/services/mojo_renderer_service.h"
#endif // BUILDFLAG(ENABLE_MOJO_RENDERER)
@@ -58,6 +57,8 @@ InterfaceFactoryImpl::InterfaceFactoryImpl(
mojo_media_client_(mojo_media_client) {
DVLOG(1) << __func__;
DCHECK(mojo_media_client_);
+
+ SetBindingConnectionErrorHandler();
}
InterfaceFactoryImpl::~InterfaceFactoryImpl() {
@@ -68,6 +69,7 @@ InterfaceFactoryImpl::~InterfaceFactoryImpl() {
void InterfaceFactoryImpl::CreateAudioDecoder(
mojo::InterfaceRequest<mojom::AudioDecoder> request) {
+ DVLOG(2) << __func__;
#if BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
scoped_refptr<base::SingleThreadTaskRunner> task_runner(
base::ThreadTaskRunnerHandle::Get());
@@ -88,6 +90,7 @@ void InterfaceFactoryImpl::CreateAudioDecoder(
void InterfaceFactoryImpl::CreateVideoDecoder(
mojom::VideoDecoderRequest request) {
+ DVLOG(2) << __func__;
#if BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
video_decoder_bindings_.AddBinding(
std::make_unique<MojoVideoDecoderService>(mojo_media_client_,
@@ -100,11 +103,8 @@ void InterfaceFactoryImpl::CreateRenderer(
media::mojom::HostedRendererType type,
const std::string& type_specific_id,
mojo::InterfaceRequest<mojom::Renderer> request) {
+ DVLOG(2) << __func__;
#if BUILDFLAG(ENABLE_MOJO_RENDERER)
- RendererFactory* renderer_factory = GetRendererFactory();
- if (!renderer_factory)
- return;
-
// Creation requests for non default renderers should have already been
// handled by now, in a different layer.
if (type != media::mojom::HostedRendererType::kDefault) {
@@ -112,17 +112,11 @@ void InterfaceFactoryImpl::CreateRenderer(
return;
}
- scoped_refptr<base::SingleThreadTaskRunner> task_runner(
- base::ThreadTaskRunnerHandle::Get());
- auto audio_sink =
- mojo_media_client_->CreateAudioRendererSink(type_specific_id);
-
- auto video_sink = mojo_media_client_->CreateVideoRendererSink(task_runner);
- // TODO(hubbe): Find out if gfx::ColorSpace() is correct for the
- // target_color_space.
- auto renderer = renderer_factory->CreateRenderer(
- task_runner, task_runner, audio_sink.get(), video_sink.get(),
- RequestOverlayInfoCB(), gfx::ColorSpace());
+ // For HostedRendererType::kDefault type, |type_specific_id| represents an
+ // audio device ID. See interface_factory.mojom.
+ const std::string& audio_device_id = type_specific_id;
+ auto renderer = mojo_media_client_->CreateRenderer(
+ base::ThreadTaskRunnerHandle::Get(), media_log_, audio_device_id);
if (!renderer) {
DLOG(ERROR) << "Renderer creation failed.";
return;
@@ -130,8 +124,8 @@ void InterfaceFactoryImpl::CreateRenderer(
std::unique_ptr<MojoRendererService> mojo_renderer_service =
std::make_unique<MojoRendererService>(
- &cdm_service_context_, std::move(audio_sink), std::move(video_sink),
- std::move(renderer), MojoRendererService::InitiateSurfaceRequestCB());
+ &cdm_service_context_, std::move(renderer),
+ MojoRendererService::InitiateSurfaceRequestCB());
MojoRendererService* mojo_renderer_service_ptr = mojo_renderer_service.get();
@@ -150,6 +144,7 @@ void InterfaceFactoryImpl::CreateRenderer(
void InterfaceFactoryImpl::CreateCdm(
const std::string& /* key_system */,
mojo::InterfaceRequest<mojom::ContentDecryptionModule> request) {
+ DVLOG(2) << __func__;
#if BUILDFLAG(ENABLE_MOJO_CDM)
CdmFactory* cdm_factory = GetCdmFactory();
if (!cdm_factory)
@@ -161,8 +156,23 @@ void InterfaceFactoryImpl::CreateCdm(
#endif // BUILDFLAG(ENABLE_MOJO_CDM)
}
+void InterfaceFactoryImpl::CreateDecryptor(int cdm_id,
+ mojom::DecryptorRequest request) {
+ DVLOG(2) << __func__;
+ auto mojo_decryptor_service =
+ MojoDecryptorService::Create(cdm_id, &cdm_service_context_);
+ if (!mojo_decryptor_service) {
+ DLOG(ERROR) << "MojoDecryptorService creation failed.";
+ return;
+ }
+
+ decryptor_bindings_.AddBinding(std::move(mojo_decryptor_service),
+ std::move(request));
+}
+
void InterfaceFactoryImpl::CreateCdmProxy(const std::string& cdm_guid,
mojom::CdmProxyRequest request) {
+ DVLOG(2) << __func__;
#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
if (!base::IsValidGUID(cdm_guid)) {
DLOG(ERROR) << "Invalid CDM GUID: " << cdm_guid;
@@ -182,17 +192,83 @@ void InterfaceFactoryImpl::CreateCdmProxy(const std::string& cdm_guid,
#endif // BUILDFLAG(ENABLE_LIBRARY_CDMS)
}
+void InterfaceFactoryImpl::OnDestroyPending(base::OnceClosure destroy_cb) {
+ DVLOG(1) << __func__;
+ destroy_cb_ = std::move(destroy_cb);
+ if (IsEmpty())
+ std::move(destroy_cb_).Run();
+ // else the callback will be called when IsEmpty() becomes true.
+}
+
+bool InterfaceFactoryImpl::IsEmpty() {
+#if BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
+ if (!audio_decoder_bindings_.empty())
+ return false;
+#endif // BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
+
+#if BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
+ if (!video_decoder_bindings_.empty())
+ return false;
+#endif // BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
+
#if BUILDFLAG(ENABLE_MOJO_RENDERER)
-RendererFactory* InterfaceFactoryImpl::GetRendererFactory() {
- if (!renderer_factory_) {
- renderer_factory_ = mojo_media_client_->CreateRendererFactory(media_log_);
- LOG_IF(ERROR, !renderer_factory_) << "RendererFactory not available.";
- }
- return renderer_factory_.get();
+ if (!renderer_bindings_.empty())
+ return false;
+#endif // BUILDFLAG(ENABLE_MOJO_RENDERER)
+
+#if BUILDFLAG(ENABLE_MOJO_CDM)
+ if (!cdm_bindings_.empty())
+ return false;
+#endif // BUILDFLAG(ENABLE_MOJO_CDM)
+
+#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
+ if (!cdm_proxy_bindings_.empty())
+ return false;
+#endif // BUILDFLAG(ENABLE_LIBRARY_CDMS)
+
+ if (!decryptor_bindings_.empty())
+ return false;
+
+ return true;
}
+
+void InterfaceFactoryImpl::SetBindingConnectionErrorHandler() {
+ // base::Unretained is safe because all bindings are owned by |this|. If
+ // |this| is destructed, the bindings will be destructed as well and the
+ // connection error handler should never be called.
+ auto connection_error_cb = base::BindRepeating(
+ &InterfaceFactoryImpl::OnBindingConnectionError, base::Unretained(this));
+
+#if BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
+ audio_decoder_bindings_.set_connection_error_handler(connection_error_cb);
+#endif // BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
+
+#if BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
+ video_decoder_bindings_.set_connection_error_handler(connection_error_cb);
+#endif // BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
+
+#if BUILDFLAG(ENABLE_MOJO_RENDERER)
+ renderer_bindings_.set_connection_error_handler(connection_error_cb);
#endif // BUILDFLAG(ENABLE_MOJO_RENDERER)
#if BUILDFLAG(ENABLE_MOJO_CDM)
+ cdm_bindings_.set_connection_error_handler(connection_error_cb);
+#endif // BUILDFLAG(ENABLE_MOJO_CDM)
+
+#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
+ cdm_proxy_bindings_.set_connection_error_handler(connection_error_cb);
+#endif // BUILDFLAG(ENABLE_LIBRARY_CDMS)
+
+ decryptor_bindings_.set_connection_error_handler(connection_error_cb);
+}
+
+void InterfaceFactoryImpl::OnBindingConnectionError() {
+ DVLOG(2) << __func__;
+ if (destroy_cb_ && IsEmpty())
+ std::move(destroy_cb_).Run();
+}
+
+#if BUILDFLAG(ENABLE_MOJO_CDM)
CdmFactory* InterfaceFactoryImpl::GetCdmFactory() {
if (!cdm_factory_) {
cdm_factory_ = mojo_media_client_->CreateCdmFactory(interfaces_.get());
diff --git a/chromium/media/mojo/services/interface_factory_impl.h b/chromium/media/mojo/services/interface_factory_impl.h
index 7797ca1b415..486fdaea204 100644
--- a/chromium/media/mojo/services/interface_factory_impl.h
+++ b/chromium/media/mojo/services/interface_factory_impl.h
@@ -10,6 +10,7 @@
#include "base/macros.h"
#include "media/mojo/buildflags.h"
#include "media/mojo/interfaces/interface_factory.mojom.h"
+#include "media/mojo/services/deferred_destroy_strong_binding_set.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "mojo/public/cpp/bindings/strong_binding_set.h"
#include "services/service_manager/public/cpp/connector.h"
@@ -20,9 +21,8 @@ namespace media {
class CdmFactory;
class MediaLog;
class MojoMediaClient;
-class RendererFactory;
-class InterfaceFactoryImpl : public mojom::InterfaceFactory {
+class InterfaceFactoryImpl : public DeferredDestroy<mojom::InterfaceFactory> {
public:
InterfaceFactoryImpl(
service_manager::mojom::InterfaceProviderPtr interfaces,
@@ -39,13 +39,20 @@ class InterfaceFactoryImpl : public mojom::InterfaceFactory {
mojom::RendererRequest request) final;
void CreateCdm(const std::string& key_system,
mojom::ContentDecryptionModuleRequest request) final;
+ void CreateDecryptor(int cdm_id, mojom::DecryptorRequest request) final;
void CreateCdmProxy(const std::string& cdm_guid,
mojom::CdmProxyRequest request) final;
+ // DeferredDestroy<mojom::InterfaceFactory> implemenation.
+ void OnDestroyPending(base::OnceClosure destroy_cb) final;
+
private:
-#if BUILDFLAG(ENABLE_MOJO_RENDERER)
- RendererFactory* GetRendererFactory();
-#endif // BUILDFLAG(ENABLE_MOJO_RENDERER)
+ // Returns true when there is no media component (audio/video decoder,
+ // renderer, cdm and cdm proxy) bindings exist.
+ bool IsEmpty();
+
+ void SetBindingConnectionErrorHandler();
+ void OnBindingConnectionError();
#if BUILDFLAG(ENABLE_MOJO_CDM)
CdmFactory* GetCdmFactory();
@@ -66,7 +73,6 @@ class InterfaceFactoryImpl : public mojom::InterfaceFactory {
#if BUILDFLAG(ENABLE_MOJO_RENDERER)
MediaLog* media_log_;
- std::unique_ptr<RendererFactory> renderer_factory_;
mojo::StrongBindingSet<mojom::Renderer> renderer_bindings_;
#endif // BUILDFLAG(ENABLE_MOJO_RENDERER)
@@ -80,8 +86,11 @@ class InterfaceFactoryImpl : public mojom::InterfaceFactory {
mojo::StrongBindingSet<mojom::CdmProxy> cdm_proxy_bindings_;
#endif // BUILDFLAG(ENABLE_LIBRARY_CDMS)
+ mojo::StrongBindingSet<mojom::Decryptor> decryptor_bindings_;
+
std::unique_ptr<service_manager::ServiceContextRef> connection_ref_;
MojoMediaClient* mojo_media_client_;
+ base::OnceClosure destroy_cb_;
DISALLOW_COPY_AND_ASSIGN(InterfaceFactoryImpl);
};
diff --git a/chromium/media/mojo/services/media_manifest.json b/chromium/media/mojo/services/media_manifest.json
index 64c6194d6bf..ca7971ecb25 100644
--- a/chromium/media/mojo/services/media_manifest.json
+++ b/chromium/media/mojo/services/media_manifest.json
@@ -4,7 +4,7 @@
"interface_provider_specs": {
"service_manager:connector": {
"provides": {
- "media:media": [ "media::mojom::MediaService" ]
+ "media:media": [ "media.mojom.MediaService" ]
},
"requires": {
"*": [ "app" ]
diff --git a/chromium/media/mojo/services/media_resource_shim.cc b/chromium/media/mojo/services/media_resource_shim.cc
index ace3f01b4ab..184f2a35022 100644
--- a/chromium/media/mojo/services/media_resource_shim.cc
+++ b/chromium/media/mojo/services/media_resource_shim.cc
@@ -39,10 +39,6 @@ std::vector<DemuxerStream*> MediaResourceShim::GetAllStreams() {
return result;
}
-void MediaResourceShim::SetStreamStatusChangeCB(
- const StreamStatusChangeCB& cb) {
-}
-
void MediaResourceShim::OnStreamReady() {
if (++streams_ready_ == streams_.size())
base::ResetAndReturn(&demuxer_ready_cb_).Run();
diff --git a/chromium/media/mojo/services/media_resource_shim.h b/chromium/media/mojo/services/media_resource_shim.h
index fe54ef7c648..fab9eecd5d5 100644
--- a/chromium/media/mojo/services/media_resource_shim.h
+++ b/chromium/media/mojo/services/media_resource_shim.h
@@ -27,7 +27,6 @@ class MediaResourceShim : public MediaResource {
// MediaResource interface.
std::vector<DemuxerStream*> GetAllStreams() override;
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) override;
private:
// Called as each mojom::DemuxerStream becomes ready. Once all streams
diff --git a/chromium/media/mojo/services/media_service.h b/chromium/media/mojo/services/media_service.h
index 5219eb2a56e..0e4dc375009 100644
--- a/chromium/media/mojo/services/media_service.h
+++ b/chromium/media/mojo/services/media_service.h
@@ -11,9 +11,9 @@
#include "media/base/media_log.h"
#include "media/mojo/interfaces/interface_factory.mojom.h"
#include "media/mojo/interfaces/media_service.mojom.h"
+#include "media/mojo/services/deferred_destroy_strong_binding_set.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/binding_set.h"
-#include "mojo/public/cpp/bindings/strong_binding_set.h"
#include "services/service_manager/public/cpp/binder_registry.h"
#include "services/service_manager/public/cpp/service.h"
#include "services/service_manager/public/cpp/service_context.h"
@@ -56,7 +56,8 @@ class MEDIA_MOJO_EXPORT MediaService : public service_manager::Service,
// Note: Since |&media_log_| is passed to bindings, the bindings must be
// destructed first.
- mojo::StrongBindingSet<mojom::InterfaceFactory> interface_factory_bindings_;
+ DeferredDestroyStrongBindingSet<mojom::InterfaceFactory>
+ interface_factory_bindings_;
service_manager::BinderRegistry registry_;
mojo::BindingSet<mojom::MediaService> bindings_;
diff --git a/chromium/media/mojo/services/media_service_factory.cc b/chromium/media/mojo/services/media_service_factory.cc
index be65dd05d12..b789dfd4819 100644
--- a/chromium/media/mojo/services/media_service_factory.cc
+++ b/chromium/media/mojo/services/media_service_factory.cc
@@ -31,14 +31,15 @@ std::unique_ptr<service_manager::Service> CreateMediaService() {
std::unique_ptr<service_manager::Service> CreateGpuMediaService(
const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
base::WeakPtr<MediaGpuChannelManager> media_gpu_channel_manager,
AndroidOverlayMojoFactoryCB android_overlay_factory_cb,
CdmProxyFactoryCB cdm_proxy_factory_cb) {
return std::unique_ptr<service_manager::Service>(
new MediaService(std::make_unique<GpuMojoMediaClient>(
- gpu_preferences, task_runner, media_gpu_channel_manager,
- std::move(android_overlay_factory_cb),
+ gpu_preferences, gpu_workarounds, task_runner,
+ media_gpu_channel_manager, std::move(android_overlay_factory_cb),
std::move(cdm_proxy_factory_cb))));
}
diff --git a/chromium/media/mojo/services/media_service_factory.h b/chromium/media/mojo/services/media_service_factory.h
index 2c0e7ad644b..8dd062587e7 100644
--- a/chromium/media/mojo/services/media_service_factory.h
+++ b/chromium/media/mojo/services/media_service_factory.h
@@ -11,6 +11,7 @@
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "gpu/command_buffer/service/gpu_preferences.h"
+#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "media/base/android_overlay_mojo_factory.h"
#include "media/cdm/cdm_proxy.h"
#include "media/mojo/services/media_mojo_export.h"
@@ -34,6 +35,7 @@ CreateMediaService();
std::unique_ptr<service_manager::Service> MEDIA_MOJO_EXPORT
CreateGpuMediaService(
const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
base::WeakPtr<MediaGpuChannelManager> media_gpu_channel_manager,
AndroidOverlayMojoFactoryCB android_overlay_factory_cb,
diff --git a/chromium/media/mojo/services/media_service_unittest.cc b/chromium/media/mojo/services/media_service_unittest.cc
index 3685165b8c0..4897ac8790c 100644
--- a/chromium/media/mojo/services/media_service_unittest.cc
+++ b/chromium/media/mojo/services/media_service_unittest.cc
@@ -11,11 +11,13 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/run_loop.h"
+#include "base/task_scheduler/post_task.h"
#include "build/build_config.h"
#include "media/base/cdm_config.h"
#include "media/base/mock_filters.h"
#include "media/base/test_helpers.h"
#include "media/mojo/buildflags.h"
+#include "media/mojo/clients/mojo_decryptor.h"
#include "media/mojo/clients/mojo_demuxer_stream_impl.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/interfaces/constants.mojom.h"
@@ -32,20 +34,54 @@
#include "url/gurl.h"
#include "url/origin.h"
+#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
+#include "media/cdm/cdm_paths.h" // nogncheck
+#include "media/mojo/interfaces/cdm_proxy.mojom.h"
+#endif
+
+namespace media {
+
+namespace {
+
+using testing::_;
+using testing::DoAll;
using testing::Invoke;
using testing::InvokeWithoutArgs;
using testing::NiceMock;
+using testing::SaveArg;
using testing::StrictMock;
+using testing::WithArg;
-namespace media {
-namespace {
+MATCHER_P(MatchesResult, success, "") {
+ return arg->success == success;
+}
#if BUILDFLAG(ENABLE_MOJO_CDM) && !defined(OS_ANDROID)
const char kClearKeyKeySystem[] = "org.w3.clearkey";
const char kInvalidKeySystem[] = "invalid.key.system";
#endif
-const char kSecurityOrigin[] = "http://foo.com";
+const char kSecurityOrigin[] = "https://foo.com";
+
+// Returns a trivial encrypted DecoderBuffer.
+scoped_refptr<DecoderBuffer> CreateEncryptedBuffer() {
+ scoped_refptr<DecoderBuffer> encrypted_buffer(new DecoderBuffer(100));
+ encrypted_buffer->set_decrypt_config(
+ DecryptConfig::CreateCencConfig("dummy_key_id", "0123456789ABCDEF", {}));
+ return encrypted_buffer;
+}
+
+class MockCdmProxyClient : public mojom::CdmProxyClient {
+ public:
+ MockCdmProxyClient() = default;
+ ~MockCdmProxyClient() override = default;
+
+ // mojom::CdmProxyClient implementation.
+ MOCK_METHOD0(NotifyHardwareReset, void());
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockCdmProxyClient);
+};
class MockRendererClient : public mojom::RendererClient {
public:
@@ -73,10 +109,20 @@ class MockRendererClient : public mojom::RendererClient {
DISALLOW_COPY_AND_ASSIGN(MockRendererClient);
};
+ACTION_P(QuitLoop, run_loop) {
+ base::PostTask(FROM_HERE, run_loop->QuitClosure());
+}
+
+// Tests MediaService built into a standalone mojo service binary (see
+// ServiceMain() in main.cc) where MediaService uses TestMojoMediaClient.
+// TestMojoMediaClient supports CDM creation using DefaultCdmFactory (only
+// supports Clear Key key system), and Renderer creation using
+// DefaultRendererFactory that always creates media::RendererImpl.
class MediaServiceTest : public service_manager::test::ServiceTest {
public:
MediaServiceTest()
: ServiceTest("media_service_unittests"),
+ cdm_proxy_client_binding_(&cdm_proxy_client_),
renderer_client_binding_(&renderer_client_),
video_stream_(DemuxerStream::VIDEO) {}
~MediaServiceTest() override = default;
@@ -84,44 +130,100 @@ class MediaServiceTest : public service_manager::test::ServiceTest {
void SetUp() override {
ServiceTest::SetUp();
- media::mojom::MediaServicePtr media_service;
- connector()->BindInterface(media::mojom::kMediaServiceName, &media_service);
-
- service_manager::mojom::InterfaceProviderPtr interfaces;
+ service_manager::mojom::InterfaceProviderPtr host_interfaces;
auto provider = std::make_unique<MediaInterfaceProvider>(
- mojo::MakeRequest(&interfaces));
- media_service->CreateInterfaceFactory(
- mojo::MakeRequest(&interface_factory_), std::move(interfaces));
-
- run_loop_.reset(new base::RunLoop());
+ mojo::MakeRequest(&host_interfaces));
+
+ connector()->BindInterface(mojom::kMediaServiceName, &media_service_);
+ media_service_.set_connection_error_handler(
+ base::BindRepeating(&MediaServiceTest::MediaServiceConnectionClosed,
+ base::Unretained(this)));
+ media_service_->CreateInterfaceFactory(
+ mojo::MakeRequest(&interface_factory_), std::move(host_interfaces));
}
- // MOCK_METHOD* doesn't support move only types. Work around this by having
- // an extra method.
- MOCK_METHOD2(OnCdmInitializedInternal, void(bool result, int cdm_id));
- void OnCdmInitialized(mojom::CdmPromiseResultPtr result,
- int cdm_id,
- mojom::DecryptorPtr decryptor) {
- OnCdmInitializedInternal(result->success, cdm_id);
- }
+ MOCK_METHOD3(OnCdmInitialized,
+ void(mojom::CdmPromiseResultPtr result,
+ int cdm_id,
+ mojom::DecryptorPtr decryptor));
+ MOCK_METHOD0(OnCdmConnectionError, void());
- void InitializeCdm(const std::string& key_system,
- bool expected_result,
- int cdm_id) {
+ // Returns the CDM ID associated with the CDM.
+ int InitializeCdm(const std::string& key_system, bool expected_result) {
+ base::RunLoop run_loop;
interface_factory_->CreateCdm(key_system, mojo::MakeRequest(&cdm_));
+ cdm_.set_connection_error_handler(base::BindRepeating(
+ &MediaServiceTest::OnCdmConnectionError, base::Unretained(this)));
+
+ int cdm_id = CdmContext::kInvalidCdmId;
- EXPECT_CALL(*this, OnCdmInitializedInternal(expected_result, cdm_id))
- .WillOnce(InvokeWithoutArgs(run_loop_.get(), &base::RunLoop::Quit));
+ // The last parameter mojom::DecryptorPtr is move-only and not supported by
+ // DoAll. Hence use WithArg to only extract the "int cdm_id" out and then
+ // call DoAll.
+ EXPECT_CALL(*this, OnCdmInitialized(MatchesResult(expected_result), _, _))
+ .WillOnce(WithArg<1>(DoAll(SaveArg<0>(&cdm_id), QuitLoop(&run_loop))));
cdm_->Initialize(key_system, url::Origin::Create(GURL(kSecurityOrigin)),
CdmConfig(),
- base::Bind(&MediaServiceTest::OnCdmInitialized,
- base::Unretained(this)));
+ base::BindOnce(&MediaServiceTest::OnCdmInitialized,
+ base::Unretained(this)));
+ run_loop.Run();
+ return cdm_id;
+ }
+
+ MOCK_METHOD4(OnCdmProxyInitialized,
+ void(CdmProxy::Status status,
+ CdmProxy::Protocol protocol,
+ uint32_t crypto_session_id,
+ int cdm_id));
+
+ // Returns the CDM ID associated with the CdmProxy.
+ int InitializeCdmProxy(const std::string& cdm_guid) {
+ base::RunLoop run_loop;
+ interface_factory_->CreateCdmProxy(cdm_guid,
+ mojo::MakeRequest(&cdm_proxy_));
+
+ mojom::CdmProxyClientAssociatedPtrInfo client_ptr_info;
+ cdm_proxy_client_binding_.Bind(mojo::MakeRequest(&client_ptr_info));
+ int cdm_id = CdmContext::kInvalidCdmId;
+
+ EXPECT_CALL(*this, OnCdmProxyInitialized(CdmProxy::Status::kOk, _, _, _))
+ .WillOnce(DoAll(SaveArg<3>(&cdm_id), QuitLoop(&run_loop)));
+ cdm_proxy_->Initialize(
+ std::move(client_ptr_info),
+ base::BindOnce(&MediaServiceTest::OnCdmProxyInitialized,
+ base::Unretained(this)));
+ run_loop.Run();
+ return cdm_id;
+ }
+
+ MOCK_METHOD2(OnDecrypted,
+ void(Decryptor::Status, scoped_refptr<DecoderBuffer>));
+
+ void CreateDecryptor(int cdm_id, bool expected_result) {
+ base::RunLoop run_loop;
+ mojom::DecryptorPtr decryptor_ptr;
+ interface_factory_->CreateDecryptor(cdm_id,
+ mojo::MakeRequest(&decryptor_ptr));
+ MojoDecryptor mojo_decryptor(std::move(decryptor_ptr));
+
+ // In the success case, there's no decryption key to decrypt the buffer so
+ // we would expect no-key.
+ auto expected_status =
+ expected_result ? Decryptor::kNoKey : Decryptor::kError;
+
+ EXPECT_CALL(*this, OnDecrypted(expected_status, _))
+ .WillOnce(QuitLoop(&run_loop));
+ mojo_decryptor.Decrypt(Decryptor::kVideo, CreateEncryptedBuffer(),
+ base::BindRepeating(&MediaServiceTest::OnDecrypted,
+ base::Unretained(this)));
+ run_loop.Run();
}
MOCK_METHOD1(OnRendererInitialized, void(bool));
void InitializeRenderer(const VideoDecoderConfig& video_config,
bool expected_result) {
+ base::RunLoop run_loop;
interface_factory_->CreateRenderer(
media::mojom::HostedRendererType::kDefault, std::string(),
mojo::MakeRequest(&renderer_));
@@ -135,25 +237,31 @@ class MediaServiceTest : public service_manager::test::ServiceTest {
mojom::RendererClientAssociatedPtrInfo client_ptr_info;
renderer_client_binding_.Bind(mojo::MakeRequest(&client_ptr_info));
- EXPECT_CALL(*this, OnRendererInitialized(expected_result))
- .WillOnce(InvokeWithoutArgs(run_loop_.get(), &base::RunLoop::Quit));
std::vector<mojom::DemuxerStreamPtrInfo> streams;
streams.push_back(std::move(video_stream_proxy_info));
- renderer_->Initialize(std::move(client_ptr_info), std::move(streams),
- base::nullopt, base::nullopt,
- base::Bind(&MediaServiceTest::OnRendererInitialized,
- base::Unretained(this)));
+
+ EXPECT_CALL(*this, OnRendererInitialized(expected_result))
+ .WillOnce(QuitLoop(&run_loop));
+ renderer_->Initialize(
+ std::move(client_ptr_info), std::move(streams), base::nullopt,
+ base::nullopt,
+ base::BindOnce(&MediaServiceTest::OnRendererInitialized,
+ base::Unretained(this)));
+ run_loop.Run();
}
- MOCK_METHOD0(ConnectionClosed, void());
+ MOCK_METHOD0(MediaServiceConnectionClosed, void());
protected:
- std::unique_ptr<base::RunLoop> run_loop_;
-
+ mojom::MediaServicePtr media_service_;
mojom::InterfaceFactoryPtr interface_factory_;
mojom::ContentDecryptionModulePtr cdm_;
+ mojom::CdmProxyPtr cdm_proxy_;
mojom::RendererPtr renderer_;
+ NiceMock<MockCdmProxyClient> cdm_proxy_client_;
+ mojo::AssociatedBinding<mojom::CdmProxyClient> cdm_proxy_client_binding_;
+
NiceMock<MockRendererClient> renderer_client_;
mojo::AssociatedBinding<mojom::RendererClient> renderer_client_binding_;
@@ -168,47 +276,148 @@ class MediaServiceTest : public service_manager::test::ServiceTest {
// Note: base::RunLoop::RunUntilIdle() does not work well in these tests because
// even when the loop is idle, we may still have pending events in the pipe.
+// - If you have an InterfacePtr hosted by the service in the service process,
+// you can use InterfacePtr::FlushForTesting(). Note that this doesn't drain
+// the task runner in the test process and doesn't cover all negative cases.
+// - If you expect a callback on an InterfacePtr call or connection error, use
+// base::RunLoop::Run() and QuitLoop().
// TODO(crbug.com/829233): Enable these tests on Android.
#if BUILDFLAG(ENABLE_MOJO_CDM) && !defined(OS_ANDROID)
TEST_F(MediaServiceTest, InitializeCdm_Success) {
- InitializeCdm(kClearKeyKeySystem, true, 1);
- run_loop_->Run();
+ InitializeCdm(kClearKeyKeySystem, true);
}
TEST_F(MediaServiceTest, InitializeCdm_InvalidKeySystem) {
- InitializeCdm(kInvalidKeySystem, false, 0);
- run_loop_->Run();
+ InitializeCdm(kInvalidKeySystem, false);
+}
+
+TEST_F(MediaServiceTest, Decryptor_WithCdm) {
+ int cdm_id = InitializeCdm(kClearKeyKeySystem, true);
+ CreateDecryptor(cdm_id, true);
}
#endif // BUILDFLAG(ENABLE_MOJO_CDM) && !defined(OS_ANDROID)
#if BUILDFLAG(ENABLE_MOJO_RENDERER)
TEST_F(MediaServiceTest, InitializeRenderer) {
InitializeRenderer(TestVideoConfig::Normal(), true);
- run_loop_->Run();
}
#endif // BUILDFLAG(ENABLE_MOJO_RENDERER)
+#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
+TEST_F(MediaServiceTest, CdmProxy) {
+ InitializeCdmProxy(kClearKeyCdmGuid);
+}
+
+TEST_F(MediaServiceTest, Decryptor_WithCdmProxy) {
+ int cdm_id = InitializeCdmProxy(kClearKeyCdmGuid);
+ CreateDecryptor(cdm_id, true);
+}
+
+TEST_F(MediaServiceTest, Decryptor_WrongCdmId) {
+ int cdm_id = InitializeCdmProxy(kClearKeyCdmGuid);
+ CreateDecryptor(cdm_id + 1, false);
+}
+
+TEST_F(MediaServiceTest, DeferredDestruction_CdmProxy) {
+ InitializeCdmProxy(kClearKeyCdmGuid);
+
+ // Disconnecting InterfaceFactory should not terminate the MediaService since
+ // there is still a CdmProxy hosted.
+ interface_factory_.reset();
+ cdm_proxy_.FlushForTesting();
+
+ // Disconnecting CdmProxy will now terminate the MediaService.
+ base::RunLoop run_loop;
+ EXPECT_CALL(*this, MediaServiceConnectionClosed())
+ .WillOnce(QuitLoop(&run_loop));
+ cdm_proxy_.reset();
+ run_loop.Run();
+}
+#endif // BUILDFLAG(ENABLE_LIBRARY_CDMS)
+
+TEST_F(MediaServiceTest, Decryptor_WithoutCdmOrCdmProxy) {
+ // Creating decryptor without creating CDM or CdmProxy.
+ CreateDecryptor(1, false);
+}
+
+TEST_F(MediaServiceTest, Lifetime_DestroyMediaService) {
+ // Disconnecting |media_service_| doesn't terminate MediaService
+ // since |interface_factory_| is still alive. This is ensured here since
+ // MediaServiceConnectionClosed() is not called.
+ EXPECT_CALL(*this, MediaServiceConnectionClosed()).Times(0);
+ media_service_.reset();
+ interface_factory_.FlushForTesting();
+}
+
+TEST_F(MediaServiceTest, Lifetime_DestroyInterfaceFactory) {
+ // Disconnecting InterfaceFactory will now terminate the MediaService since
+  // there are no media components hosted.
+ base::RunLoop run_loop;
+ EXPECT_CALL(*this, MediaServiceConnectionClosed())
+ .WillOnce(QuitLoop(&run_loop));
+ interface_factory_.reset();
+ run_loop.Run();
+}
+
+#if (BUILDFLAG(ENABLE_MOJO_CDM) && !defined(OS_ANDROID)) || \
+ BUILDFLAG(ENABLE_MOJO_RENDERER)
+// MediaService stays alive as long as there are InterfaceFactory impls, which
+// are then deferred-destroyed until no media components (e.g. CDM or Renderer)
+// are hosted.
TEST_F(MediaServiceTest, Lifetime) {
- // The lifetime of the media service is controlled by the number of
- // live InterfaceFactory impls, not MediaService impls, so this pipe should
- // be closed when the last InterfaceFactory is destroyed.
- media::mojom::MediaServicePtr media_service;
- connector()->BindInterface(media::mojom::kMediaServiceName, &media_service);
- media_service.set_connection_error_handler(
- base::Bind(&MediaServiceTest::ConnectionClosed, base::Unretained(this)));
-
- // Disconnecting CDM and Renderer services doesn't terminate the app.
+#if BUILDFLAG(ENABLE_MOJO_CDM) && !defined(OS_ANDROID)
+ InitializeCdm(kClearKeyKeySystem, true);
+#endif
+
+#if BUILDFLAG(ENABLE_MOJO_RENDERER)
+ InitializeRenderer(TestVideoConfig::Normal(), true);
+#endif
+
+ // Disconnecting CDM and Renderer services doesn't terminate MediaService
+ // since |interface_factory_| is still alive.
cdm_.reset();
renderer_.reset();
+ interface_factory_.FlushForTesting();
- // Disconnecting InterfaceFactory service should terminate the app, which will
- // close the connection.
- EXPECT_CALL(*this, ConnectionClosed())
- .WillOnce(Invoke(run_loop_.get(), &base::RunLoop::Quit));
+ // Disconnecting InterfaceFactory will now terminate the MediaService.
+ base::RunLoop run_loop;
+ EXPECT_CALL(*this, MediaServiceConnectionClosed())
+ .WillOnce(QuitLoop(&run_loop));
interface_factory_.reset();
+ run_loop.Run();
+}
+
+TEST_F(MediaServiceTest, DeferredDestruction) {
+#if BUILDFLAG(ENABLE_MOJO_CDM) && !defined(OS_ANDROID)
+ InitializeCdm(kClearKeyKeySystem, true);
+#endif
- run_loop_->Run();
+#if BUILDFLAG(ENABLE_MOJO_RENDERER)
+ InitializeRenderer(TestVideoConfig::Normal(), true);
+#endif
+
+ ASSERT_TRUE(cdm_ || renderer_);
+
+ // Disconnecting InterfaceFactory should not terminate the MediaService since
+ // there are still media components (CDM or Renderer) hosted.
+ interface_factory_.reset();
+ if (cdm_)
+ cdm_.FlushForTesting();
+ else if (renderer_)
+ renderer_.FlushForTesting();
+ else
+ NOTREACHED();
+
+ // Disconnecting CDM and Renderer will now terminate the MediaService.
+ base::RunLoop run_loop;
+ EXPECT_CALL(*this, MediaServiceConnectionClosed())
+ .WillOnce(QuitLoop(&run_loop));
+ cdm_.reset();
+ renderer_.reset();
+ run_loop.Run();
}
+#endif // (BUILDFLAG(ENABLE_MOJO_CDM) && !defined(OS_ANDROID)) ||
+ // BUILDFLAG(ENABLE_MOJO_RENDERER)
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_decoder_service.cc b/chromium/media/mojo/services/mojo_audio_decoder_service.cc
index 5f50351e72d..15cae7c515f 100644
--- a/chromium/media/mojo/services/mojo_audio_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_audio_decoder_service.cc
@@ -57,7 +57,7 @@ void MojoAudioDecoderService::Initialize(const AudioDecoderConfig& config,
base::Bind(&MojoAudioDecoderService::OnInitialized, weak_this_,
base::Passed(&callback)),
base::Bind(&MojoAudioDecoderService::OnAudioBufferReady, weak_this_),
- media::AudioDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
}
void MojoAudioDecoderService::SetDataSource(
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream.cc b/chromium/media/mojo/services/mojo_audio_input_stream.cc
index 7592b7589f4..3762dbb608c 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream.cc
+++ b/chromium/media/mojo/services/mojo_audio_input_stream.cc
@@ -8,7 +8,7 @@
#include <utility>
#include "base/callback_helpers.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/read_only_shared_memory_region.h"
#include "base/sync_socket.h"
#include "mojo/public/cpp/system/platform_handle.h"
@@ -49,6 +49,12 @@ MojoAudioInputStream::~MojoAudioInputStream() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
+void MojoAudioInputStream::SetOutputDeviceForAec(
+ const std::string& raw_output_device_id) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ delegate_->OnSetOutputDeviceForAec(raw_output_device_id);
+}
+
void MojoAudioInputStream::Record() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
delegate_->OnRecordStream();
@@ -67,24 +73,20 @@ void MojoAudioInputStream::SetVolume(double volume) {
void MojoAudioInputStream::OnStreamCreated(
int stream_id,
- const base::SharedMemory* shared_memory,
+ base::ReadOnlySharedMemoryRegion shared_memory_region,
std::unique_ptr<base::CancelableSyncSocket> foreign_socket,
bool initially_muted) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(stream_created_callback_);
- DCHECK(shared_memory);
DCHECK(foreign_socket);
- base::SharedMemoryHandle foreign_memory_handle =
- shared_memory->GetReadOnlyHandle();
- if (!base::SharedMemory::IsHandleValid(foreign_memory_handle)) {
+ if (!shared_memory_region.IsValid()) {
OnStreamError(/*not used*/ 0);
return;
}
- mojo::ScopedSharedBufferHandle buffer_handle = mojo::WrapSharedMemoryHandle(
- foreign_memory_handle, shared_memory->requested_size(),
- mojo::UnwrappedSharedMemoryHandleProtection::kReadOnly);
+ mojo::ScopedSharedBufferHandle buffer_handle =
+ mojo::WrapReadOnlySharedMemoryRegion(std::move(shared_memory_region));
mojo::ScopedHandle socket_handle =
mojo::WrapPlatformFile(foreign_socket->Release());
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream.h b/chromium/media/mojo/services/mojo_audio_input_stream.h
index d28f1e1c27e..26c4af1ef56 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream.h
+++ b/chromium/media/mojo/services/mojo_audio_input_stream.h
@@ -42,6 +42,8 @@ class MEDIA_MOJO_EXPORT MojoAudioInputStream
~MojoAudioInputStream() override;
+ void SetOutputDeviceForAec(const std::string& raw_output_device_id);
+
private:
// mojom::AudioInputStream implementation.
void Record() override;
@@ -50,7 +52,7 @@ class MEDIA_MOJO_EXPORT MojoAudioInputStream
// AudioInputDelegate::EventHandler implementation.
void OnStreamCreated(
int stream_id,
- const base::SharedMemory* shared_memory,
+ base::ReadOnlySharedMemoryRegion shared_memory_region,
std::unique_ptr<base::CancelableSyncSocket> foreign_socket,
bool initially_muted) override;
void OnMuted(int stream_id, bool is_muted) override;
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream_observer.cc b/chromium/media/mojo/services/mojo_audio_input_stream_observer.cc
deleted file mode 100644
index 8f93afeadb4..00000000000
--- a/chromium/media/mojo/services/mojo_audio_input_stream_observer.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/mojo/services/mojo_audio_input_stream_observer.h"
-
-#include <utility>
-
-namespace media {
-
-MojoAudioInputStreamObserver::MojoAudioInputStreamObserver(
- mojom::AudioInputStreamObserverRequest request,
- base::OnceClosure recording_started_callback,
- base::OnceClosure connection_error_callback)
- : binding_(this, std::move(request)),
- recording_started_callback_(std::move(recording_started_callback)) {
- DCHECK(recording_started_callback_);
- binding_.set_connection_error_handler(std::move(connection_error_callback));
-}
-
-MojoAudioInputStreamObserver::~MojoAudioInputStreamObserver() {
- DCHECK_CALLED_ON_VALID_SEQUENCE(owning_sequence_);
-}
-
-void MojoAudioInputStreamObserver::DidStartRecording() {
- DCHECK_CALLED_ON_VALID_SEQUENCE(owning_sequence_);
- DCHECK(recording_started_callback_);
- std::move(recording_started_callback_).Run();
-}
-
-} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream_observer.h b/chromium/media/mojo/services/mojo_audio_input_stream_observer.h
deleted file mode 100644
index c85f73df712..00000000000
--- a/chromium/media/mojo/services/mojo_audio_input_stream_observer.h
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_MOJO_SERVICES_MOJO_AUDIO_INPUT_STREAM_OBSERVER_H_
-#define MEDIA_MOJO_SERVICES_MOJO_AUDIO_INPUT_STREAM_OBSERVER_H_
-
-#include "media/mojo/interfaces/audio_input_stream.mojom.h"
-#include "media/mojo/services/media_mojo_export.h"
-#include "mojo/public/cpp/bindings/binding.h"
-
-namespace media {
-
-class MEDIA_MOJO_EXPORT MojoAudioInputStreamObserver
- : public mojom::AudioInputStreamObserver {
- public:
- MojoAudioInputStreamObserver(mojom::AudioInputStreamObserverRequest request,
- base::OnceClosure recording_started_callback,
- base::OnceClosure connection_error_callback);
- ~MojoAudioInputStreamObserver() override;
-
- void DidStartRecording() override;
-
- private:
- mojo::Binding<AudioInputStreamObserver> binding_;
- base::OnceClosure recording_started_callback_;
-
- SEQUENCE_CHECKER(owning_sequence_);
-
- DISALLOW_COPY_AND_ASSIGN(MojoAudioInputStreamObserver);
-};
-
-} // namespace media
-
-#endif // MEDIA_MOJO_SERVICES_MOJO_AUDIO_INPUT_STREAM_OBSERVER_H_
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream_observer_unittest.cc b/chromium/media/mojo/services/mojo_audio_input_stream_observer_unittest.cc
deleted file mode 100644
index 287eeb3e99a..00000000000
--- a/chromium/media/mojo/services/mojo_audio_input_stream_observer_unittest.cc
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/mojo/services/mojo_audio_input_stream_observer.h"
-
-#include <memory>
-#include <utility>
-
-#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
-#include "mojo/public/cpp/bindings/associated_binding.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-namespace media {
-
-class MojoAudioInputStreamObserverTest : public testing::Test {
- public:
- MojoAudioInputStreamObserverTest() {}
- ~MojoAudioInputStreamObserverTest() {}
-
- std::unique_ptr<MojoAudioInputStreamObserver> CreateObserver(
- media::mojom::AudioInputStreamObserverRequest request) {
- return std::make_unique<MojoAudioInputStreamObserver>(
- std::move(request),
- base::BindOnce(
- &MojoAudioInputStreamObserverTest::RecordingStartedCallback,
- base::Unretained(this)),
- base::BindOnce(
- &MojoAudioInputStreamObserverTest::BindingConnectionError,
- base::Unretained(this)));
- }
-
- MOCK_METHOD0(RecordingStartedCallback, void());
- MOCK_METHOD0(BindingConnectionError, void());
-
- private:
- base::test::ScopedTaskEnvironment scoped_task_env_;
-
- DISALLOW_COPY_AND_ASSIGN(MojoAudioInputStreamObserverTest);
-};
-
-TEST_F(MojoAudioInputStreamObserverTest, DidStartRecording) {
- media::mojom::AudioInputStreamObserverPtr observer_ptr;
- std::unique_ptr<MojoAudioInputStreamObserver> observer =
- CreateObserver(mojo::MakeRequest(&observer_ptr));
-
- EXPECT_CALL(*this, RecordingStartedCallback());
- observer_ptr->DidStartRecording();
- base::RunLoop().RunUntilIdle();
-}
-
-TEST_F(MojoAudioInputStreamObserverTest, BindingConnectionError) {
- media::mojom::AudioInputStreamObserverPtr observer_ptr;
- std::unique_ptr<MojoAudioInputStreamObserver> observer =
- CreateObserver(mojo::MakeRequest(&observer_ptr));
-
- EXPECT_CALL(*this, BindingConnectionError());
- observer_ptr.reset();
- base::RunLoop().RunUntilIdle();
-}
-
-} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc b/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
index 5271b9227e1..a193f4fc665 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
@@ -6,7 +6,7 @@
#include <utility>
-#include "base/memory/shared_memory.h"
+#include "base/memory/read_only_shared_memory_region.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/sync_socket.h"
@@ -59,11 +59,12 @@ class TestCancelableSyncSocket : public base::CancelableSyncSocket {
class MockDelegate : public AudioInputDelegate {
public:
MockDelegate() = default;
- ~MockDelegate() = default;
+ ~MockDelegate() override = default;
MOCK_METHOD0(GetStreamId, int());
MOCK_METHOD0(OnRecordStream, void());
MOCK_METHOD1(OnSetVolume, void(double));
+ MOCK_METHOD1(OnSetOutputDeviceForAec, void(const std::string&));
};
class MockDelegateFactory {
@@ -105,17 +106,9 @@ class MockClient : public mojom::AudioInputStreamClient {
socket_ = std::make_unique<base::CancelableSyncSocket>(fd);
EXPECT_NE(socket_->handle(), base::CancelableSyncSocket::kInvalidHandle);
- size_t memory_length;
- base::SharedMemoryHandle shmem_handle;
- mojo::UnwrappedSharedMemoryHandleProtection protection;
- EXPECT_EQ(mojo::UnwrapSharedMemoryHandle(
- std::move(data_pipe->shared_memory), &shmem_handle,
- &memory_length, &protection),
- MOJO_RESULT_OK);
- EXPECT_EQ(protection,
- mojo::UnwrappedSharedMemoryHandleProtection::kReadOnly);
- buffer_ = std::make_unique<base::SharedMemory>(shmem_handle,
- true /* read_only */);
+ region_ = mojo::UnwrapReadOnlySharedMemoryRegion(
+ std::move(data_pipe->shared_memory));
+ EXPECT_TRUE(region_.IsValid());
GotNotification(initially_muted);
}
@@ -127,7 +120,7 @@ class MockClient : public mojom::AudioInputStreamClient {
MOCK_METHOD0(OnError, void());
private:
- std::unique_ptr<base::SharedMemory> buffer_;
+ base::ReadOnlySharedMemoryRegion region_;
std::unique_ptr<base::CancelableSyncSocket> socket_;
DISALLOW_COPY_AND_ASSIGN(MockClient);
@@ -171,10 +164,8 @@ class MojoAudioInputStreamTest : public Test {
base::WrapUnique(delegate_));
EXPECT_TRUE(
base::CancelableSyncSocket::CreatePair(&local_, foreign_socket_.get()));
- base::SharedMemoryCreateOptions shmem_options;
- shmem_options.size = kShmemSize;
- shmem_options.share_read_only = true;
- EXPECT_TRUE(mem_.Create(shmem_options));
+ mem_ = base::ReadOnlySharedMemoryRegion::Create(kShmemSize).region;
+ EXPECT_TRUE(mem_.IsValid());
EXPECT_CALL(mock_delegate_factory_, MockCreateDelegate(NotNull()))
.WillOnce(SaveArg<0>(&delegate_event_handler_));
}
@@ -182,7 +173,7 @@ class MojoAudioInputStreamTest : public Test {
base::MessageLoop loop_;
base::CancelableSyncSocket local_;
std::unique_ptr<TestCancelableSyncSocket> foreign_socket_;
- base::SharedMemory mem_;
+ base::ReadOnlySharedMemoryRegion mem_;
StrictMock<MockDelegate>* delegate_ = nullptr;
AudioInputDelegate::EventHandler* delegate_event_handler_ = nullptr;
StrictMock<MockDelegateFactory> mock_delegate_factory_;
@@ -232,8 +223,9 @@ TEST_F(MojoAudioInputStreamTest, DestructWithCallPending_Safe) {
ASSERT_NE(nullptr, delegate_event_handler_);
foreign_socket_->ExpectOwnershipTransfer();
- delegate_event_handler_->OnStreamCreated(
- kStreamId, &mem_, std::move(foreign_socket_), kInitiallyNotMuted);
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_),
+ kInitiallyNotMuted);
audio_input_ptr->Record();
impl_.reset();
base::RunLoop().RunUntilIdle();
@@ -247,8 +239,9 @@ TEST_F(MojoAudioInputStreamTest, Created_NotifiesClient) {
ASSERT_NE(nullptr, delegate_event_handler_);
foreign_socket_->ExpectOwnershipTransfer();
- delegate_event_handler_->OnStreamCreated(
- kStreamId, &mem_, std::move(foreign_socket_), kInitiallyNotMuted);
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_),
+ kInitiallyNotMuted);
base::RunLoop().RunUntilIdle();
}
@@ -294,8 +287,9 @@ TEST_F(MojoAudioInputStreamTest, DelegateErrorAfterCreated_PropagatesError) {
ASSERT_NE(nullptr, delegate_event_handler_);
foreign_socket_->ExpectOwnershipTransfer();
- delegate_event_handler_->OnStreamCreated(
- kStreamId, &mem_, std::move(foreign_socket_), kInitiallyNotMuted);
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_),
+ kInitiallyNotMuted);
delegate_event_handler_->OnStreamError(kStreamId);
base::RunLoop().RunUntilIdle();
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream.cc b/chromium/media/mojo/services/mojo_audio_output_stream.cc
index 79e52418c6a..5cdfb6ca5fd 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream.cc
@@ -15,29 +15,20 @@
namespace media {
MojoAudioOutputStream::MojoAudioOutputStream(
- mojom::AudioOutputStreamRequest request,
- mojom::AudioOutputStreamClientPtr client,
CreateDelegateCallback create_delegate_callback,
StreamCreatedCallback stream_created_callback,
- base::OnceClosure deleter_callback)
+ DeleterCallback deleter_callback)
: stream_created_callback_(std::move(stream_created_callback)),
deleter_callback_(std::move(deleter_callback)),
- binding_(this, std::move(request)),
- client_(std::move(client)),
+ binding_(this),
weak_factory_(this) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(stream_created_callback_);
DCHECK(deleter_callback_);
- // |this| owns |binding_|, so unretained is safe.
- binding_.set_connection_error_handler(
- base::BindOnce(&MojoAudioOutputStream::OnError, base::Unretained(this)));
- client_.set_connection_error_handler(
- base::BindOnce(&MojoAudioOutputStream::OnError, base::Unretained(this)));
delegate_ = std::move(create_delegate_callback).Run(this);
if (!delegate_) {
// Failed to initialize the stream. We cannot call |deleter_callback_| yet,
// since construction isn't done.
- binding_.Close();
base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE,
base::BindOnce(&MojoAudioOutputStream::OnStreamError,
@@ -72,44 +63,46 @@ void MojoAudioOutputStream::SetVolume(double volume) {
void MojoAudioOutputStream::OnStreamCreated(
int stream_id,
- const base::SharedMemory* shared_memory,
+ base::UnsafeSharedMemoryRegion shared_memory_region,
std::unique_ptr<base::CancelableSyncSocket> foreign_socket) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(stream_created_callback_);
- DCHECK(shared_memory);
DCHECK(foreign_socket);
- base::SharedMemoryHandle foreign_memory_handle =
- base::SharedMemory::DuplicateHandle(shared_memory->handle());
- if (!base::SharedMemory::IsHandleValid(foreign_memory_handle)) {
+ if (!shared_memory_region.IsValid()) {
OnStreamError(/*not used*/ 0);
return;
}
- mojo::ScopedSharedBufferHandle buffer_handle = mojo::WrapSharedMemoryHandle(
- foreign_memory_handle, shared_memory->requested_size(),
- mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite);
+ mojo::ScopedSharedBufferHandle buffer_handle =
+ mojo::WrapUnsafeSharedMemoryRegion(std::move(shared_memory_region));
mojo::ScopedHandle socket_handle =
mojo::WrapPlatformFile(foreign_socket->Release());
DCHECK(buffer_handle.is_valid());
DCHECK(socket_handle.is_valid());
- base::ResetAndReturn(&stream_created_callback_)
- .Run(
- {base::in_place, std::move(buffer_handle), std::move(socket_handle)});
+ mojom::AudioOutputStreamPtr stream;
+ binding_.Bind(mojo::MakeRequest(&stream));
+ // |this| owns |binding_| so unretained is safe.
+ binding_.set_connection_error_handler(base::BindOnce(
+ &MojoAudioOutputStream::StreamConnectionLost, base::Unretained(this)));
+
+ std::move(stream_created_callback_)
+ .Run(std::move(stream), {base::in_place, std::move(buffer_handle),
+ std::move(socket_handle)});
}
void MojoAudioOutputStream::OnStreamError(int stream_id) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- client_->OnError();
- OnError();
+ DCHECK(deleter_callback_);
+ std::move(deleter_callback_).Run(/*had_error*/ true); // Deletes |this|.
}
-void MojoAudioOutputStream::OnError() {
+void MojoAudioOutputStream::StreamConnectionLost() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(deleter_callback_);
- std::move(deleter_callback_).Run(); // Deletes |this|.
+ std::move(deleter_callback_).Run(/*had_error*/ false); // Deletes |this|.
}
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream.h b/chromium/media/mojo/services/mojo_audio_output_stream.h
index 217052605cf..31c1fcf5eef 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream.h
+++ b/chromium/media/mojo/services/mojo_audio_output_stream.h
@@ -23,21 +23,22 @@ class MEDIA_MOJO_EXPORT MojoAudioOutputStream
public AudioOutputDelegate::EventHandler {
public:
using StreamCreatedCallback =
- mojom::AudioOutputStreamProvider::AcquireCallback;
+ base::OnceCallback<void(mojom::AudioOutputStreamPtr,
+ media::mojom::AudioDataPipePtr)>;
using CreateDelegateCallback =
base::OnceCallback<std::unique_ptr<AudioOutputDelegate>(
AudioOutputDelegate::EventHandler*)>;
+ using DeleterCallback = base::OnceCallback<void(bool)>;
// |create_delegate_callback| is used to obtain an AudioOutputDelegate for the
// stream in the constructor. |stream_created_callback| is called when the
// stream has been initialized. |deleter_callback| is called when this class
- // should be removed (stream ended/error). |deleter_callback| is required to
- // destroy |this| synchronously.
- MojoAudioOutputStream(mojom::AudioOutputStreamRequest request,
- mojom::AudioOutputStreamClientPtr client,
- CreateDelegateCallback create_delegate_callback,
+ // should be removed (stream ended/error). Its argument indicates if an error
+ // was encountered (false indicates that the remote end closed the stream).
+ // |deleter_callback| is required to destroy |this| synchronously.
+ MojoAudioOutputStream(CreateDelegateCallback create_delegate_callback,
StreamCreatedCallback stream_created_callback,
- base::OnceClosure deleter_callback);
+ DeleterCallback deleter_callback);
~MojoAudioOutputStream() override;
@@ -50,19 +51,17 @@ class MEDIA_MOJO_EXPORT MojoAudioOutputStream
// AudioOutputDelegate::EventHandler implementation.
void OnStreamCreated(
int stream_id,
- const base::SharedMemory* shared_memory,
+ base::UnsafeSharedMemoryRegion shared_memory_region,
std::unique_ptr<base::CancelableSyncSocket> foreign_socket) override;
void OnStreamError(int stream_id) override;
- // Closes connection to client and notifies owner.
- void OnError();
+ void StreamConnectionLost();
SEQUENCE_CHECKER(sequence_checker_);
StreamCreatedCallback stream_created_callback_;
- base::OnceClosure deleter_callback_;
+ DeleterCallback deleter_callback_;
mojo::Binding<AudioOutputStream> binding_;
- mojom::AudioOutputStreamClientPtr client_;
std::unique_ptr<AudioOutputDelegate> delegate_;
base::WeakPtrFactory<MojoAudioOutputStream> weak_factory_;
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc b/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
index 32e7b7fc790..1a661e8099b 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
@@ -23,8 +23,9 @@ MojoAudioOutputStreamProvider::MojoAudioOutputStreamProvider(
observer_binding_(observer_.get()) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// Unretained is safe since |this| owns |binding_|.
- binding_.set_connection_error_handler(base::Bind(
- &MojoAudioOutputStreamProvider::OnError, base::Unretained(this)));
+ binding_.set_connection_error_handler(
+ base::BindOnce(&MojoAudioOutputStreamProvider::CleanUp,
+ base::Unretained(this), /*had_error*/ false));
DCHECK(create_delegate_callback_);
DCHECK(deleter_callback_);
}
@@ -34,10 +35,8 @@ MojoAudioOutputStreamProvider::~MojoAudioOutputStreamProvider() {
}
void MojoAudioOutputStreamProvider::Acquire(
- mojom::AudioOutputStreamRequest stream_request,
- mojom::AudioOutputStreamClientPtr client,
const AudioParameters& params,
- AcquireCallback callback) {
+ mojom::AudioOutputStreamProviderClientPtr provider_client) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
#if !defined(OS_ANDROID)
if (params.IsBitstreamFormat()) {
@@ -53,29 +52,32 @@ void MojoAudioOutputStreamProvider::Acquire(
return;
}
+ provider_client_ = std::move(provider_client);
+
mojom::AudioOutputStreamObserverPtr observer_ptr;
observer_binding_.Bind(mojo::MakeRequest(&observer_ptr));
// Unretained is safe since |this| owns |audio_output_|.
- audio_output_.emplace(std::move(stream_request), std::move(client),
- base::BindOnce(std::move(create_delegate_callback_),
- params, std::move(observer_ptr)),
- std::move(callback),
- base::BindOnce(&MojoAudioOutputStreamProvider::OnError,
- base::Unretained(this)));
+ audio_output_.emplace(
+ base::BindOnce(std::move(create_delegate_callback_), params,
+ std::move(observer_ptr)),
+ base::BindOnce(&mojom::AudioOutputStreamProviderClient::Created,
+ base::Unretained(provider_client_.get())),
+ base::BindOnce(&MojoAudioOutputStreamProvider::CleanUp,
+ base::Unretained(this)));
}
-void MojoAudioOutputStreamProvider::OnError() {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- // Deletes |this|:
+void MojoAudioOutputStreamProvider::CleanUp(bool had_error) {
+ if (had_error) {
+ provider_client_.ResetWithReason(
+ static_cast<uint32_t>(media::mojom::AudioOutputStreamObserver::
+ DisconnectReason::kPlatformError),
+ std::string());
+ }
std::move(deleter_callback_).Run(this);
}
void MojoAudioOutputStreamProvider::BadMessage(const std::string& error) {
mojo::ReportBadMessage(error);
- if (binding_.is_bound())
- binding_.Unbind();
- if (observer_binding_.is_bound())
- observer_binding_.Unbind();
std::move(deleter_callback_).Run(this); // deletes |this|.
}
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider.h b/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
index f642571b1a0..0d580f31b98 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
@@ -42,25 +42,25 @@ class MEDIA_MOJO_EXPORT MojoAudioOutputStreamProvider
private:
// mojom::AudioOutputStreamProvider implementation.
- void Acquire(mojom::AudioOutputStreamRequest stream_request,
- mojom::AudioOutputStreamClientPtr client,
- const AudioParameters& params,
- AcquireCallback acquire_callback) override;
+ void Acquire(
+ const AudioParameters& params,
+ mojom::AudioOutputStreamProviderClientPtr provider_client) override;
// Called when |audio_output_| had an error.
- void OnError();
+ void CleanUp(bool had_error);
// Closes mojo connections, reports a bad message, and self-destructs.
void BadMessage(const std::string& error);
SEQUENCE_CHECKER(sequence_checker_);
- base::Optional<MojoAudioOutputStream> audio_output_;
mojo::Binding<AudioOutputStreamProvider> binding_;
CreateDelegateCallback create_delegate_callback_;
DeleterCallback deleter_callback_;
std::unique_ptr<mojom::AudioOutputStreamObserver> observer_;
mojo::Binding<mojom::AudioOutputStreamObserver> observer_binding_;
+ base::Optional<MojoAudioOutputStream> audio_output_;
+ mojom::AudioOutputStreamProviderClientPtr provider_client_;
DISALLOW_COPY_AND_ASSIGN(MojoAudioOutputStreamProvider);
};
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
index 49edfa94372..e559976433e 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
@@ -31,8 +31,6 @@ using testing::StrictMock;
using MockDeleter = base::MockCallback<
base::OnceCallback<void(mojom::AudioOutputStreamProvider*)>>;
-void FakeAcquireCallback(mojom::AudioDataPipePtr data_pipe) {}
-
class FakeObserver : public mojom::AudioOutputStreamObserver {
public:
FakeObserver() = default;
@@ -83,26 +81,22 @@ TEST(MojoAudioOutputStreamProviderTest, AcquireTwice_BadMessage) {
mojo::MakeRequest(&provider_ptr), base::BindOnce(&CreateFakeDelegate),
deleter.Get(), std::make_unique<FakeObserver>());
- mojom::AudioOutputStreamPtr stream_1;
- mojom::AudioOutputStreamClientPtr client_1;
- mojom::AudioOutputStreamClientRequest client_request_1 =
- mojo::MakeRequest(&client_1);
-
- mojom::AudioOutputStreamPtr stream_2;
- mojom::AudioOutputStreamClientPtr client_2;
- mojom::AudioOutputStreamClientRequest client_request_2 =
- mojo::MakeRequest(&client_2);
- provider_ptr->Acquire(mojo::MakeRequest(&stream_1), std::move(client_1),
- media::AudioParameters::UnavailableDeviceParams(),
- base::BindOnce(&FakeAcquireCallback));
- provider_ptr->Acquire(mojo::MakeRequest(&stream_2), std::move(client_2),
- media::AudioParameters::UnavailableDeviceParams(),
- base::BindOnce(&FakeAcquireCallback));
+ mojom::AudioOutputStreamProviderClientPtr client_1;
+ mojo::MakeRequest(&client_1);
+ provider_ptr->Acquire(media::AudioParameters::UnavailableDeviceParams(),
+ std::move(client_1));
+
+ mojom::AudioOutputStreamProviderClientPtr client_2;
+ mojo::MakeRequest(&client_2);
+ provider_ptr->Acquire(media::AudioParameters::UnavailableDeviceParams(),
+ std::move(client_2));
EXPECT_CALL(deleter, Run(provider)).WillOnce(DeleteArg<0>());
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(got_bad_message);
Mock::VerifyAndClear(&deleter);
+
+ mojo::edk::SetDefaultProcessErrorCallback(mojo::edk::ProcessErrorCallback());
}
TEST(MojoAudioOutputStreamProviderTest,
@@ -124,12 +118,9 @@ TEST(MojoAudioOutputStreamProviderTest,
mojo::MakeRequest(&provider_ptr), base::BindOnce(&CreateFakeDelegate),
deleter.Get(), std::make_unique<FakeObserver>());
- mojom::AudioOutputStreamPtr stream;
- mojom::AudioOutputStreamClientPtr client;
- mojom::AudioOutputStreamClientRequest client_request =
- mojo::MakeRequest(&client);
- provider_ptr->Acquire(mojo::MakeRequest(&stream), std::move(client), params,
- base::BindOnce(&FakeAcquireCallback));
+ mojom::AudioOutputStreamProviderClientPtr client;
+ mojo::MakeRequest(&client);
+ provider_ptr->Acquire(params, std::move(client));
#if defined(OS_ANDROID)
base::RunLoop().RunUntilIdle();
@@ -144,6 +135,7 @@ TEST(MojoAudioOutputStreamProviderTest,
EXPECT_TRUE(got_bad_message);
Mock::VerifyAndClear(&deleter);
#endif
+ mojo::edk::SetDefaultProcessErrorCallback(mojo::edk::ProcessErrorCallback());
}
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
index efd161972c2..508e845d35f 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
@@ -11,6 +11,7 @@
#include "base/run_loop.h"
#include "base/sync_socket.h"
#include "media/audio/audio_output_controller.h"
+#include "mojo/public/cpp/system/message_pipe.h"
#include "mojo/public/cpp/system/platform_handle.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -57,7 +58,7 @@ class TestCancelableSyncSocket : public base::CancelableSyncSocket {
class MockDelegate : public AudioOutputDelegate {
public:
MockDelegate() = default;
- ~MockDelegate() = default;
+ ~MockDelegate() override = default;
MOCK_METHOD0(GetStreamId, int());
MOCK_METHOD0(OnPlayStream, void());
@@ -88,14 +89,14 @@ class MockDelegateFactory {
class MockDeleter {
public:
- MOCK_METHOD0(Finished, void());
+ MOCK_METHOD1(Finished, void(bool));
};
-class MockClient : public mojom::AudioOutputStreamClient {
+class MockClient {
public:
MockClient() = default;
- void Initialized(mojom::AudioDataPipePtr data_pipe) {
+ void Initialize(mojom::AudioDataPipePtr data_pipe) {
ASSERT_TRUE(data_pipe->shared_memory.is_valid());
ASSERT_TRUE(data_pipe->socket.is_valid());
@@ -121,8 +122,6 @@ class MockClient : public mojom::AudioOutputStreamClient {
MOCK_METHOD0(GotNotification, void());
- MOCK_METHOD0(OnError, void());
-
private:
std::unique_ptr<base::SharedMemory> buffer_;
std::unique_ptr<base::CancelableSyncSocket> socket_;
@@ -133,9 +132,9 @@ std::unique_ptr<AudioOutputDelegate> CreateNoDelegate(
return nullptr;
}
-void NotCalled(mojom::AudioDataPipePtr data_pipe) {
- EXPECT_TRUE(false) << "The StreamCreated callback was called despite the "
- "test expecting it not to.";
+void NotCalled(mojom::AudioOutputStreamPtr, mojom::AudioDataPipePtr) {
+ ADD_FAILURE() << "The StreamCreated callback was called despite the test "
+ "expecting it not to.";
}
} // namespace
@@ -143,30 +142,38 @@ void NotCalled(mojom::AudioDataPipePtr data_pipe) {
class MojoAudioOutputStreamTest : public Test {
public:
MojoAudioOutputStreamTest()
- : foreign_socket_(std::make_unique<TestCancelableSyncSocket>()),
- client_binding_(&client_, mojo::MakeRequest(&client_ptr_)) {}
+ : foreign_socket_(std::make_unique<TestCancelableSyncSocket>()) {}
AudioOutputStreamPtr CreateAudioOutput() {
- AudioOutputStreamPtr p;
+ mojom::AudioOutputStreamPtr p;
+ pending_stream_request_ = mojo::MakeRequest(&p);
ExpectDelegateCreation();
impl_ = std::make_unique<MojoAudioOutputStream>(
- mojo::MakeRequest(&p), std::move(client_ptr_),
base::BindOnce(&MockDelegateFactory::CreateDelegate,
base::Unretained(&mock_delegate_factory_)),
- base::BindOnce(&MockClient::Initialized, base::Unretained(&client_)),
+ base::BindOnce(&MojoAudioOutputStreamTest::CreatedStream,
+ base::Unretained(this)),
base::BindOnce(&MockDeleter::Finished, base::Unretained(&deleter_)));
- EXPECT_TRUE(p.is_bound());
return p;
}
protected:
+ void CreatedStream(mojom::AudioOutputStreamPtr stream,
+ mojom::AudioDataPipePtr data_pipe) {
+ EXPECT_EQ(mojo::FuseMessagePipes(pending_stream_request_.PassMessagePipe(),
+ stream.PassInterface().PassHandle()),
+ MOJO_RESULT_OK);
+ client_.Initialize(std::move(data_pipe));
+ }
+
void ExpectDelegateCreation() {
delegate_ = new StrictMock<MockDelegate>();
mock_delegate_factory_.PrepareDelegateForCreation(
base::WrapUnique(delegate_));
EXPECT_TRUE(
base::CancelableSyncSocket::CreatePair(&local_, foreign_socket_.get()));
- EXPECT_TRUE(mem_.CreateAnonymous(kShmemSize));
+ mem_ = base::UnsafeSharedMemoryRegion::Create(kShmemSize);
+ EXPECT_TRUE(mem_.IsValid());
EXPECT_CALL(mock_delegate_factory_, MockCreateDelegate(NotNull()))
.WillOnce(SaveArg<0>(&delegate_event_handler_));
}
@@ -174,51 +181,57 @@ class MojoAudioOutputStreamTest : public Test {
base::MessageLoop loop_;
base::CancelableSyncSocket local_;
std::unique_ptr<TestCancelableSyncSocket> foreign_socket_;
- base::SharedMemory mem_;
+ base::UnsafeSharedMemoryRegion mem_;
StrictMock<MockDelegate>* delegate_ = nullptr;
AudioOutputDelegate::EventHandler* delegate_event_handler_ = nullptr;
StrictMock<MockDelegateFactory> mock_delegate_factory_;
StrictMock<MockDeleter> deleter_;
StrictMock<MockClient> client_;
- media::mojom::AudioOutputStreamClientPtr client_ptr_;
- mojo::Binding<media::mojom::AudioOutputStreamClient> client_binding_;
+ mojom::AudioOutputStreamRequest pending_stream_request_;
std::unique_ptr<MojoAudioOutputStream> impl_;
};
TEST_F(MojoAudioOutputStreamTest, NoDelegate_SignalsError) {
- bool deleter_called = false;
- EXPECT_CALL(client_, OnError()).Times(1);
mojom::AudioOutputStreamPtr stream_ptr;
MojoAudioOutputStream stream(
- mojo::MakeRequest(&stream_ptr), std::move(client_ptr_),
base::BindOnce(&CreateNoDelegate), base::BindOnce(&NotCalled),
- base::BindOnce([](bool* p) { *p = true; }, &deleter_called));
- EXPECT_FALSE(deleter_called)
- << "Stream shouldn't call the deleter from its constructor.";
+ base::BindOnce(&MockDeleter::Finished, base::Unretained(&deleter_)));
+ EXPECT_CALL(deleter_, Finished(true));
base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(deleter_called);
}
TEST_F(MojoAudioOutputStreamTest, Play_Plays) {
AudioOutputStreamPtr audio_output_ptr = CreateAudioOutput();
+
+ EXPECT_CALL(client_, GotNotification());
EXPECT_CALL(*delegate_, OnPlayStream());
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_));
audio_output_ptr->Play();
base::RunLoop().RunUntilIdle();
}
TEST_F(MojoAudioOutputStreamTest, Pause_Pauses) {
AudioOutputStreamPtr audio_output_ptr = CreateAudioOutput();
+
+ EXPECT_CALL(client_, GotNotification());
EXPECT_CALL(*delegate_, OnPauseStream());
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_));
audio_output_ptr->Pause();
base::RunLoop().RunUntilIdle();
}
TEST_F(MojoAudioOutputStreamTest, SetVolume_SetsVolume) {
AudioOutputStreamPtr audio_output_ptr = CreateAudioOutput();
+
+ EXPECT_CALL(client_, GotNotification());
EXPECT_CALL(*delegate_, OnSetVolume(kNewVolume));
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_));
audio_output_ptr->SetVolume(kNewVolume);
base::RunLoop().RunUntilIdle();
}
@@ -230,7 +243,7 @@ TEST_F(MojoAudioOutputStreamTest, DestructWithCallPending_Safe) {
ASSERT_NE(nullptr, delegate_event_handler_);
foreign_socket_->ExpectOwnershipTransfer();
- delegate_event_handler_->OnStreamCreated(kStreamId, &mem_,
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
std::move(foreign_socket_));
audio_output_ptr->Play();
impl_.reset();
@@ -245,7 +258,7 @@ TEST_F(MojoAudioOutputStreamTest, Created_NotifiesClient) {
ASSERT_NE(nullptr, delegate_event_handler_);
foreign_socket_->ExpectOwnershipTransfer();
- delegate_event_handler_->OnStreamCreated(kStreamId, &mem_,
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
std::move(foreign_socket_));
base::RunLoop().RunUntilIdle();
@@ -253,9 +266,11 @@ TEST_F(MojoAudioOutputStreamTest, Created_NotifiesClient) {
TEST_F(MojoAudioOutputStreamTest, SetVolumeTooLarge_Error) {
AudioOutputStreamPtr audio_output_ptr = CreateAudioOutput();
- EXPECT_CALL(deleter_, Finished());
- EXPECT_CALL(client_, OnError()).Times(1);
+ EXPECT_CALL(deleter_, Finished(true));
+ EXPECT_CALL(client_, GotNotification());
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_));
audio_output_ptr->SetVolume(15);
base::RunLoop().RunUntilIdle();
Mock::VerifyAndClear(&deleter_);
@@ -263,9 +278,11 @@ TEST_F(MojoAudioOutputStreamTest, SetVolumeTooLarge_Error) {
TEST_F(MojoAudioOutputStreamTest, SetVolumeNegative_Error) {
AudioOutputStreamPtr audio_output_ptr = CreateAudioOutput();
- EXPECT_CALL(deleter_, Finished());
- EXPECT_CALL(client_, OnError()).Times(1);
+ EXPECT_CALL(deleter_, Finished(true));
+ EXPECT_CALL(client_, GotNotification());
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_));
audio_output_ptr->SetVolume(-0.5);
base::RunLoop().RunUntilIdle();
Mock::VerifyAndClear(&deleter_);
@@ -273,8 +290,7 @@ TEST_F(MojoAudioOutputStreamTest, SetVolumeNegative_Error) {
TEST_F(MojoAudioOutputStreamTest, DelegateErrorBeforeCreated_PropagatesError) {
AudioOutputStreamPtr audio_output_ptr = CreateAudioOutput();
- EXPECT_CALL(deleter_, Finished());
- EXPECT_CALL(client_, OnError()).Times(1);
+ EXPECT_CALL(deleter_, Finished(true));
ASSERT_NE(nullptr, delegate_event_handler_);
delegate_event_handler_->OnStreamError(kStreamId);
@@ -286,13 +302,12 @@ TEST_F(MojoAudioOutputStreamTest, DelegateErrorBeforeCreated_PropagatesError) {
TEST_F(MojoAudioOutputStreamTest, DelegateErrorAfterCreated_PropagatesError) {
AudioOutputStreamPtr audio_output_ptr = CreateAudioOutput();
EXPECT_CALL(client_, GotNotification());
- EXPECT_CALL(deleter_, Finished());
- EXPECT_CALL(client_, OnError()).Times(1);
+ EXPECT_CALL(deleter_, Finished(true));
base::RunLoop().RunUntilIdle();
ASSERT_NE(nullptr, delegate_event_handler_);
foreign_socket_->ExpectOwnershipTransfer();
- delegate_event_handler_->OnStreamCreated(kStreamId, &mem_,
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
std::move(foreign_socket_));
delegate_event_handler_->OnStreamError(kStreamId);
@@ -300,9 +315,14 @@ TEST_F(MojoAudioOutputStreamTest, DelegateErrorAfterCreated_PropagatesError) {
Mock::VerifyAndClear(&deleter_);
}
-TEST_F(MojoAudioOutputStreamTest, RemoteEndGone_Error) {
+TEST_F(MojoAudioOutputStreamTest, RemoteEndGone_CallsDeleter) {
AudioOutputStreamPtr audio_output_ptr = CreateAudioOutput();
- EXPECT_CALL(deleter_, Finished());
+
+ EXPECT_CALL(client_, GotNotification());
+ EXPECT_CALL(deleter_, Finished(false));
+
+ delegate_event_handler_->OnStreamCreated(kStreamId, std::move(mem_),
+ std::move(foreign_socket_));
audio_output_ptr.reset();
base::RunLoop().RunUntilIdle();
Mock::VerifyAndClear(&deleter_);
diff --git a/chromium/media/mojo/services/mojo_cdm_service.cc b/chromium/media/mojo/services/mojo_cdm_service.cc
index 5c39b204fe2..31bb3e7fa5e 100644
--- a/chromium/media/mojo/services/mojo_cdm_service.cc
+++ b/chromium/media/mojo/services/mojo_cdm_service.cc
@@ -166,12 +166,17 @@ void MojoCdmService::OnCdmCreated(
// If |cdm| has a decryptor, create the MojoDecryptorService
// and pass the connection back to the client.
- mojom::DecryptorPtr decryptor_service;
+ mojom::DecryptorPtr decryptor_ptr;
CdmContext* const cdm_context = cdm_->GetCdmContext();
if (cdm_context && cdm_context->GetDecryptor()) {
- decryptor_.reset(new MojoDecryptorService(
- cdm_context->GetDecryptor(), MakeRequest(&decryptor_service),
- base::Bind(&MojoCdmService::OnDecryptorConnectionError, weak_this_)));
+ // Both |cdm_| and |decryptor_| are owned by |this|, so we don't need to
+ // pass in a CdmContextRef.
+ decryptor_.reset(
+ new MojoDecryptorService(cdm_context->GetDecryptor(), nullptr));
+ decryptor_binding_ = std::make_unique<mojo::Binding<mojom::Decryptor>>(
+ decryptor_.get(), MakeRequest(&decryptor_ptr));
+ decryptor_binding_->set_connection_error_handler(base::BindOnce(
+ &MojoCdmService::OnDecryptorConnectionError, weak_this_));
}
// If the |context_| is not null, we should support connecting the |cdm| with
@@ -184,7 +189,7 @@ void MojoCdmService::OnCdmCreated(
cdm_promise_result->success = true;
std::move(callback).Run(std::move(cdm_promise_result), cdm_id,
- std::move(decryptor_service));
+ std::move(decryptor_ptr));
}
void MojoCdmService::OnSessionMessage(const std::string& session_id,
diff --git a/chromium/media/mojo/services/mojo_cdm_service.h b/chromium/media/mojo/services/mojo_cdm_service.h
index 051d46dd18b..716620b9e6c 100644
--- a/chromium/media/mojo/services/mojo_cdm_service.h
+++ b/chromium/media/mojo/services/mojo_cdm_service.h
@@ -21,6 +21,7 @@
#include "media/mojo/services/mojo_cdm_promise.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "media/mojo/services/mojo_decryptor_service.h"
+#include "mojo/public/cpp/bindings/binding.h"
namespace media {
@@ -106,6 +107,7 @@ class MEDIA_MOJO_EXPORT MojoCdmService : public mojom::ContentDecryptionModule {
// MojoDecryptorService is passed the Decryptor from |cdm_|, so
// |decryptor_| must not outlive |cdm_|.
std::unique_ptr<MojoDecryptorService> decryptor_;
+ std::unique_ptr<mojo::Binding<mojom::Decryptor>> decryptor_binding_;
// Set to a valid CDM ID if the |cdm_| is successfully created.
int cdm_id_;
diff --git a/chromium/media/mojo/services/mojo_cdm_service_context.cc b/chromium/media/mojo/services/mojo_cdm_service_context.cc
index f7d62ccd6a0..49e71b5e111 100644
--- a/chromium/media/mojo/services/mojo_cdm_service_context.cc
+++ b/chromium/media/mojo/services/mojo_cdm_service_context.cc
@@ -38,13 +38,14 @@ class CdmProxyContextRef : public CdmContextRef, public CdmContext {
private:
// CdmContext implementation.
- CdmProxyContext* GetCdmProxyContext() final {
+ Decryptor* GetDecryptor() final {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ return cdm_context_ ? cdm_context_->GetDecryptor() : nullptr;
+ }
- if (!cdm_context_)
- return nullptr;
-
- return cdm_context_->GetCdmProxyContext();
+ CdmProxyContext* GetCdmProxyContext() final {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ return cdm_context_ ? cdm_context_->GetCdmProxyContext() : nullptr;
}
base::WeakPtr<CdmContext> cdm_context_;
diff --git a/chromium/media/mojo/services/mojo_decryptor_service.cc b/chromium/media/mojo/services/mojo_decryptor_service.cc
index 99307ad60fb..8a08b90f305 100644
--- a/chromium/media/mojo/services/mojo_decryptor_service.cc
+++ b/chromium/media/mojo/services/mojo_decryptor_service.cc
@@ -9,6 +9,7 @@
#include "base/bind.h"
#include "base/numerics/safe_conversions.h"
#include "media/base/audio_decoder_config.h"
+#include "media/base/cdm_context.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decryptor.h"
#include "media/base/video_decoder_config.h"
@@ -17,6 +18,7 @@
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/mojo/common/mojo_shared_buffer_video_frame.h"
#include "media/mojo/interfaces/demuxer_stream.mojom.h"
+#include "media/mojo/services/mojo_cdm_service_context.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
namespace media {
@@ -45,17 +47,40 @@ class FrameResourceReleaserImpl final : public mojom::FrameResourceReleaser {
} // namespace
+// static
+std::unique_ptr<MojoDecryptorService> MojoDecryptorService::Create(
+ int cdm_id,
+ MojoCdmServiceContext* mojo_cdm_service_context) {
+ auto cdm_context_ref = mojo_cdm_service_context->GetCdmContextRef(cdm_id);
+ if (!cdm_context_ref) {
+ DVLOG(1) << "CdmContextRef not found for CDM ID: " << cdm_id;
+ return nullptr;
+ }
+
+ auto* cdm_context = cdm_context_ref->GetCdmContext();
+ DCHECK(cdm_context);
+
+ auto* decryptor = cdm_context->GetDecryptor();
+ if (!decryptor) {
+ DVLOG(1) << "CdmContext does not support Decryptor";
+ return nullptr;
+ }
+
+ return std::make_unique<MojoDecryptorService>(decryptor,
+ std::move(cdm_context_ref));
+}
+
MojoDecryptorService::MojoDecryptorService(
media::Decryptor* decryptor,
- mojo::InterfaceRequest<mojom::Decryptor> request,
- const base::Closure& error_handler)
- : binding_(this, std::move(request)),
- decryptor_(decryptor),
+ std::unique_ptr<CdmContextRef> cdm_context_ref)
+ : decryptor_(decryptor),
+ cdm_context_ref_(std::move(cdm_context_ref)),
weak_factory_(this) {
DVLOG(1) << __func__;
DCHECK(decryptor_);
+ // |cdm_context_ref_| could be null, in which case the owner of |this| will
+ // make sure |decryptor_| is always valid.
weak_this_ = weak_factory_.GetWeakPtr();
- binding_.set_connection_error_handler(error_handler);
}
MojoDecryptorService::~MojoDecryptorService() {
diff --git a/chromium/media/mojo/services/mojo_decryptor_service.h b/chromium/media/mojo/services/mojo_decryptor_service.h
index 879456b1f35..17e82580997 100644
--- a/chromium/media/mojo/services/mojo_decryptor_service.h
+++ b/chromium/media/mojo/services/mojo_decryptor_service.h
@@ -13,30 +13,34 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
+#include "media/base/cdm_context.h"
#include "media/base/decryptor.h"
#include "media/mojo/interfaces/decryptor.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
-#include "mojo/public/cpp/bindings/binding.h"
namespace media {
class DecoderBuffer;
+class MojoCdmServiceContext;
class MojoDecoderBufferReader;
class MojoDecoderBufferWriter;
-// A mojom::Decryptor implementation. This object is owned by the creator,
-// and uses a weak binding across the mojo interface.
+// A mojom::Decryptor implementation that proxies decryptor calls to a
+// media::Decryptor.
class MEDIA_MOJO_EXPORT MojoDecryptorService : public mojom::Decryptor {
public:
using StreamType = media::Decryptor::StreamType;
using Status = media::Decryptor::Status;
- // Constructs a MojoDecryptorService and binds it to the |request|.
- // |error_handler| will be called if a connection error occurs.
- // Caller must ensure that |decryptor| outlives |this|.
+ static std::unique_ptr<MojoDecryptorService> Create(
+ int cdm_id,
+ MojoCdmServiceContext* mojo_cdm_service_context);
+
+ // If |cdm_context_ref| is null, caller must ensure that |decryptor| outlives
+ // |this|. Otherwise, |decryptor| is guaranteed to be valid as long as
+ // |cdm_context_ref| is held.
MojoDecryptorService(media::Decryptor* decryptor,
- mojo::InterfaceRequest<mojom::Decryptor> request,
- const base::Closure& error_handler);
+ std::unique_ptr<CdmContextRef> cdm_context_ref);
~MojoDecryptorService() final;
@@ -93,9 +97,6 @@ class MEDIA_MOJO_EXPORT MojoDecryptorService : public mojom::Decryptor {
// Returns audio/video buffer reader according to the |stream_type|.
MojoDecoderBufferReader* GetBufferReader(StreamType stream_type) const;
- // A weak binding is used to connect to the MojoDecryptor.
- mojo::Binding<mojom::Decryptor> binding_;
-
// Helper classes to receive encrypted DecoderBuffer from the client.
std::unique_ptr<MojoDecoderBufferReader> audio_buffer_reader_;
std::unique_ptr<MojoDecoderBufferReader> video_buffer_reader_;
@@ -106,6 +107,10 @@ class MEDIA_MOJO_EXPORT MojoDecryptorService : public mojom::Decryptor {
media::Decryptor* decryptor_;
+ // Holds the CdmContextRef to keep the CdmContext alive for the lifetime of
+ // the |decryptor_|.
+ std::unique_ptr<CdmContextRef> cdm_context_ref_;
+
base::WeakPtr<MojoDecryptorService> weak_this_;
base::WeakPtrFactory<MojoDecryptorService> weak_factory_;
diff --git a/chromium/media/mojo/services/mojo_jpeg_decode_accelerator_service_unittest.cc b/chromium/media/mojo/services/mojo_jpeg_decode_accelerator_service_unittest.cc
index 4e7fc0d33f2..d12bf86edf3 100644
--- a/chromium/media/mojo/services/mojo_jpeg_decode_accelerator_service_unittest.cc
+++ b/chromium/media/mojo/services/mojo_jpeg_decode_accelerator_service_unittest.cc
@@ -78,7 +78,7 @@ TEST_F(MojoJpegDecodeAcceleratorServiceTest, InitializeAndDecode) {
kArbitraryBitstreamBufferId,
base::SharedMemory::DuplicateHandle(shm.handle()),
kInputBufferSizeInBytes);
- bitstream_buffer.SetDecryptConfig(DecryptConfig(kKeyId, kIv, subsamples));
+ bitstream_buffer.SetDecryptionSettings(kKeyId, kIv, subsamples);
jpeg_decoder->Decode(
bitstream_buffer, kDummyFrameCodedSize, std::move(output_frame_handle),
diff --git a/chromium/media/mojo/services/mojo_jpeg_encode_accelerator_service.cc b/chromium/media/mojo/services/mojo_jpeg_encode_accelerator_service.cc
index 25b67848d06..41ec4400eba 100644
--- a/chromium/media/mojo/services/mojo_jpeg_encode_accelerator_service.cc
+++ b/chromium/media/mojo/services/mojo_jpeg_encode_accelerator_service.cc
@@ -145,8 +145,11 @@ void MojoJpegEncodeAcceleratorService::EncodeWithFD(
media::BitstreamBuffer output_buffer(buffer_id, output_shm_handle,
output_buffer_size);
- media::BitstreamBuffer exif_buffer(buffer_id, exif_shm_handle,
- exif_buffer_size);
+ std::unique_ptr<media::BitstreamBuffer> exif_buffer;
+ if (exif_buffer_size > 0) {
+ exif_buffer = std::make_unique<media::BitstreamBuffer>(
+ buffer_id, exif_shm_handle, exif_buffer_size);
+ }
gfx::Size coded_size(coded_size_width, coded_size_height);
if (encode_cb_map_.find(buffer_id) != encode_cb_map_.end()) {
@@ -187,7 +190,7 @@ void MojoJpegEncodeAcceleratorService::EncodeWithFD(
base::Passed(&input_shm)));
DCHECK(accelerator_);
- accelerator_->Encode(frame, kJpegQuality, &exif_buffer, output_buffer);
+ accelerator_->Encode(frame, kJpegQuality, exif_buffer.get(), output_buffer);
#else
NOTREACHED();
#endif
diff --git a/chromium/media/mojo/services/mojo_media_client.cc b/chromium/media/mojo/services/mojo_media_client.cc
index 1bdf6e16971..83ef569ff33 100644
--- a/chromium/media/mojo/services/mojo_media_client.cc
+++ b/chromium/media/mojo/services/mojo_media_client.cc
@@ -6,12 +6,10 @@
#include "base/single_thread_task_runner.h"
#include "media/base/audio_decoder.h"
-#include "media/base/audio_renderer_sink.h"
#include "media/base/cdm_factory.h"
#include "media/base/media_log.h"
-#include "media/base/renderer_factory.h"
+#include "media/base/renderer.h"
#include "media/base/video_decoder.h"
-#include "media/base/video_renderer_sink.h"
#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
#include "media/cdm/cdm_proxy.h"
@@ -34,25 +32,18 @@ std::unique_ptr<VideoDecoder> MojoMediaClient::CreateVideoDecoder(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
mojom::CommandBufferIdPtr command_buffer_id,
- RequestOverlayInfoCB request_overlay_info_cb) {
+ RequestOverlayInfoCB request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space) {
return nullptr;
}
-scoped_refptr<AudioRendererSink> MojoMediaClient::CreateAudioRendererSink(
+std::unique_ptr<Renderer> MojoMediaClient::CreateRenderer(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
const std::string& audio_device_id) {
return nullptr;
}
-std::unique_ptr<VideoRendererSink> MojoMediaClient::CreateVideoRendererSink(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner) {
- return nullptr;
-}
-
-std::unique_ptr<RendererFactory> MojoMediaClient::CreateRendererFactory(
- MediaLog* media_log) {
- return nullptr;
-}
-
std::unique_ptr<CdmFactory> MojoMediaClient::CreateCdmFactory(
service_manager::mojom::InterfaceProvider* host_interfaces) {
return nullptr;
diff --git a/chromium/media/mojo/services/mojo_media_client.h b/chromium/media/mojo/services/mojo_media_client.h
index 9e84464a48d..47c8b9712d6 100644
--- a/chromium/media/mojo/services/mojo_media_client.h
+++ b/chromium/media/mojo/services/mojo_media_client.h
@@ -18,25 +18,27 @@
namespace base {
class SingleThreadTaskRunner;
-}
+} // namespace base
+
+namespace gfx {
+class ColorSpace;
+} // namespace gfx
namespace service_manager {
class Connector;
namespace mojom {
class InterfaceProvider;
-}
+} // namespace mojom
} // namespace service_manager
namespace media {
class AudioDecoder;
-class AudioRendererSink;
class CdmFactory;
class CdmProxy;
class MediaLog;
-class RendererFactory;
+class Renderer;
class VideoDecoder;
-class VideoRendererSink;
class MEDIA_MOJO_EXPORT MojoMediaClient {
public:
@@ -56,22 +58,16 @@ class MEDIA_MOJO_EXPORT MojoMediaClient {
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
mojom::CommandBufferIdPtr command_buffer_id,
- RequestOverlayInfoCB request_overlay_info_cb);
+ RequestOverlayInfoCB request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space);
- // Returns the output sink used for rendering audio on |audio_device_id|.
- // May be null if the RendererFactory doesn't need an audio sink.
- virtual scoped_refptr<AudioRendererSink> CreateAudioRendererSink(
+ // Returns the Renderer to be used by MojoRendererService.
+ // TODO(hubbe): Find out whether we should pass in |target_color_space| here.
+ virtual std::unique_ptr<Renderer> CreateRenderer(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
const std::string& audio_device_id);
- // Returns the output sink used for rendering video.
- // May be null if the RendererFactory doesn't need a video sink.
- virtual std::unique_ptr<VideoRendererSink> CreateVideoRendererSink(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
-
- // Returns the RendererFactory to be used by MojoRendererService.
- virtual std::unique_ptr<RendererFactory> CreateRendererFactory(
- MediaLog* media_log);
-
// Returns the CdmFactory to be used by MojoCdmService. |host_interfaces| can
// be used to request interfaces provided remotely by the host. It may be a
// nullptr if the host chose not to bind the InterfacePtr.
diff --git a/chromium/media/mojo/services/mojo_media_log.cc b/chromium/media/mojo/services/mojo_media_log.cc
index f13f13ea963..30941d4bab8 100644
--- a/chromium/media/mojo/services/mojo_media_log.cc
+++ b/chromium/media/mojo/services/mojo_media_log.cc
@@ -8,9 +8,8 @@
namespace media {
-// TODO(sandersd): Do we need to respond to the channel closing?
MojoMediaLog::MojoMediaLog(
- mojo::AssociatedInterfacePtr<mojom::MediaLog> remote_media_log)
+ scoped_refptr<mojom::ThreadSafeMediaLogAssociatedPtr> remote_media_log)
: remote_media_log_(std::move(remote_media_log)) {
DVLOG(1) << __func__;
}
@@ -22,7 +21,7 @@ MojoMediaLog::~MojoMediaLog() {
void MojoMediaLog::AddEvent(std::unique_ptr<MediaLogEvent> event) {
DVLOG(1) << __func__;
DCHECK(event);
- remote_media_log_->AddEvent(*event);
+ (**remote_media_log_).AddEvent(*event);
}
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_media_log.h b/chromium/media/mojo/services/mojo_media_log.h
index bf46c381486..78e526f359a 100644
--- a/chromium/media/mojo/services/mojo_media_log.h
+++ b/chromium/media/mojo/services/mojo_media_log.h
@@ -8,9 +8,9 @@
#include <memory>
#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
#include "media/base/media_log.h"
#include "media/mojo/interfaces/media_log.mojom.h"
-#include "mojo/public/cpp/bindings/associated_interface_ptr.h"
namespace media {
@@ -18,14 +18,14 @@ class MojoMediaLog final : public MediaLog {
public:
// TODO(sandersd): Template on Ptr type to support non-associated.
explicit MojoMediaLog(
- mojo::AssociatedInterfacePtr<mojom::MediaLog> remote_media_log);
+ scoped_refptr<mojom::ThreadSafeMediaLogAssociatedPtr> remote_media_log);
~MojoMediaLog() final;
// MediaLog implementation.
void AddEvent(std::unique_ptr<MediaLogEvent> event) override;
private:
- mojo::AssociatedInterfacePtr<mojom::MediaLog> remote_media_log_;
+ scoped_refptr<mojom::ThreadSafeMediaLogAssociatedPtr> remote_media_log_;
DISALLOW_COPY_AND_ASSIGN(MojoMediaLog);
};
diff --git a/chromium/media/mojo/services/mojo_renderer_service.cc b/chromium/media/mojo/services/mojo_renderer_service.cc
index 7097a77d07a..f385149ccb6 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.cc
+++ b/chromium/media/mojo/services/mojo_renderer_service.cc
@@ -9,11 +9,9 @@
#include "base/bind.h"
#include "base/memory/ptr_util.h"
#include "base/optional.h"
-#include "media/base/audio_renderer_sink.h"
#include "media/base/cdm_context.h"
#include "media/base/media_url_demuxer.h"
#include "media/base/renderer.h"
-#include "media/base/video_renderer_sink.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/services/media_resource_shim.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
@@ -36,14 +34,12 @@ const int kTimeUpdateIntervalMs = 50;
// static
mojo::StrongBindingPtr<mojom::Renderer> MojoRendererService::Create(
MojoCdmServiceContext* mojo_cdm_service_context,
- scoped_refptr<AudioRendererSink> audio_sink,
- std::unique_ptr<VideoRendererSink> video_sink,
std::unique_ptr<media::Renderer> renderer,
const InitiateSurfaceRequestCB& initiate_surface_request_cb,
mojo::InterfaceRequest<mojom::Renderer> request) {
- MojoRendererService* service = new MojoRendererService(
- mojo_cdm_service_context, std::move(audio_sink), std::move(video_sink),
- std::move(renderer), initiate_surface_request_cb);
+ MojoRendererService* service =
+ new MojoRendererService(mojo_cdm_service_context, std::move(renderer),
+ initiate_surface_request_cb);
mojo::StrongBindingPtr<mojom::Renderer> binding =
mojo::MakeStrongBinding<mojom::Renderer>(base::WrapUnique(service),
@@ -54,27 +50,13 @@ mojo::StrongBindingPtr<mojom::Renderer> MojoRendererService::Create(
return binding;
}
-// static
-mojo::StrongBindingPtr<mojom::Renderer> MojoRendererService::Create(
- std::unique_ptr<media::Renderer> renderer,
- const InitiateSurfaceRequestCB& initiate_surface_request_cb,
- mojo::InterfaceRequest<mojom::Renderer> request) {
- return MojoRendererService::Create(
- nullptr, nullptr, nullptr, std::move(renderer),
- initiate_surface_request_cb, std::move(request));
-}
-
MojoRendererService::MojoRendererService(
MojoCdmServiceContext* mojo_cdm_service_context,
- scoped_refptr<AudioRendererSink> audio_sink,
- std::unique_ptr<VideoRendererSink> video_sink,
std::unique_ptr<media::Renderer> renderer,
InitiateSurfaceRequestCB initiate_surface_request_cb)
: mojo_cdm_service_context_(mojo_cdm_service_context),
state_(STATE_UNINITIALIZED),
playback_rate_(0),
- audio_sink_(std::move(audio_sink)),
- video_sink_(std::move(video_sink)),
renderer_(std::move(renderer)),
initiate_surface_request_cb_(initiate_surface_request_cb),
weak_factory_(this) {
@@ -281,9 +263,8 @@ void MojoRendererService::OnFlushCompleted(FlushCallback callback) {
std::move(callback).Run();
}
-void MojoRendererService::OnCdmAttached(
- base::OnceCallback<void(bool)> callback,
- bool success) {
+void MojoRendererService::OnCdmAttached(base::OnceCallback<void(bool)> callback,
+ bool success) {
DVLOG(1) << __func__ << "(" << success << ")";
if (!success)
diff --git a/chromium/media/mojo/services/mojo_renderer_service.h b/chromium/media/mojo/services/mojo_renderer_service.h
index 8303d772c7b..49a192a55d3 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.h
+++ b/chromium/media/mojo/services/mojo_renderer_service.h
@@ -25,12 +25,10 @@
namespace media {
-class AudioRendererSink;
class CdmContextRef;
class MediaResourceShim;
class MojoCdmServiceContext;
class Renderer;
-class VideoRendererSink;
// A mojom::Renderer implementation that use a media::Renderer to render
// media streams.
@@ -43,21 +41,6 @@ class MEDIA_MOJO_EXPORT MojoRendererService : public mojom::Renderer,
// which is safely accessible via the returned StrongBindingPtr.
static mojo::StrongBindingPtr<mojom::Renderer> Create(
MojoCdmServiceContext* mojo_cdm_service_context,
- scoped_refptr<AudioRendererSink> audio_sink,
- std::unique_ptr<VideoRendererSink> video_sink,
- std::unique_ptr<media::Renderer> renderer,
- const InitiateSurfaceRequestCB& initiate_surface_request_cb,
- mojo::InterfaceRequest<mojom::Renderer> request);
-
- // Helper function to bind MojoRendererService with a StrongBinding,
- // which is safely accessible via the returned StrongBindingPtr.
- // NOTE: Some media::Renderers don't need Audio/VideoRendererSinks, and don't
- // support encrypted content. For example, MediaPlayerRenderer instead uses a
- // StreamTextureWrapper, and FlingingRenderer does not need to render any
- // video on the local device. This function serves the same purpose as the one
- // above, but without forcing classes to define the forward declared
- // AudioRendererSink, VideoRendererSink and MojoCdmServiceContext.
- static mojo::StrongBindingPtr<mojom::Renderer> Create(
std::unique_ptr<media::Renderer> renderer,
const InitiateSurfaceRequestCB& initiate_surface_request_cb,
mojo::InterfaceRequest<mojom::Renderer> request);
@@ -65,8 +48,6 @@ class MEDIA_MOJO_EXPORT MojoRendererService : public mojom::Renderer,
// |mojo_cdm_service_context| can be used to find the CDM to support
// encrypted media. If null, encrypted media is not supported.
MojoRendererService(MojoCdmServiceContext* mojo_cdm_service_context,
- scoped_refptr<AudioRendererSink> audio_sink,
- std::unique_ptr<VideoRendererSink> video_sink,
std::unique_ptr<media::Renderer> renderer,
InitiateSurfaceRequestCB initiate_surface_request_cb);
@@ -150,14 +131,8 @@ class MEDIA_MOJO_EXPORT MojoRendererService : public mojom::Renderer,
// the |renderer_|.
std::unique_ptr<CdmContextRef> cdm_context_ref_;
- // Audio and Video sinks.
- // May be null if underlying |renderer_| does not use them.
- scoped_refptr<AudioRendererSink> audio_sink_;
- std::unique_ptr<VideoRendererSink> video_sink_;
-
// Note: Destroy |renderer_| first to avoid access violation into other
- // members, e.g. |media_resource_|, |cdm_|, |audio_sink_|, and
- // |video_sink_|.
+ // members, e.g. |media_resource_| and |cdm_|.
// Must use "media::" because "Renderer" is ambiguous.
std::unique_ptr<media::Renderer> renderer_;
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.cc b/chromium/media/mojo/services/mojo_video_decoder_service.cc
index a68e7701ffb..ccfd012bcbc 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.cc
@@ -8,6 +8,7 @@
#include "base/bind_helpers.h"
#include "base/logging.h"
#include "base/macros.h"
+#include "base/metrics/histogram_macros.h"
#include "base/optional.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/cdm_context.h"
@@ -29,6 +30,13 @@ namespace media {
namespace {
+// Number of active (Decode() was called at least once)
+// MojoVideoDecoderService instances that are alive.
+//
+// Since MojoVideoDecoderService is constructed only by the MediaFactory,
+// this will only ever be accessed from a single thread.
+static int32_t g_num_active_mvd_instances = 0;
+
class StaticSyncTokenClient : public VideoFrame::SyncTokenClient {
public:
explicit StaticSyncTokenClient(const gpu::SyncToken& sync_token)
@@ -66,7 +74,7 @@ class VideoFrameHandleReleaserImpl final
// VideoFrame.
base::UnguessableToken RegisterVideoFrame(scoped_refptr<VideoFrame> frame) {
base::UnguessableToken token = base::UnguessableToken::Create();
- DVLOG(2) << __func__ << " => " << token.ToString();
+ DVLOG(3) << __func__ << " => " << token.ToString();
video_frames_[token] = std::move(frame);
return token;
}
@@ -74,7 +82,7 @@ class VideoFrameHandleReleaserImpl final
// mojom::MojoVideoFrameHandleReleaser implementation
void ReleaseVideoFrame(const base::UnguessableToken& release_token,
const gpu::SyncToken& release_sync_token) final {
- DVLOG(2) << __func__ << "(" << release_token.ToString() << ")";
+ DVLOG(3) << __func__ << "(" << release_token.ToString() << ")";
auto it = video_frames_.find(release_token);
if (it == video_frames_.end()) {
mojo::ReportBadMessage("Unknown |release_token|.");
@@ -98,14 +106,17 @@ MojoVideoDecoderService::MojoVideoDecoderService(
: mojo_media_client_(mojo_media_client),
mojo_cdm_service_context_(mojo_cdm_service_context),
weak_factory_(this) {
- DVLOG(3) << __func__;
+ DVLOG(1) << __func__;
DCHECK(mojo_media_client_);
DCHECK(mojo_cdm_service_context_);
weak_this_ = weak_factory_.GetWeakPtr();
}
MojoVideoDecoderService::~MojoVideoDecoderService() {
- DVLOG(3) << __func__;
+ DVLOG(1) << __func__;
+
+ if (is_active_instance_)
+ g_num_active_mvd_instances--;
}
void MojoVideoDecoderService::Construct(
@@ -113,7 +124,8 @@ void MojoVideoDecoderService::Construct(
mojom::MediaLogAssociatedPtrInfo media_log,
mojom::VideoFrameHandleReleaserRequest video_frame_handle_releaser,
mojo::ScopedDataPipeConsumerHandle decoder_buffer_pipe,
- mojom::CommandBufferIdPtr command_buffer_id) {
+ mojom::CommandBufferIdPtr command_buffer_id,
+ const gfx::ColorSpace& target_color_space) {
DVLOG(1) << __func__;
if (decoder_) {
@@ -124,9 +136,9 @@ void MojoVideoDecoderService::Construct(
client_.Bind(std::move(client));
- mojom::MediaLogAssociatedPtr media_log_ptr;
- media_log_ptr.Bind(std::move(media_log));
- media_log_ = std::make_unique<MojoMediaLog>(std::move(media_log_ptr));
+ media_log_ = std::make_unique<MojoMediaLog>(
+ mojom::ThreadSafeMediaLogAssociatedPtr::Create(
+ std::move(media_log), base::ThreadTaskRunnerHandle::Get()));
video_frame_handle_releaser_ =
mojo::MakeStrongBinding(std::make_unique<VideoFrameHandleReleaserImpl>(),
@@ -139,14 +151,16 @@ void MojoVideoDecoderService::Construct(
base::ThreadTaskRunnerHandle::Get(), media_log_.get(),
std::move(command_buffer_id),
base::Bind(&MojoVideoDecoderService::OnDecoderRequestedOverlayInfo,
- weak_this_));
+ weak_this_),
+ target_color_space);
}
void MojoVideoDecoderService::Initialize(const VideoDecoderConfig& config,
bool low_delay,
int32_t cdm_id,
InitializeCallback callback) {
- DVLOG(1) << __func__;
+ DVLOG(1) << __func__ << " config = " << config.AsHumanReadableString()
+ << ", cdm_id = " << cdm_id;
if (!decoder_) {
std::move(callback).Run(false, false, 1);
@@ -155,7 +169,7 @@ void MojoVideoDecoderService::Initialize(const VideoDecoderConfig& config,
// Get CdmContext from cdm_id if the stream is encrypted.
CdmContext* cdm_context = nullptr;
- if (config.is_encrypted()) {
+ if (cdm_id != CdmContext::kInvalidCdmId) {
cdm_context_ref_ = mojo_cdm_service_context_->GetCdmContextRef(cdm_id);
if (!cdm_context_ref_) {
DVLOG(1) << "CdmContextRef not found for CDM id: " << cdm_id;
@@ -173,40 +187,46 @@ void MojoVideoDecoderService::Initialize(const VideoDecoderConfig& config,
base::Passed(&callback)),
base::BindRepeating(&MojoVideoDecoderService::OnDecoderOutput,
weak_this_),
- media::VideoDecoder::WaitingForDecryptionKeyCB());
+ base::NullCallback());
}
void MojoVideoDecoderService::Decode(mojom::DecoderBufferPtr buffer,
DecodeCallback callback) {
- DVLOG(2) << __func__ << " pts=" << buffer->timestamp.InMilliseconds();
+ DVLOG(3) << __func__ << " pts=" << buffer->timestamp.InMilliseconds();
if (!decoder_) {
std::move(callback).Run(DecodeStatus::DECODE_ERROR);
return;
}
+ if (!is_active_instance_) {
+ is_active_instance_ = true;
+ g_num_active_mvd_instances++;
+ UMA_HISTOGRAM_EXACT_LINEAR("Media.MojoVideoDecoder.ActiveInstances",
+ g_num_active_mvd_instances, 64);
+ }
+
mojo_decoder_buffer_reader_->ReadDecoderBuffer(
std::move(buffer), base::BindOnce(&MojoVideoDecoderService::OnReaderRead,
weak_this_, std::move(callback)));
}
void MojoVideoDecoderService::Reset(ResetCallback callback) {
- DVLOG(1) << __func__;
+ DVLOG(2) << __func__;
if (!decoder_) {
std::move(callback).Run();
return;
}
- // Flush the reader so that pending decodes will be dispatches first.
+ // Flush the reader so that pending decodes will be dispatched first.
mojo_decoder_buffer_reader_->Flush(
base::Bind(&MojoVideoDecoderService::OnReaderFlushed, weak_this_,
base::Passed(&callback)));
}
-void MojoVideoDecoderService::OnDecoderInitialized(
- InitializeCallback callback,
- bool success) {
+void MojoVideoDecoderService::OnDecoderInitialized(InitializeCallback callback,
+ bool success) {
DVLOG(1) << __func__;
DCHECK(decoder_);
@@ -239,21 +259,26 @@ void MojoVideoDecoderService::OnReaderFlushed(ResetCallback callback) {
void MojoVideoDecoderService::OnDecoderDecoded(DecodeCallback callback,
DecodeStatus status) {
- DVLOG(2) << __func__;
+ DVLOG(3) << __func__;
std::move(callback).Run(status);
}
void MojoVideoDecoderService::OnDecoderReset(ResetCallback callback) {
- DVLOG(1) << __func__;
+ DVLOG(2) << __func__;
std::move(callback).Run();
}
void MojoVideoDecoderService::OnDecoderOutput(
const scoped_refptr<VideoFrame>& frame) {
- DVLOG(2) << __func__;
+ DVLOG(3) << __func__;
DCHECK(client_);
DCHECK(decoder_);
+ // All MojoVideoDecoder-based decoders are hardware decoders. If you're the
+ // first to implement an out-of-process decoder that is not power efficent,
+ // you can remove this DCHECK.
+ DCHECK(frame->metadata()->IsTrue(VideoFrameMetadata::POWER_EFFICIENT));
+
base::Optional<base::UnguessableToken> release_token;
if (frame->HasReleaseMailboxCB() && video_frame_handle_releaser_) {
// |video_frame_handle_releaser_| is explicitly constructed with a
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.h b/chromium/media/mojo/services/mojo_video_decoder_service.h
index 739fff0b124..05aa82518f6 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.h
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.h
@@ -46,7 +46,8 @@ class MEDIA_MOJO_EXPORT MojoVideoDecoderService final
mojom::MediaLogAssociatedPtrInfo media_log,
mojom::VideoFrameHandleReleaserRequest video_frame_handle_releaser,
mojo::ScopedDataPipeConsumerHandle decoder_buffer_pipe,
- mojom::CommandBufferIdPtr command_buffer_id) final;
+ mojom::CommandBufferIdPtr command_buffer_id,
+ const gfx::ColorSpace& target_color_space) final;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
int32_t cdm_id,
@@ -75,6 +76,9 @@ class MEDIA_MOJO_EXPORT MojoVideoDecoderService final
bool restart_for_transitions,
const ProvideOverlayInfoCB& provide_overlay_info_cb);
+ // Whether this instance is active (Decode() was called at least once).
+ bool is_active_instance_ = false;
+
// Decoder factory.
MojoMediaClient* mojo_media_client_;
diff --git a/chromium/media/mojo/services/test_mojo_media_client.cc b/chromium/media/mojo/services/test_mojo_media_client.cc
index d5def90093f..6c3ce3d4427 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.cc
+++ b/chromium/media/mojo/services/test_mojo_media_client.cc
@@ -6,7 +6,6 @@
#include <memory>
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/audio/audio_device_description.h"
@@ -19,9 +18,16 @@
#include "media/base/null_video_sink.h"
#include "media/base/renderer_factory.h"
#include "media/cdm/default_cdm_factory.h"
+#include "media/renderers/default_decoder_factory.h"
#include "media/renderers/default_renderer_factory.h"
#include "media/video/gpu_video_accelerator_factories.h"
+#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
+#include "media/cdm/cdm_paths.h" // nogncheck
+#include "media/cdm/cdm_proxy.h" // nogncheck
+#include "media/cdm/library_cdm/clear_key_cdm/clear_key_cdm_proxy.h" // nogncheck
+#endif
+
namespace media {
TestMojoMediaClient::TestMojoMediaClient() = default;
@@ -50,23 +56,40 @@ void TestMojoMediaClient::Initialize(
}
}
-scoped_refptr<AudioRendererSink> TestMojoMediaClient::CreateAudioRendererSink(
+std::unique_ptr<Renderer> TestMojoMediaClient::CreateRenderer(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
const std::string& /* audio_device_id */) {
- return new AudioOutputStreamSink();
-}
+ // If called for the first time, do one-time initialization.
+ if (!decoder_factory_) {
+ decoder_factory_.reset(new media::DefaultDecoderFactory(nullptr));
+ }
-std::unique_ptr<VideoRendererSink> TestMojoMediaClient::CreateVideoRendererSink(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner) {
- return std::make_unique<NullVideoSink>(
+ if (!renderer_factory_) {
+ renderer_factory_ = std::make_unique<DefaultRendererFactory>(
+ media_log, decoder_factory_.get(),
+ DefaultRendererFactory::GetGpuFactoriesCB());
+ }
+
+ // We cannot share AudioOutputStreamSink or NullVideoSink among different
+ // RendererImpls. Thus create one for each Renderer creation.
+ auto audio_sink = base::MakeRefCounted<AudioOutputStreamSink>();
+ auto video_sink = std::make_unique<NullVideoSink>(
false, base::TimeDelta::FromSecondsD(1.0 / 60),
NullVideoSink::NewFrameCB(), task_runner);
-}
+ auto* video_sink_ptr = video_sink.get();
-std::unique_ptr<RendererFactory> TestMojoMediaClient::CreateRendererFactory(
- MediaLog* media_log) {
- return std::make_unique<DefaultRendererFactory>(
- media_log, nullptr, DefaultRendererFactory::GetGpuFactoriesCB());
-}
+ // Hold created sinks since DefaultRendererFactory only takes raw pointers to
+ // the sinks. We are not cleaning them up even after a created Renderer is
+ // destroyed. But this is fine since this class is only used for tests.
+ audio_sinks_.push_back(audio_sink);
+ video_sinks_.push_back(std::move(video_sink));
+
+ return renderer_factory_->CreateRenderer(
+ task_runner, task_runner, audio_sink.get(), video_sink_ptr,
+ RequestOverlayInfoCB(), gfx::ColorSpace());
+
+}
std::unique_ptr<CdmFactory> TestMojoMediaClient::CreateCdmFactory(
service_manager::mojom::InterfaceProvider* /* host_interfaces */) {
@@ -74,4 +97,15 @@ std::unique_ptr<CdmFactory> TestMojoMediaClient::CreateCdmFactory(
return std::make_unique<DefaultCdmFactory>();
}
+#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
+std::unique_ptr<CdmProxy> TestMojoMediaClient::CreateCdmProxy(
+ const std::string& cdm_guid) {
+ DVLOG(1) << __func__ << ": cdm_guid = " << cdm_guid;
+ if (cdm_guid == kClearKeyCdmGuid)
+ return std::make_unique<ClearKeyCdmProxy>();
+
+ return nullptr;
+}
+#endif // BUILDFLAG(ENABLE_LIBRARY_CDMS)
+
} // namespace media
diff --git a/chromium/media/mojo/services/test_mojo_media_client.h b/chromium/media/mojo/services/test_mojo_media_client.h
index f593ed8ded2..d5adbeb790e 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.h
+++ b/chromium/media/mojo/services/test_mojo_media_client.h
@@ -6,24 +6,22 @@
#define MEDIA_MOJO_SERVICES_TEST_MOJO_MEDIA_CLIENT_H_
#include <memory>
+#include <vector>
#include "base/macros.h"
#include "base/memory/ref_counted.h"
+#include "media/media_buildflags.h"
#include "media/mojo/services/mojo_media_client.h"
-namespace base {
-class SingleThreadTaskRunner;
-}
-
namespace media {
class AudioManager;
class AudioRendererSink;
-class MediaLog;
+class DecoderFactory;
class RendererFactory;
class VideoRendererSink;
-// Default MojoMediaClient for MediaService.
+// Test MojoMediaClient for MediaService.
class TestMojoMediaClient : public MojoMediaClient {
public:
TestMojoMediaClient();
@@ -31,17 +29,22 @@ class TestMojoMediaClient : public MojoMediaClient {
// MojoMediaClient implementation.
void Initialize(service_manager::Connector* connector) final;
- scoped_refptr<AudioRendererSink> CreateAudioRendererSink(
+ std::unique_ptr<Renderer> CreateRenderer(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
const std::string& audio_device_id) final;
- std::unique_ptr<VideoRendererSink> CreateVideoRendererSink(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner) final;
- std::unique_ptr<RendererFactory> CreateRendererFactory(
- MediaLog* media_log) final;
std::unique_ptr<CdmFactory> CreateCdmFactory(
service_manager::mojom::InterfaceProvider* /* host_interfaces */) final;
+#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
+ std::unique_ptr<CdmProxy> CreateCdmProxy(const std::string& cdm_guid) final;
+#endif // BUILDFLAG(ENABLE_LIBRARY_CDMS)
private:
std::unique_ptr<AudioManager> audio_manager_;
+ std::unique_ptr<DecoderFactory> decoder_factory_;
+ std::unique_ptr<RendererFactory> renderer_factory_;
+ std::vector<scoped_refptr<AudioRendererSink>> audio_sinks_;
+ std::vector<std::unique_ptr<VideoRendererSink>> video_sinks_;
DISALLOW_COPY_AND_ASSIGN(TestMojoMediaClient);
};
diff --git a/chromium/media/mojo/services/watch_time_recorder.cc b/chromium/media/mojo/services/watch_time_recorder.cc
index 80816bd8c15..e34b0e3aeb9 100644
--- a/chromium/media/mojo/services/watch_time_recorder.cc
+++ b/chromium/media/mojo/services/watch_time_recorder.cc
@@ -85,74 +85,6 @@ static VideoDecoderName ConvertVideoDecoderNameToEnum(const std::string& name) {
return VideoDecoderName::kUnknown;
}
-static bool ShouldReportToUma(WatchTimeKey key) {
- switch (key) {
- // These keys are not currently reported to UMA, but are used for UKM metric
- // calculations. To report them in the future just add the keys to report to
- // the lower list and add histograms.xml entries for them.
- case WatchTimeKey::kVideoAll:
- case WatchTimeKey::kVideoMse:
- case WatchTimeKey::kVideoEme:
- case WatchTimeKey::kVideoSrc:
- case WatchTimeKey::kVideoBattery:
- case WatchTimeKey::kVideoAc:
- case WatchTimeKey::kVideoDisplayFullscreen:
- case WatchTimeKey::kVideoDisplayInline:
- case WatchTimeKey::kVideoDisplayPictureInPicture:
- case WatchTimeKey::kVideoEmbeddedExperience:
- case WatchTimeKey::kVideoNativeControlsOn:
- case WatchTimeKey::kVideoNativeControlsOff:
- case WatchTimeKey::kVideoBackgroundAll:
- case WatchTimeKey::kVideoBackgroundMse:
- case WatchTimeKey::kVideoBackgroundEme:
- case WatchTimeKey::kVideoBackgroundSrc:
- case WatchTimeKey::kVideoBackgroundBattery:
- case WatchTimeKey::kVideoBackgroundAc:
- case WatchTimeKey::kVideoBackgroundEmbeddedExperience:
- return false;
-
- case WatchTimeKey::kAudioAll:
- case WatchTimeKey::kAudioMse:
- case WatchTimeKey::kAudioEme:
- case WatchTimeKey::kAudioSrc:
- case WatchTimeKey::kAudioBattery:
- case WatchTimeKey::kAudioAc:
- case WatchTimeKey::kAudioEmbeddedExperience:
- case WatchTimeKey::kAudioNativeControlsOn:
- case WatchTimeKey::kAudioNativeControlsOff:
- case WatchTimeKey::kAudioBackgroundAll:
- case WatchTimeKey::kAudioBackgroundMse:
- case WatchTimeKey::kAudioBackgroundEme:
- case WatchTimeKey::kAudioBackgroundSrc:
- case WatchTimeKey::kAudioBackgroundBattery:
- case WatchTimeKey::kAudioBackgroundAc:
- case WatchTimeKey::kAudioBackgroundEmbeddedExperience:
- case WatchTimeKey::kAudioVideoAll:
- case WatchTimeKey::kAudioVideoMse:
- case WatchTimeKey::kAudioVideoEme:
- case WatchTimeKey::kAudioVideoSrc:
- case WatchTimeKey::kAudioVideoBattery:
- case WatchTimeKey::kAudioVideoAc:
- case WatchTimeKey::kAudioVideoDisplayFullscreen:
- case WatchTimeKey::kAudioVideoDisplayInline:
- case WatchTimeKey::kAudioVideoDisplayPictureInPicture:
- case WatchTimeKey::kAudioVideoEmbeddedExperience:
- case WatchTimeKey::kAudioVideoNativeControlsOn:
- case WatchTimeKey::kAudioVideoNativeControlsOff:
- case WatchTimeKey::kAudioVideoBackgroundAll:
- case WatchTimeKey::kAudioVideoBackgroundMse:
- case WatchTimeKey::kAudioVideoBackgroundEme:
- case WatchTimeKey::kAudioVideoBackgroundSrc:
- case WatchTimeKey::kAudioVideoBackgroundBattery:
- case WatchTimeKey::kAudioVideoBackgroundAc:
- case WatchTimeKey::kAudioVideoBackgroundEmbeddedExperience:
- return true;
- }
-
- NOTREACHED();
- return false;
-}
-
static void RecordWatchTimeInternal(
base::StringPiece key,
base::TimeDelta value,
@@ -236,9 +168,10 @@ void WatchTimeRecorder::FinalizeWatchTime(
// Report only certain keys to UMA and only if they have at met the minimum
// watch time requirement. Otherwise, for SRC/MSE/EME keys, log them to the
// discard metric.
- if (ShouldReportToUma(kv.first)) {
+ base::StringPiece key_str = ConvertWatchTimeKeyToStringForUma(kv.first);
+ if (!key_str.empty()) {
if (kv.second >= kMinimumElapsedWatchTime) {
- RecordWatchTimeInternal(WatchTimeKeyToString(kv.first), kv.second);
+ RecordWatchTimeInternal(key_str, kv.second);
} else if (kv.second > base::TimeDelta()) {
auto it = std::find_if(extended_metrics_keys_.begin(),
extended_metrics_keys_.end(),
@@ -264,7 +197,7 @@ void WatchTimeRecorder::FinalizeWatchTime(
// Check for watch times entries that have corresponding MTBR entries and
// report the MTBR value using watch_time / |underflow_count|. Do this only
// for foreground reporters since we only have UMA keys for foreground.
- if (!properties_->is_background) {
+ if (!properties_->is_background && !properties_->is_muted) {
for (auto& mapping : extended_metrics_keys_) {
auto it = watch_time_info_.find(mapping.watch_time_key);
if (it == watch_time_info_.end() || it->second < kMinimumElapsedWatchTime)
@@ -308,11 +241,6 @@ void WatchTimeRecorder::UpdateUnderflowCount(int32_t count) {
underflow_count_ = count;
}
-// static
-bool WatchTimeRecorder::ShouldReportUmaForTesting(WatchTimeKey key) {
- return ShouldReportToUma(key);
-}
-
void WatchTimeRecorder::RecordUkmPlaybackData() {
// UKM may be unavailable in content_shell or other non-chrome/ builds; it
// may also be unavailable if browser shutdown has started; so this may be a
@@ -329,6 +257,7 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
builder.SetIsTopFrame(is_top_frame_);
builder.SetIsBackground(properties_->is_background);
+ builder.SetIsMuted(properties_->is_muted);
builder.SetPlayerID(player_id_);
bool recorded_all_metric = false;
@@ -336,6 +265,7 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
if (kv.first == WatchTimeKey::kAudioAll ||
kv.first == WatchTimeKey::kAudioBackgroundAll ||
kv.first == WatchTimeKey::kAudioVideoAll ||
+ kv.first == WatchTimeKey::kAudioVideoMutedAll ||
kv.first == WatchTimeKey::kAudioVideoBackgroundAll ||
kv.first == WatchTimeKey::kVideoAll ||
kv.first == WatchTimeKey::kVideoBackgroundAll) {
@@ -351,6 +281,7 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
} else if (kv.first == WatchTimeKey::kAudioAc ||
kv.first == WatchTimeKey::kAudioBackgroundAc ||
kv.first == WatchTimeKey::kAudioVideoAc ||
+ kv.first == WatchTimeKey::kAudioVideoMutedAc ||
kv.first == WatchTimeKey::kAudioVideoBackgroundAc ||
kv.first == WatchTimeKey::kVideoAc ||
kv.first == WatchTimeKey::kVideoBackgroundAc) {
@@ -358,25 +289,32 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
} else if (kv.first == WatchTimeKey::kAudioBattery ||
kv.first == WatchTimeKey::kAudioBackgroundBattery ||
kv.first == WatchTimeKey::kAudioVideoBattery ||
+ kv.first == WatchTimeKey::kAudioVideoMutedBattery ||
kv.first == WatchTimeKey::kAudioVideoBackgroundBattery ||
kv.first == WatchTimeKey::kVideoBattery ||
kv.first == WatchTimeKey::kVideoBackgroundBattery) {
builder.SetWatchTime_Battery(kv.second.InMilliseconds());
} else if (kv.first == WatchTimeKey::kAudioNativeControlsOn ||
kv.first == WatchTimeKey::kAudioVideoNativeControlsOn ||
+ kv.first == WatchTimeKey::kAudioVideoMutedNativeControlsOn ||
kv.first == WatchTimeKey::kVideoNativeControlsOn) {
builder.SetWatchTime_NativeControlsOn(kv.second.InMilliseconds());
} else if (kv.first == WatchTimeKey::kAudioNativeControlsOff ||
kv.first == WatchTimeKey::kAudioVideoNativeControlsOff ||
+ kv.first == WatchTimeKey::kAudioVideoMutedNativeControlsOff ||
kv.first == WatchTimeKey::kVideoNativeControlsOff) {
builder.SetWatchTime_NativeControlsOff(kv.second.InMilliseconds());
} else if (kv.first == WatchTimeKey::kAudioVideoDisplayFullscreen ||
+ kv.first == WatchTimeKey::kAudioVideoMutedDisplayFullscreen ||
kv.first == WatchTimeKey::kVideoDisplayFullscreen) {
builder.SetWatchTime_DisplayFullscreen(kv.second.InMilliseconds());
} else if (kv.first == WatchTimeKey::kAudioVideoDisplayInline ||
+ kv.first == WatchTimeKey::kAudioVideoMutedDisplayInline ||
kv.first == WatchTimeKey::kVideoDisplayInline) {
builder.SetWatchTime_DisplayInline(kv.second.InMilliseconds());
} else if (kv.first == WatchTimeKey::kAudioVideoDisplayPictureInPicture ||
+ kv.first ==
+ WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture ||
kv.first == WatchTimeKey::kVideoDisplayPictureInPicture) {
builder.SetWatchTime_DisplayPictureInPicture(kv.second.InMilliseconds());
}
diff --git a/chromium/media/mojo/services/watch_time_recorder.h b/chromium/media/mojo/services/watch_time_recorder.h
index ccc4330299e..82a7b670594 100644
--- a/chromium/media/mojo/services/watch_time_recorder.h
+++ b/chromium/media/mojo/services/watch_time_recorder.h
@@ -40,9 +40,6 @@ class MEDIA_MOJO_EXPORT WatchTimeRecorder : public mojom::WatchTimeRecorder {
void UpdateUnderflowCount(int32_t count) override;
- // Test helper method for determining if keys are not reported to UMA.
- static bool ShouldReportUmaForTesting(WatchTimeKey key);
-
private:
// Records a UKM event based on |aggregate_watch_time_info_|; only recorded
// with a complete finalize (destruction or empty FinalizeWatchTime call).
diff --git a/chromium/media/mojo/services/watch_time_recorder_unittest.cc b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
index def3bacb95e..a9e2e0a428f 100644
--- a/chromium/media/mojo/services/watch_time_recorder_unittest.cc
+++ b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
@@ -11,6 +11,7 @@
#include "base/hash.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
+#include "base/strings/string_number_conversions.h"
#include "base/test/histogram_tester.h"
#include "base/test/test_message_loop.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -69,7 +70,7 @@ class WatchTimeRecorderTest : public testing::Test {
bool is_encrypted) {
Initialize(mojom::PlaybackProperties::New(
kUnknownAudioCodec, kUnknownVideoCodec, has_audio, has_video, false,
- is_mse, is_encrypted, false, gfx::Size(800, 600)));
+ false, is_mse, is_encrypted, false, gfx::Size(800, 600)));
}
void ExpectWatchTime(const std::vector<base::StringPiece>& keys,
@@ -77,7 +78,9 @@ class WatchTimeRecorderTest : public testing::Test {
for (int i = 0; i <= static_cast<int>(WatchTimeKey::kWatchTimeKeyMax);
++i) {
const base::StringPiece test_key =
- WatchTimeKeyToString(static_cast<WatchTimeKey>(i));
+ ConvertWatchTimeKeyToStringForUma(static_cast<WatchTimeKey>(i));
+ if (test_key.empty())
+ continue;
auto it = std::find(keys.begin(), keys.end(), test_key);
if (it == keys.end()) {
histogram_tester_->ExpectTotalCount(test_key.as_string(), 0);
@@ -161,8 +164,13 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
for (int i = 0; i <= static_cast<int>(WatchTimeKey::kWatchTimeKeyMax); ++i) {
const WatchTimeKey key = static_cast<WatchTimeKey>(i);
- SCOPED_TRACE(WatchTimeKeyToString(key));
+ auto key_str = ConvertWatchTimeKeyToStringForUma(key);
+ SCOPED_TRACE(key_str.empty() ? base::NumberToString(i)
+ : key_str.as_string());
+
+ // Values for |is_background| and |is_muted| don't matter in this test since
+ // they don't prevent the muted or background keys from being recorded.
Initialize(true, false, true, true);
wtr_->RecordWatchTime(WatchTimeKey::kWatchTimeKeyMax, kWatchTime1);
wtr_->RecordWatchTime(key, kWatchTime1);
@@ -176,10 +184,8 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
wtr_->FinalizeWatchTime({key});
base::RunLoop().RunUntilIdle();
- if (WatchTimeRecorder::ShouldReportUmaForTesting(key)) {
- const base::StringPiece key_str = WatchTimeKeyToString(key);
+ if (!key_str.empty())
ExpectWatchTime({key_str}, kWatchTime2);
- }
// These keys are only reported for a full finalize.
ExpectMtbrTime({}, base::TimeDelta());
@@ -199,6 +205,7 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
case WatchTimeKey::kAudioBackgroundAll:
case WatchTimeKey::kAudioVideoAll:
case WatchTimeKey::kAudioVideoBackgroundAll:
+ case WatchTimeKey::kAudioVideoMutedAll:
case WatchTimeKey::kVideoAll:
case WatchTimeKey::kVideoBackgroundAll:
ExpectUkmWatchTime({UkmEntry::kWatchTimeName}, kWatchTime2);
@@ -217,6 +224,10 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
case WatchTimeKey::kAudioVideoEme:
case WatchTimeKey::kAudioVideoSrc:
case WatchTimeKey::kAudioVideoEmbeddedExperience:
+ case WatchTimeKey::kAudioVideoMutedMse:
+ case WatchTimeKey::kAudioVideoMutedEme:
+ case WatchTimeKey::kAudioVideoMutedSrc:
+ case WatchTimeKey::kAudioVideoMutedEmbeddedExperience:
case WatchTimeKey::kAudioVideoBackgroundMse:
case WatchTimeKey::kAudioVideoBackgroundEme:
case WatchTimeKey::kAudioVideoBackgroundSrc:
@@ -236,6 +247,7 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
case WatchTimeKey::kAudioBattery:
case WatchTimeKey::kAudioBackgroundBattery:
case WatchTimeKey::kAudioVideoBattery:
+ case WatchTimeKey::kAudioVideoMutedBattery:
case WatchTimeKey::kAudioVideoBackgroundBattery:
case WatchTimeKey::kVideoBattery:
case WatchTimeKey::kVideoBackgroundBattery:
@@ -247,24 +259,28 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
case WatchTimeKey::kAudioBackgroundAc:
case WatchTimeKey::kAudioVideoAc:
case WatchTimeKey::kAudioVideoBackgroundAc:
+ case WatchTimeKey::kAudioVideoMutedAc:
case WatchTimeKey::kVideoAc:
case WatchTimeKey::kVideoBackgroundAc:
ExpectUkmWatchTime({UkmEntry::kWatchTime_ACName}, kWatchTime2);
break;
case WatchTimeKey::kAudioVideoDisplayFullscreen:
+ case WatchTimeKey::kAudioVideoMutedDisplayFullscreen:
case WatchTimeKey::kVideoDisplayFullscreen:
ExpectUkmWatchTime({UkmEntry::kWatchTime_DisplayFullscreenName},
kWatchTime2);
break;
case WatchTimeKey::kAudioVideoDisplayInline:
+ case WatchTimeKey::kAudioVideoMutedDisplayInline:
case WatchTimeKey::kVideoDisplayInline:
ExpectUkmWatchTime({UkmEntry::kWatchTime_DisplayInlineName},
kWatchTime2);
break;
case WatchTimeKey::kAudioVideoDisplayPictureInPicture:
+ case WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture:
case WatchTimeKey::kVideoDisplayPictureInPicture:
ExpectUkmWatchTime({UkmEntry::kWatchTime_DisplayPictureInPictureName},
kWatchTime2);
@@ -272,6 +288,7 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
case WatchTimeKey::kAudioNativeControlsOn:
case WatchTimeKey::kAudioVideoNativeControlsOn:
+ case WatchTimeKey::kAudioVideoMutedNativeControlsOn:
case WatchTimeKey::kVideoNativeControlsOn:
ExpectUkmWatchTime({UkmEntry::kWatchTime_NativeControlsOnName},
kWatchTime2);
@@ -279,6 +296,7 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
case WatchTimeKey::kAudioNativeControlsOff:
case WatchTimeKey::kAudioVideoNativeControlsOff:
+ case WatchTimeKey::kAudioVideoMutedNativeControlsOff:
case WatchTimeKey::kVideoNativeControlsOff:
ExpectUkmWatchTime({UkmEntry::kWatchTime_NativeControlsOffName},
kWatchTime2);
@@ -365,9 +383,9 @@ TEST_F(WatchTimeRecorderTest, TestDiscardMetrics) {
EXPECT_TRUE(test_recorder_->EntryHasMetric(entry, name));
TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
- mojom::PlaybackPropertiesPtr properties =
- mojom::PlaybackProperties::New(kCodecAAC, kCodecH264, true, true, false,
- false, false, false, gfx::Size(800, 600));
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ kCodecAAC, kCodecH264, true, true, false, false, false, false, false,
+ gfx::Size(800, 600));
Initialize(properties.Clone());
// Verify that UKM is reported along with the watch time.
@@ -400,6 +418,7 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
test_recorder_->ExpectEntrySourceHasUrl(entry, GURL(kTestOrigin));
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
@@ -430,9 +449,9 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
}
TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
- mojom::PlaybackPropertiesPtr properties =
- mojom::PlaybackProperties::New(kCodecAAC, kCodecH264, true, true, false,
- false, false, false, gfx::Size(800, 600));
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ kCodecAAC, kCodecH264, true, true, false, false, false, false, false,
+ gfx::Size(800, 600));
Initialize(properties.Clone());
// Finalize everything. UKM is only recorded at destruction, so this should do
@@ -462,6 +481,7 @@ TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
test_recorder_->ExpectEntrySourceHasUrl(entry, GURL(kTestOrigin));
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
@@ -492,9 +512,9 @@ TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
}
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
- mojom::PlaybackPropertiesPtr properties =
- mojom::PlaybackProperties::New(kCodecAAC, kCodecH264, true, true, false,
- false, false, false, gfx::Size(800, 600));
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ kCodecAAC, kCodecH264, true, true, false, false, false, false, false,
+ gfx::Size(800, 600));
Initialize(properties.Clone());
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(4);
@@ -509,6 +529,7 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
@@ -538,9 +559,9 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
}
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
- mojom::PlaybackPropertiesPtr properties =
- mojom::PlaybackProperties::New(kCodecOpus, kCodecVP9, true, true, false,
- true, true, false, gfx::Size(800, 600));
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ kCodecOpus, kCodecVP9, true, true, false, false, true, true, false,
+ gfx::Size(800, 600));
Initialize(properties.Clone());
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
@@ -601,6 +622,7 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
EXPECT_UKM(UkmEntry::kVideoDecoderNameName, 5);
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
@@ -617,10 +639,10 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
}
}
-TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackground) {
- mojom::PlaybackPropertiesPtr properties =
- mojom::PlaybackProperties::New(kCodecAAC, kCodecH264, true, true, true,
- false, false, false, gfx::Size(800, 600));
+TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ kCodecAAC, kCodecH264, true, true, true, true, false, false, false,
+ gfx::Size(800, 600));
Initialize(properties.Clone());
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
@@ -635,6 +657,7 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackground) {
EXPECT_UKM(UkmEntry::kWatchTimeName, kWatchTime.InMilliseconds());
EXPECT_UKM(UkmEntry::kIsBackgroundName, properties->is_background);
+ EXPECT_UKM(UkmEntry::kIsMutedName, properties->is_muted);
EXPECT_UKM(UkmEntry::kAudioCodecName, properties->audio_codec);
EXPECT_UKM(UkmEntry::kVideoCodecName, properties->video_codec);
EXPECT_UKM(UkmEntry::kHasAudioName, properties->has_audio);
diff --git a/chromium/media/muxers/webm_muxer.cc b/chromium/media/muxers/webm_muxer.cc
index bb05c845951..e281d6c3bc4 100644
--- a/chromium/media/muxers/webm_muxer.cc
+++ b/chromium/media/muxers/webm_muxer.cc
@@ -293,7 +293,9 @@ void WebmMuxer::AddAudioTrack(const media::AudioParameters& params) {
DCHECK(audio_track);
DCHECK_EQ(params.sample_rate(), audio_track->sample_rate());
DCHECK_EQ(params.channels(), static_cast<int>(audio_track->channels()));
- audio_track->set_bit_depth(static_cast<uint64_t>(params.bits_per_sample()));
+
+ // Audio data is always pcm_f32le.
+ audio_track->set_bit_depth(32u);
if (audio_codec_ == kCodecOpus) {
audio_track->set_codec_id(mkvmuxer::Tracks::kOpusCodecId);
@@ -308,8 +310,6 @@ void WebmMuxer::AddAudioTrack(const media::AudioParameters& params) {
// http://www.webmproject.org/docs/container/#muxer-guidelines
DCHECK_EQ(1000000ull, segment_.GetSegmentInfo()->timecode_scale());
} else if (audio_codec_ == kCodecPCM) {
- DCHECK_EQ(static_cast<uint64_t>(params.bits_per_sample()),
- audio_track->bit_depth());
audio_track->set_codec_id(kPcmCodecId);
}
}
diff --git a/chromium/media/muxers/webm_muxer_fuzzertest.cc b/chromium/media/muxers/webm_muxer_fuzzertest.cc
index 604761f1e8d..d92e7862cbc 100644
--- a/chromium/media/muxers/webm_muxer_fuzzertest.cc
+++ b/chromium/media/muxers/webm_muxer_fuzzertest.cc
@@ -87,7 +87,7 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
const AudioParameters params(
media::AudioParameters::AUDIO_PCM_LOW_LATENCY, layout, sample_rate,
- 16 /* bits_per_sample */, 60 * sample_rate);
+ 60 * sample_rate);
muxer.OnEncodedAudio(params, std::make_unique<std::string>(str),
base::TimeTicks());
base::RunLoop run_loop;
diff --git a/chromium/media/muxers/webm_muxer_unittest.cc b/chromium/media/muxers/webm_muxer_unittest.cc
index e81c9e69383..71eb0838be5 100644
--- a/chromium/media/muxers/webm_muxer_unittest.cc
+++ b/chromium/media/muxers/webm_muxer_unittest.cc
@@ -213,12 +213,10 @@ TEST_P(WebmMuxerTest, OnEncodedAudioTwoFrames) {
return;
const int sample_rate = 48000;
- const int bits_per_sample = 16;
const int frames_per_buffer = 480;
media::AudioParameters audio_params(
media::AudioParameters::Format::AUDIO_PCM_LOW_LATENCY,
- media::CHANNEL_LAYOUT_MONO, sample_rate, bits_per_sample,
- frames_per_buffer);
+ media::CHANNEL_LAYOUT_MONO, sample_rate, frames_per_buffer);
const std::string encoded_data("abcdefghijklmnopqrstuvwxyz");
@@ -288,12 +286,10 @@ TEST_P(WebmMuxerTest, VideoIsStoredWhileWaitingForAudio) {
}
const int sample_rate = 48000;
- const int bits_per_sample = 16;
const int frames_per_buffer = 480;
media::AudioParameters audio_params(
media::AudioParameters::Format::AUDIO_PCM_LOW_LATENCY,
- media::CHANNEL_LAYOUT_MONO, sample_rate, bits_per_sample,
- frames_per_buffer);
+ media::CHANNEL_LAYOUT_MONO, sample_rate, frames_per_buffer);
const std::string encoded_audio("thisisanencodedaudiopacket");
// Force one libwebm error and verify OnEncodedAudio() fails.
diff --git a/chromium/media/remoting/OWNERS b/chromium/media/remoting/OWNERS
index e85c2ba937d..dbe6348b1e9 100644
--- a/chromium/media/remoting/OWNERS
+++ b/chromium/media/remoting/OWNERS
@@ -1,4 +1,5 @@
erickung@chromium.org
miu@chromium.org
+xjz@chromium.org
# COMPONENT: Internals>Cast>Streaming
diff --git a/chromium/media/remoting/courier_renderer.cc b/chromium/media/remoting/courier_renderer.cc
index 672b2194e8a..6b3f06727ed 100644
--- a/chromium/media/remoting/courier_renderer.cc
+++ b/chromium/media/remoting/courier_renderer.cc
@@ -12,7 +12,6 @@
#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/memory/ptr_util.h"
-#include "base/message_loop/message_loop.h"
#include "base/numerics/safe_math.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/default_tick_clock.h"
diff --git a/chromium/media/remoting/courier_renderer.h b/chromium/media/remoting/courier_renderer.h
index df01dfd4474..cef22a0c7d5 100644
--- a/chromium/media/remoting/courier_renderer.h
+++ b/chromium/media/remoting/courier_renderer.h
@@ -14,7 +14,6 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
-#include "base/optional.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/lock.h"
#include "base/timer/timer.h"
diff --git a/chromium/media/remoting/demuxer_stream_adapter_unittest.cc b/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
index 198345a68c2..5a158bf1087 100644
--- a/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
+++ b/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
@@ -117,8 +117,8 @@ class DemuxerStreamAdapterTest : public ::testing::Test {
constexpr size_t kDataPipeCapacity = 256;
demuxer_stream_.reset(new FakeDemuxerStream(true)); // audio.
const MojoCreateDataPipeOptions data_pipe_options{
- sizeof(MojoCreateDataPipeOptions),
- MOJO_CREATE_DATA_PIPE_OPTIONS_FLAG_NONE, 1, kDataPipeCapacity};
+ sizeof(MojoCreateDataPipeOptions), MOJO_CREATE_DATA_PIPE_FLAG_NONE, 1,
+ kDataPipeCapacity};
mojom::RemotingDataStreamSenderPtr stream_sender;
mojo::ScopedDataPipeProducerHandle producer_end;
mojo::ScopedDataPipeConsumerHandle consumer_end;
diff --git a/chromium/media/remoting/end2end_test_renderer.cc b/chromium/media/remoting/end2end_test_renderer.cc
index 87065b748ca..b227ec924e4 100644
--- a/chromium/media/remoting/end2end_test_renderer.cc
+++ b/chromium/media/remoting/end2end_test_renderer.cc
@@ -218,5 +218,19 @@ void End2EndTestRenderer::OnMessageFromSink(
controller_->OnMessageFromSink(*message);
}
+void End2EndTestRenderer::OnSelectedVideoTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) {
+ courier_renderer_->OnSelectedVideoTracksChanged(
+ enabled_tracks, std::move(change_completed_cb));
+}
+
+void End2EndTestRenderer::OnEnabledAudioTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) {
+ courier_renderer_->OnEnabledAudioTracksChanged(
+ enabled_tracks, std::move(change_completed_cb));
+}
+
} // namespace remoting
} // namespace media
diff --git a/chromium/media/remoting/end2end_test_renderer.h b/chromium/media/remoting/end2end_test_renderer.h
index 8fa9f6c30f3..da119fde9be 100644
--- a/chromium/media/remoting/end2end_test_renderer.h
+++ b/chromium/media/remoting/end2end_test_renderer.h
@@ -37,6 +37,14 @@ class End2EndTestRenderer final : public Renderer {
void SetVolume(float volume) override;
base::TimeDelta GetMediaTime() override;
+ void OnSelectedVideoTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) override;
+
+ void OnEnabledAudioTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) override;
+
private:
// Called to send RPC messages to |receiver_|.
void SendMessageToSink(const std::vector<uint8_t>& message);
diff --git a/chromium/media/remoting/fake_media_resource.cc b/chromium/media/remoting/fake_media_resource.cc
index 14e21e10bc0..e1789f8b3e3 100644
--- a/chromium/media/remoting/fake_media_resource.cc
+++ b/chromium/media/remoting/fake_media_resource.cc
@@ -105,10 +105,5 @@ std::vector<DemuxerStream*> FakeMediaResource::GetAllStreams() {
return streams;
}
-void FakeMediaResource::SetStreamStatusChangeCB(
- const StreamStatusChangeCB& cb) {
- NOTIMPLEMENTED();
-}
-
} // namespace remoting
} // namespace media
diff --git a/chromium/media/remoting/fake_media_resource.h b/chromium/media/remoting/fake_media_resource.h
index 3c6772e09a6..5d4f9795753 100644
--- a/chromium/media/remoting/fake_media_resource.h
+++ b/chromium/media/remoting/fake_media_resource.h
@@ -51,7 +51,6 @@ class FakeMediaResource : public MediaResource {
// MediaResource implementation.
std::vector<DemuxerStream*> GetAllStreams() override;
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) override;
private:
std::unique_ptr<FakeDemuxerStream> demuxer_stream_;
diff --git a/chromium/media/remoting/metrics.cc b/chromium/media/remoting/metrics.cc
index c2a7486c981..2cec63576c6 100644
--- a/chromium/media/remoting/metrics.cc
+++ b/chromium/media/remoting/metrics.cc
@@ -193,8 +193,7 @@ void SessionMetricsRecorder::RecordVideoConfiguration() {
last_video_profile_, VIDEO_CODEC_PROFILE_MAX + 1);
UMA_HISTOGRAM_CUSTOM_ENUMERATION(
"Media.Remoting.VideoNaturalWidth", last_natural_size_.width(),
- base::CustomHistogram::ArrayToCustomRanges(
- kVideoWidthBuckets, arraysize(kVideoWidthBuckets)));
+ base::CustomHistogram::ArrayToCustomEnumRanges(kVideoWidthBuckets));
// Intentionally use integer division to truncate the result.
const int aspect_ratio_100 =
last_natural_size_.height()
@@ -202,8 +201,7 @@ void SessionMetricsRecorder::RecordVideoConfiguration() {
: kInfiniteRatio;
UMA_HISTOGRAM_CUSTOM_ENUMERATION(
"Media.Remoting.VideoAspectRatio", aspect_ratio_100,
- base::CustomHistogram::ArrayToCustomRanges(
- kCommonAspectRatios100, arraysize(kCommonAspectRatios100)));
+ base::CustomHistogram::ArrayToCustomEnumRanges(kCommonAspectRatios100));
}
void SessionMetricsRecorder::RecordTrackConfiguration() {
diff --git a/chromium/media/remoting/proto_enum_utils.cc b/chromium/media/remoting/proto_enum_utils.cc
index 452fb3fa1a9..e16c112c1f0 100644
--- a/chromium/media/remoting/proto_enum_utils.cc
+++ b/chromium/media/remoting/proto_enum_utils.cc
@@ -57,6 +57,7 @@ base::Optional<AudioCodec> ToMediaAudioCodec(
CASE_RETURN_OTHER(kCodecPCM_ALAW);
CASE_RETURN_OTHER(kCodecALAC);
CASE_RETURN_OTHER(kCodecAC3);
+ CASE_RETURN_OTHER(kCodecMpegHAudio);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -83,6 +84,7 @@ base::Optional<pb::AudioDecoderConfig::Codec> ToProtoAudioDecoderConfigCodec(
CASE_RETURN_OTHER(kCodecPCM_ALAW);
CASE_RETURN_OTHER(kCodecALAC);
CASE_RETURN_OTHER(kCodecAC3);
+ CASE_RETURN_OTHER(kCodecMpegHAudio);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -103,6 +105,7 @@ base::Optional<SampleFormat> ToMediaSampleFormat(
CASE_RETURN_OTHER(kSampleFormatS24);
CASE_RETURN_OTHER(kSampleFormatAc3);
CASE_RETURN_OTHER(kSampleFormatEac3);
+ CASE_RETURN_OTHER(kSampleFormatMpegHAudio);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -123,6 +126,7 @@ ToProtoAudioDecoderConfigSampleFormat(SampleFormat value) {
CASE_RETURN_OTHER(kSampleFormatS24);
CASE_RETURN_OTHER(kSampleFormatAc3);
CASE_RETURN_OTHER(kSampleFormatEac3);
+ CASE_RETURN_OTHER(kSampleFormatMpegHAudio);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -164,6 +168,7 @@ base::Optional<ChannelLayout> ToMediaChannelLayout(
CASE_RETURN_OTHER(CHANNEL_LAYOUT_DISCRETE);
CASE_RETURN_OTHER(CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC);
CASE_RETURN_OTHER(CHANNEL_LAYOUT_4_1_QUAD_SIDE);
+ CASE_RETURN_OTHER(CHANNEL_LAYOUT_BITSTREAM);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -205,6 +210,7 @@ ToProtoAudioDecoderConfigChannelLayout(ChannelLayout value) {
CASE_RETURN_OTHER(CHANNEL_LAYOUT_DISCRETE);
CASE_RETURN_OTHER(CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC);
CASE_RETURN_OTHER(CHANNEL_LAYOUT_4_1_QUAD_SIDE);
+ CASE_RETURN_OTHER(CHANNEL_LAYOUT_BITSTREAM);
}
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
@@ -604,5 +610,27 @@ ToProtoDemuxerStreamStatus(DemuxerStream::Status value) {
return base::nullopt; // Not a 'default' to ensure compile-time checks.
}
+base::Optional<EncryptionMode> ToMediaEncryptionMode(pb::EncryptionMode value) {
+ using OriginType = pb::EncryptionMode;
+ using OtherType = EncryptionMode;
+ switch (value) {
+ CASE_RETURN_OTHER(kUnencrypted);
+ CASE_RETURN_OTHER(kCenc);
+ CASE_RETURN_OTHER(kCbcs);
+ }
+ return base::nullopt; // Not a 'default' to ensure compile-time checks.
+}
+
+base::Optional<pb::EncryptionMode> ToProtoEncryptionMode(EncryptionMode value) {
+ using OriginType = EncryptionMode;
+ using OtherType = pb::EncryptionMode;
+ switch (value) {
+ CASE_RETURN_OTHER(kUnencrypted);
+ CASE_RETURN_OTHER(kCenc);
+ CASE_RETURN_OTHER(kCbcs);
+ }
+ return base::nullopt; // Not a 'default' to ensure compile-time checks.
+}
+
} // namespace remoting
} // namespace media
diff --git a/chromium/media/remoting/proto_enum_utils.h b/chromium/media/remoting/proto_enum_utils.h
index 1ffd188522e..791a4a39b4c 100644
--- a/chromium/media/remoting/proto_enum_utils.h
+++ b/chromium/media/remoting/proto_enum_utils.h
@@ -12,6 +12,7 @@
#include "media/base/cdm_promise.h"
#include "media/base/channel_layout.h"
#include "media/base/content_decryption_module.h"
+#include "media/base/decrypt_config.h"
#include "media/base/demuxer_stream.h"
#include "media/base/encryption_scheme.h"
#include "media/base/sample_format.h"
@@ -102,6 +103,9 @@ base::Optional<DemuxerStream::Status> ToDemuxerStreamStatus(
base::Optional<pb::DemuxerStreamReadUntilCallback::Status>
ToProtoDemuxerStreamStatus(DemuxerStream::Status value);
+base::Optional<EncryptionMode> ToMediaEncryptionMode(pb::EncryptionMode value);
+base::Optional<pb::EncryptionMode> ToProtoEncryptionMode(EncryptionMode value);
+
} // namespace remoting
} // namespace media
diff --git a/chromium/media/remoting/proto_utils.cc b/chromium/media/remoting/proto_utils.cc
index e4c3f014998..93d7a928f57 100644
--- a/chromium/media/remoting/proto_utils.cc
+++ b/chromium/media/remoting/proto_utils.cc
@@ -10,6 +10,7 @@
#include "base/logging.h"
#include "base/time/time.h"
#include "base/values.h"
+#include "media/base/decrypt_config.h"
#include "media/base/encryption_pattern.h"
#include "media/base/encryption_scheme.h"
#include "media/base/timestamp_constants.h"
@@ -31,6 +32,11 @@ std::unique_ptr<DecryptConfig> ConvertProtoToDecryptConfig(
if (!config_message.has_iv())
return nullptr;
+ if (!config_message.has_mode()) {
+ // Assume it's unencrypted.
+ return nullptr;
+ }
+
std::vector<SubsampleEntry> entries(config_message.sub_samples_size());
for (int i = 0; i < config_message.sub_samples_size(); ++i) {
entries.push_back(
@@ -38,9 +44,24 @@ std::unique_ptr<DecryptConfig> ConvertProtoToDecryptConfig(
config_message.sub_samples(i).cypher_bytes()));
}
- std::unique_ptr<DecryptConfig> decrypt_config(
- new DecryptConfig(config_message.key_id(), config_message.iv(), entries));
- return decrypt_config;
+ if (config_message.mode() == pb::EncryptionMode::kCenc) {
+ return DecryptConfig::CreateCencConfig(config_message.key_id(),
+ config_message.iv(), entries);
+ }
+
+ base::Optional<EncryptionPattern> pattern;
+ if (config_message.has_crypt_byte_block()) {
+ pattern = EncryptionPattern(config_message.crypt_byte_block(),
+ config_message.skip_byte_block());
+ }
+
+ if (config_message.mode() == pb::EncryptionMode::kCbcs) {
+ return DecryptConfig::CreateCbcsConfig(config_message.key_id(),
+ config_message.iv(), entries,
+ std::move(pattern));
+ }
+
+ return nullptr;
}
scoped_refptr<DecoderBuffer> ConvertProtoToDecoderBuffer(
@@ -112,6 +133,15 @@ void ConvertDecryptConfigToProto(const DecryptConfig& decrypt_config,
sub_sample->set_clear_bytes(entry.clear_bytes);
sub_sample->set_cypher_bytes(entry.cypher_bytes);
}
+
+ config_message->set_mode(
+ ToProtoEncryptionMode(decrypt_config.encryption_mode()).value());
+ if (decrypt_config.HasPattern()) {
+ config_message->set_crypt_byte_block(
+ decrypt_config.encryption_pattern()->crypt_byte_block());
+ config_message->set_skip_byte_block(
+ decrypt_config.encryption_pattern()->skip_byte_block());
+ }
}
void ConvertDecoderBufferToProto(const DecoderBuffer& decoder_buffer,
diff --git a/chromium/media/remoting/rpc.proto b/chromium/media/remoting/rpc.proto
index 047f2e8868f..28936962cd8 100644
--- a/chromium/media/remoting/rpc.proto
+++ b/chromium/media/remoting/rpc.proto
@@ -66,6 +66,7 @@ message AudioDecoderConfig {
kCodecPCM_ALAW = 14;
kCodecALAC = 15;
kCodecAC3 = 16;
+ kCodecMpegHAudio = 17;
}
// Proto version of media::SampleFormat.
@@ -82,6 +83,7 @@ message AudioDecoderConfig {
kSampleFormatS24 = 8;
kSampleFormatAc3 = 9;
kSampleFormatEac3 = 10;
+ kSampleFormatMpegHAudio = 11;
};
// Proto version of media::ChannelLayout.
@@ -119,6 +121,7 @@ message AudioDecoderConfig {
CHANNEL_LAYOUT_DISCRETE = 29;
CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC = 30;
CHANNEL_LAYOUT_4_1_QUAD_SIDE = 31;
+ CHANNEL_LAYOUT_BITSTREAM = 32;
};
optional Codec codec = 1;
@@ -238,6 +241,14 @@ message VideoDecoderConfig {
optional bytes extra_data = 9;
}
+// Proto version of media::EncryptionMode.
+// NEVER change these numbers or re-use old ones; only add new ones.
+enum EncryptionMode {
+ kUnencrypted = 0;
+ kCenc = 1;
+ kCbcs = 2;
+}
+
message DecryptConfig {
message SubSample {
optional uint32 clear_bytes = 1;
@@ -247,6 +258,9 @@ message DecryptConfig {
optional bytes key_id = 1;
optional bytes iv = 2;
repeated SubSample sub_samples = 3;
+ optional EncryptionMode mode = 4;
+ optional uint32 crypt_byte_block = 5;
+ optional uint32 skip_byte_block = 6;
}
message PipelineStatistics {
diff --git a/chromium/media/remoting/stream_provider.h b/chromium/media/remoting/stream_provider.h
index a794ed40f16..8dc5003a2e5 100644
--- a/chromium/media/remoting/stream_provider.h
+++ b/chromium/media/remoting/stream_provider.h
@@ -26,7 +26,6 @@ class StreamProvider final : public MediaResource {
// MediaResource implemenation.
std::vector<DemuxerStream*> GetAllStreams() override;
- void SetStreamStatusChangeCB(const StreamStatusChangeCB& cb) override {}
void Initialize(int remote_audio_handle,
int remote_video_handle,
diff --git a/chromium/media/renderers/BUILD.gn b/chromium/media/renderers/BUILD.gn
index 2871d3765d9..b68e8f0b2c7 100644
--- a/chromium/media/renderers/BUILD.gn
+++ b/chromium/media/renderers/BUILD.gn
@@ -13,10 +13,15 @@ source_set("renderers") {
sources = [
"audio_renderer_impl.cc",
"audio_renderer_impl.h",
+ "default_decoder_factory.cc",
+ "default_decoder_factory.h",
"default_renderer_factory.cc",
"default_renderer_factory.h",
+ "flinging_renderer_client_factory.cc",
+ "flinging_renderer_client_factory.h",
"paint_canvas_video_renderer.cc",
"paint_canvas_video_renderer.h",
+ "remote_playback_client_wrapper.h",
"renderer_impl.cc",
"renderer_impl.h",
"video_overlay_factory.cc",
@@ -34,7 +39,7 @@ source_set("renderers") {
"//media/filters",
"//media/video",
"//skia",
- "//third_party/libaom:av1_features",
+ "//third_party/libaom:av1_buildflags",
"//third_party/libyuv",
"//ui/gfx:geometry_skia",
"//ui/gfx:memory_buffer",
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index 269d3478017..2737cded7ac 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -375,7 +375,14 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
current_decoder_config_ = stream->audio_decoder_config();
DCHECK(current_decoder_config_.IsValidConfig());
+ auto output_device_info = sink_->GetOutputDeviceInfo();
+ const AudioParameters& hw_params = output_device_info.output_params();
+ ChannelLayout hw_channel_layout =
+ hw_params.IsValid() ? hw_params.channel_layout() : CHANNEL_LAYOUT_NONE;
+
audio_buffer_stream_ = std::make_unique<AudioBufferStream>(
+ std::make_unique<AudioBufferStream::StreamTraits>(media_log_,
+ hw_channel_layout),
task_runner_, create_audio_decoders_cb_, media_log_);
audio_buffer_stream_->set_config_change_observer(base::Bind(
@@ -385,8 +392,6 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
// failed.
init_cb_ = BindToCurrentLoop(init_cb);
- auto output_device_info = sink_->GetOutputDeviceInfo();
- const AudioParameters& hw_params = output_device_info.output_params();
AudioCodec codec = stream->audio_decoder_config().codec();
if (auto* mc = GetMediaClient())
is_passthrough_ = mc->IsSupportedBitstreamAudioCodec(codec);
@@ -438,14 +443,12 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
audio_parameters_.Reset(
format, stream->audio_decoder_config().channel_layout(),
- stream->audio_decoder_config().samples_per_second(),
- stream->audio_decoder_config().bits_per_channel(), buffer_size);
+ stream->audio_decoder_config().samples_per_second(), buffer_size);
buffer_converter_.reset();
} else if (use_stream_params) {
audio_parameters_.Reset(AudioParameters::AUDIO_PCM_LOW_LATENCY,
stream->audio_decoder_config().channel_layout(),
stream->audio_decoder_config().samples_per_second(),
- stream->audio_decoder_config().bits_per_channel(),
preferred_buffer_size);
audio_parameters_.set_channels_for_discrete(
stream->audio_decoder_config().channels());
@@ -509,7 +512,7 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
: stream->audio_decoder_config().channel_layout();
audio_parameters_.Reset(hw_params.format(), renderer_channel_layout,
- sample_rate, hw_params.bits_per_sample(),
+ sample_rate,
media::AudioLatency::GetHighLatencyBufferSize(
sample_rate, preferred_buffer_size));
}
diff --git a/chromium/media/renderers/audio_renderer_impl_unittest.cc b/chromium/media/renderers/audio_renderer_impl_unittest.cc
index 32ffa9f8059..9f27429a4e1 100644
--- a/chromium/media/renderers/audio_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/audio_renderer_impl_unittest.cc
@@ -113,7 +113,6 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
: hardware_params_(AudioParameters::AUDIO_PCM_LOW_LATENCY,
kChannelLayout,
kOutputSamplesPerSecond,
- SampleFormatToBytesPerChannel(kSampleFormat) * 8,
512),
sink_(new FakeAudioRendererSink(hardware_params_)),
demuxer_stream_(DemuxerStream::AUDIO),
@@ -130,7 +129,6 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
AudioParameters out_params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
kChannelLayout,
kOutputSamplesPerSecond,
- SampleFormatToBytesPerChannel(kSampleFormat) * 8,
512);
renderer_.reset(new AudioRendererImpl(
message_loop_.task_runner(), sink_.get(),
@@ -141,7 +139,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
}
- virtual ~AudioRendererImplTest() {
+ ~AudioRendererImplTest() override {
SCOPED_TRACE("~AudioRendererImplTest()");
}
@@ -218,7 +216,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
SetMediaClient(&media_client_);
hardware_params_.Reset(AudioParameters::AUDIO_BITSTREAM_EAC3,
- kChannelLayout, kOutputSamplesPerSecond, 1024, 512);
+ kChannelLayout, kOutputSamplesPerSecond, 512);
sink_ = new FakeAudioRendererSink(hardware_params_);
AudioDecoderConfig audio_config(kCodecAC3, kSampleFormatEac3,
kChannelLayout, kInputSamplesPerSecond,
@@ -722,10 +720,9 @@ TEST_F(AudioRendererImplTest, CapacityAppropriateForHardware) {
EXPECT_GT(buffer_capacity().value, hardware_params_.frames_per_buffer());
// Verify in the no-config-changes-expected case.
- ConfigureBasicRenderer(AudioParameters(
- AudioParameters::AUDIO_PCM_LOW_LATENCY, kChannelLayout,
- kOutputSamplesPerSecond, SampleFormatToBytesPerChannel(kSampleFormat) * 8,
- 1024 * 15));
+ ConfigureBasicRenderer(AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ kChannelLayout,
+ kOutputSamplesPerSecond, 1024 * 15));
Initialize();
EXPECT_GT(buffer_capacity().value, hardware_params_.frames_per_buffer());
@@ -736,12 +733,10 @@ TEST_F(AudioRendererImplTest, CapacityAppropriateForHardware) {
TEST_F(AudioRendererImplTest, ChannelMask) {
AudioParameters hw_params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
CHANNEL_LAYOUT_7_1, kOutputSamplesPerSecond,
- SampleFormatToBytesPerChannel(kSampleFormat) * 8,
1024);
ConfigureConfigChangeRenderer(
AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- CHANNEL_LAYOUT_STEREO, kOutputSamplesPerSecond,
- SampleFormatToBytesPerChannel(kSampleFormat) * 8, 1024),
+ CHANNEL_LAYOUT_STEREO, kOutputSamplesPerSecond, 1024),
hw_params);
Initialize();
std::vector<bool> mask = channel_mask();
diff --git a/chromium/media/renderers/default_decoder_factory.cc b/chromium/media/renderers/default_decoder_factory.cc
new file mode 100644
index 00000000000..9b1254ae4e1
--- /dev/null
+++ b/chromium/media/renderers/default_decoder_factory.cc
@@ -0,0 +1,131 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/renderers/default_decoder_factory.h"
+
+#include <memory>
+
+#include "base/feature_list.h"
+#include "base/single_thread_task_runner.h"
+#include "build/build_config.h"
+#include "build/buildflag.h"
+#include "media/base/decoder_factory.h"
+#include "media/base/media_log.h"
+#include "media/base/media_switches.h"
+#include "media/filters/gpu_video_decoder.h"
+#include "media/media_buildflags.h"
+#include "media/video/gpu_video_accelerator_factories.h"
+#include "third_party/libaom/av1_buildflags.h"
+
+#if !defined(OS_ANDROID)
+#include "media/filters/decrypting_audio_decoder.h"
+#include "media/filters/decrypting_video_decoder.h"
+#endif
+
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+#include "media/filters/aom_video_decoder.h"
+#endif
+
+#if BUILDFLAG(ENABLE_FFMPEG)
+#include "media/filters/ffmpeg_audio_decoder.h"
+#endif
+
+#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
+#include "media/filters/ffmpeg_video_decoder.h"
+#endif
+
+#if BUILDFLAG(ENABLE_LIBVPX)
+#include "media/filters/vpx_video_decoder.h"
+#endif
+
+namespace media {
+
+DefaultDecoderFactory::DefaultDecoderFactory(
+ std::unique_ptr<DecoderFactory> external_decoder_factory)
+ : external_decoder_factory_(std::move(external_decoder_factory)) {}
+
+DefaultDecoderFactory::~DefaultDecoderFactory() = default;
+
+void DefaultDecoderFactory::CreateAudioDecoders(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
+ std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) {
+#if !defined(OS_ANDROID)
+ // DecryptingAudioDecoder is only needed in External Clear Key testing to
+ // cover the audio decrypt-and-decode path.
+ if (base::FeatureList::IsEnabled(media::kExternalClearKeyForTesting)) {
+ audio_decoders->push_back(
+ std::make_unique<DecryptingAudioDecoder>(task_runner, media_log));
+ }
+#endif
+
+#if BUILDFLAG(ENABLE_FFMPEG)
+ audio_decoders->push_back(
+ std::make_unique<FFmpegAudioDecoder>(task_runner, media_log));
+#endif
+
+ if (external_decoder_factory_) {
+ external_decoder_factory_->CreateAudioDecoders(task_runner, media_log,
+ audio_decoders);
+ }
+}
+
+void DefaultDecoderFactory::CreateVideoDecoders(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ GpuVideoAcceleratorFactories* gpu_factories,
+ MediaLog* media_log,
+ const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space,
+ std::vector<std::unique_ptr<VideoDecoder>>* video_decoders) {
+#if !defined(OS_ANDROID)
+ video_decoders->push_back(
+ std::make_unique<DecryptingVideoDecoder>(task_runner, media_log));
+#endif
+
+  // Prefer an external decoder since one will only exist if it is hardware
+ // accelerated.
+ // Remember that |gpu_factories| will be null if HW video decode is turned
+ // off in chrome://flags.
+ if (gpu_factories) {
+ // |gpu_factories_| requires that its entry points be called on its
+ // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the
+ // factories, require that their message loops are identical.
+ DCHECK_EQ(gpu_factories->GetTaskRunner(), task_runner);
+
+ if (external_decoder_factory_) {
+ external_decoder_factory_->CreateVideoDecoders(
+ task_runner, gpu_factories, media_log, request_overlay_info_cb,
+ target_color_space, video_decoders);
+ }
+ // MojoVideoDecoder replaces any VDA for this platform when it's enabled.
+ bool enable_vda = !base::FeatureList::IsEnabled(media::kMojoVideoDecoder);
+#if defined(OS_WIN)
+ // D3D11VideoDecoder doesn't support as many cases as dxva yet, so don't
+ // turn off hw decode just because it's enabled.
+ // TODO(crbug.com/832171): Move the check for the most common unsupported
+ // cases for D3D11VideoDecoder to the renderer, to save an IPC hop.
+ enable_vda = true;
+#endif
+ if (enable_vda) {
+ video_decoders->push_back(std::make_unique<GpuVideoDecoder>(
+ gpu_factories, request_overlay_info_cb, target_color_space,
+ media_log));
+ }
+ }
+
+#if BUILDFLAG(ENABLE_LIBVPX)
+ video_decoders->push_back(std::make_unique<OffloadingVpxVideoDecoder>());
+#endif
+
+#if BUILDFLAG(ENABLE_AV1_DECODER)
+ if (base::FeatureList::IsEnabled(kAv1Decoder))
+ video_decoders->push_back(std::make_unique<AomVideoDecoder>(media_log));
+#endif
+
+#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
+ video_decoders->push_back(std::make_unique<FFmpegVideoDecoder>(media_log));
+#endif
+}
+
+} // namespace media
diff --git a/chromium/media/renderers/default_decoder_factory.h b/chromium/media/renderers/default_decoder_factory.h
new file mode 100644
index 00000000000..1b1305d3617
--- /dev/null
+++ b/chromium/media/renderers/default_decoder_factory.h
@@ -0,0 +1,44 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_RENDERERS_DEFAULT_DECODER_FACTORY_H_
+#define MEDIA_RENDERERS_DEFAULT_DECODER_FACTORY_H_
+
+#include <memory>
+
+#include "media/base/decoder_factory.h"
+#include "media/base/media_export.h"
+
+namespace media {
+
+class MEDIA_EXPORT DefaultDecoderFactory : public DecoderFactory {
+ public:
+  // |external_decoder_factory| is an optional decoder factory that provides
+ // additional decoders.
+ explicit DefaultDecoderFactory(
+ std::unique_ptr<DecoderFactory> external_decoder_factory);
+ ~DefaultDecoderFactory() final;
+
+ void CreateAudioDecoders(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
+ std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) final;
+
+ void CreateVideoDecoders(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ GpuVideoAcceleratorFactories* gpu_factories,
+ MediaLog* media_log,
+ const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space,
+ std::vector<std::unique_ptr<VideoDecoder>>* video_decoders) final;
+
+ private:
+ std::unique_ptr<DecoderFactory> external_decoder_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(DefaultDecoderFactory);
+};
+
+} // namespace media
+
+#endif // MEDIA_RENDERERS_DEFAULT_DECODER_FACTORY_H_
diff --git a/chromium/media/renderers/default_renderer_factory.cc b/chromium/media/renderers/default_renderer_factory.cc
index 3a224d2e80b..f7c2f8da402 100644
--- a/chromium/media/renderers/default_renderer_factory.cc
+++ b/chromium/media/renderers/default_renderer_factory.cc
@@ -8,42 +8,12 @@
#include <utility>
#include "base/bind.h"
-#include "base/feature_list.h"
-#include "base/single_thread_task_runner.h"
-#include "build/build_config.h"
-#include "build/buildflag.h"
#include "media/base/decoder_factory.h"
-#include "media/base/media_log.h"
-#include "media/base/media_switches.h"
-#include "media/filters/gpu_video_decoder.h"
-#include "media/media_buildflags.h"
#include "media/renderers/audio_renderer_impl.h"
#include "media/renderers/renderer_impl.h"
#include "media/renderers/video_renderer_impl.h"
#include "media/video/gpu_memory_buffer_video_frame_pool.h"
#include "media/video/gpu_video_accelerator_factories.h"
-#include "third_party/libaom/av1_features.h"
-
-#if !defined(OS_ANDROID)
-#include "media/filters/decrypting_audio_decoder.h"
-#include "media/filters/decrypting_video_decoder.h"
-#endif
-
-#if BUILDFLAG(ENABLE_AV1_DECODER)
-#include "media/filters/aom_video_decoder.h"
-#endif
-
-#if BUILDFLAG(ENABLE_FFMPEG)
-#include "media/filters/ffmpeg_audio_decoder.h"
-#endif
-
-#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
-#include "media/filters/ffmpeg_video_decoder.h"
-#endif
-
-#if BUILDFLAG(ENABLE_LIBVPX)
-#include "media/filters/vpx_video_decoder.h"
-#endif
namespace media {
@@ -53,7 +23,9 @@ DefaultRendererFactory::DefaultRendererFactory(
const GetGpuFactoriesCB& get_gpu_factories_cb)
: media_log_(media_log),
decoder_factory_(decoder_factory),
- get_gpu_factories_cb_(get_gpu_factories_cb) {}
+ get_gpu_factories_cb_(get_gpu_factories_cb) {
+ DCHECK(decoder_factory_);
+}
DefaultRendererFactory::~DefaultRendererFactory() = default;
@@ -63,21 +35,8 @@ DefaultRendererFactory::CreateAudioDecoders(
// Create our audio decoders and renderer.
std::vector<std::unique_ptr<AudioDecoder>> audio_decoders;
-#if !defined(OS_ANDROID)
- audio_decoders.push_back(
- std::make_unique<DecryptingAudioDecoder>(media_task_runner, media_log_));
-#endif
-
-#if BUILDFLAG(ENABLE_FFMPEG)
- audio_decoders.push_back(
- std::make_unique<FFmpegAudioDecoder>(media_task_runner, media_log_));
-#endif
-
- // Use an external decoder only if we cannot otherwise decode in the
- // renderer.
- if (decoder_factory_)
- decoder_factory_->CreateAudioDecoders(media_task_runner, &audio_decoders);
-
+ decoder_factory_->CreateAudioDecoders(media_task_runner, media_log_,
+ &audio_decoders);
return audio_decoders;
}
@@ -87,51 +46,12 @@ DefaultRendererFactory::CreateVideoDecoders(
const RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space,
GpuVideoAcceleratorFactories* gpu_factories) {
- // TODO(crbug.com/789597): Move this (and CreateAudioDecoders) into a decoder
- // factory, and just call |decoder_factory_| here.
-
// Create our video decoders and renderer.
std::vector<std::unique_ptr<VideoDecoder>> video_decoders;
-#if !defined(OS_ANDROID)
- video_decoders.push_back(
- std::make_unique<DecryptingVideoDecoder>(media_task_runner, media_log_));
-#endif
-
- // Prefer an external decoder since one will only exist if it is hardware
- // accelerated.
- if (gpu_factories) {
- // |gpu_factories_| requires that its entry points be called on its
- // |GetTaskRunner()|. Since |pipeline_| will own decoders created from the
- // factories, require that their message loops are identical.
- DCHECK_EQ(gpu_factories->GetTaskRunner(), media_task_runner);
-
- if (decoder_factory_) {
- decoder_factory_->CreateVideoDecoders(media_task_runner, gpu_factories,
- media_log_, request_overlay_info_cb,
- &video_decoders);
- }
-
- // MojoVideoDecoder replaces any VDA for this platform when it's enabled.
- if (!base::FeatureList::IsEnabled(media::kMojoVideoDecoder)) {
- video_decoders.push_back(std::make_unique<GpuVideoDecoder>(
- gpu_factories, request_overlay_info_cb, target_color_space,
- media_log_));
- }
- }
-
-#if BUILDFLAG(ENABLE_LIBVPX)
- video_decoders.push_back(std::make_unique<OffloadingVpxVideoDecoder>());
-#endif
-
-#if BUILDFLAG(ENABLE_AV1_DECODER)
- if (base::FeatureList::IsEnabled(kAv1Decoder))
- video_decoders.push_back(std::make_unique<AomVideoDecoder>(media_log_));
-#endif
-
-#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
- video_decoders.push_back(std::make_unique<FFmpegVideoDecoder>(media_log_));
-#endif
+ decoder_factory_->CreateVideoDecoders(media_task_runner, gpu_factories,
+ media_log_, request_overlay_info_cb,
+ target_color_space, &video_decoders);
return video_decoders;
}
@@ -162,8 +82,8 @@ std::unique_ptr<Renderer> DefaultRendererFactory::CreateRenderer(
gpu_factories = get_gpu_factories_cb_.Run();
std::unique_ptr<GpuMemoryBufferVideoFramePool> gmb_pool;
- if (gpu_factories &&
- gpu_factories->ShouldUseGpuMemoryBuffersForVideoFrames()) {
+ if (gpu_factories && gpu_factories->ShouldUseGpuMemoryBuffersForVideoFrames(
+ false /* for_media_stream */)) {
gmb_pool = std::make_unique<GpuMemoryBufferVideoFramePool>(
std::move(media_task_runner), std::move(worker_task_runner),
gpu_factories);
diff --git a/chromium/media/renderers/flinging_renderer_client_factory.cc b/chromium/media/renderers/flinging_renderer_client_factory.cc
new file mode 100644
index 00000000000..0343b957bba
--- /dev/null
+++ b/chromium/media/renderers/flinging_renderer_client_factory.cc
@@ -0,0 +1,41 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/renderers/flinging_renderer_client_factory.h"
+
+#include "base/logging.h"
+#include "media/base/overlay_info.h"
+
+namespace media {
+
+FlingingRendererClientFactory::FlingingRendererClientFactory(
+ std::unique_ptr<RendererFactory> mojo_flinging_factory,
+ std::unique_ptr<RemotePlaybackClientWrapper> remote_playback_client)
+ : mojo_flinging_factory_(std::move(mojo_flinging_factory)),
+ remote_playback_client_(std::move(remote_playback_client)) {}
+
+FlingingRendererClientFactory::~FlingingRendererClientFactory() = default;
+
+std::unique_ptr<Renderer> FlingingRendererClientFactory::CreateRenderer(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const scoped_refptr<base::TaskRunner>& worker_task_runner,
+ AudioRendererSink* audio_renderer_sink,
+ VideoRendererSink* video_renderer_sink,
+ const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space) {
+ DCHECK(IsFlingingActive());
+ return mojo_flinging_factory_->CreateRenderer(
+ media_task_runner, worker_task_runner, audio_renderer_sink,
+ video_renderer_sink, request_overlay_info_cb, target_color_space);
+}
+
+std::string FlingingRendererClientFactory::GetActivePresentationId() {
+ return remote_playback_client_->GetActivePresentationId();
+}
+
+bool FlingingRendererClientFactory::IsFlingingActive() {
+ return !GetActivePresentationId().empty();
+}
+
+} // namespace media
diff --git a/chromium/media/renderers/flinging_renderer_client_factory.h b/chromium/media/renderers/flinging_renderer_client_factory.h
new file mode 100644
index 00000000000..73a28a562c3
--- /dev/null
+++ b/chromium/media/renderers/flinging_renderer_client_factory.h
@@ -0,0 +1,55 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_RENDERERS_FLINGING_RENDERER_CLIENT_FACTORY_H_
+#define MEDIA_RENDERERS_FLINGING_RENDERER_CLIENT_FACTORY_H_
+
+#include "media/base/media_export.h"
+#include "media/base/renderer_factory.h"
+#include "media/renderers/remote_playback_client_wrapper.h"
+
+namespace media {
+
+// Creates a renderer for media flinging.
+// The FRCF uses a MojoRendererFactory to create a FlingingRenderer in the
+// browser process. The actual renderer returned by the FRCF is a MojoRenderer
+// directly (as opposed to a dedicated FlingingRendererClient), because all the
+// renderer needs to do is forward calls to the FlingingRenderer in the browser.
+class MEDIA_EXPORT FlingingRendererClientFactory : public RendererFactory {
+ public:
+ // |mojo_flinging_factory| should be created using
+ // HostedRendererType::kFlinging, and GetActivePresentationId()
+ // should be given to it through SetGetTypeSpecificIdCB().
+ FlingingRendererClientFactory(
+ std::unique_ptr<RendererFactory> mojo_flinging_factory,
+ std::unique_ptr<RemotePlaybackClientWrapper> remote_playback_client);
+ ~FlingingRendererClientFactory() override;
+
+ std::unique_ptr<Renderer> CreateRenderer(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const scoped_refptr<base::TaskRunner>& worker_task_runner,
+ AudioRendererSink* audio_renderer_sink,
+ VideoRendererSink* video_renderer_sink,
+ const RequestOverlayInfoCB& request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space) override;
+
+ // Returns whether media flinging has started, based off of whether the
+ // |remote_playback_client_| has a presentation ID or not. Called by
+ // RendererFactorySelector to determine when to create a FlingingRenderer.
+ bool IsFlingingActive();
+
+ // Used by the |mojo_flinging_factory_| to retrieve the latest presentation ID
+ // when CreateRenderer() is called.
+ std::string GetActivePresentationId();
+
+ private:
+ std::unique_ptr<RendererFactory> mojo_flinging_factory_;
+ std::unique_ptr<RemotePlaybackClientWrapper> remote_playback_client_;
+
+ DISALLOW_COPY_AND_ASSIGN(FlingingRendererClientFactory);
+};
+
+} // namespace media
+
+#endif // MEDIA_RENDERERS_FLINGING_RENDERER_CLIENT_FACTORY_H_
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index dd599caeb7b..7e9f62666be 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -19,7 +19,6 @@
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/data_buffer.h"
#include "media/base/video_frame.h"
-#include "skia/ext/texture_handle.h"
#include "third_party/libyuv/include/libyuv.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkImageGenerator.h"
@@ -898,18 +897,19 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
if (!UpdateLastImage(video_frame, context_3d))
return false;
- const GrGLTextureInfo* texture_info =
- skia::GrBackendObjectToGrGLTextureInfo(
- last_image_.GetSkImage()->getTextureHandle(true));
-
- if (!texture_info)
+ GrBackendTexture backend_texture =
+ last_image_.GetSkImage()->getBackendTexture(true);
+ if (!backend_texture.isValid())
+ return false;
+ GrGLTextureInfo texture_info;
+ if (!backend_texture.getGLTextureInfo(&texture_info))
return false;
gpu::gles2::GLES2Interface* canvas_gl = context_3d.gl;
gpu::MailboxHolder mailbox_holder;
- mailbox_holder.texture_target = texture_info->fTarget;
+ mailbox_holder.texture_target = texture_info.fTarget;
canvas_gl->GenMailboxCHROMIUM(mailbox_holder.mailbox.name);
- canvas_gl->ProduceTextureDirectCHROMIUM(texture_info->fID,
+ canvas_gl->ProduceTextureDirectCHROMIUM(texture_info.fID,
mailbox_holder.mailbox.name);
// Wait for mailbox creation on canvas context before consuming it and
@@ -1021,14 +1021,14 @@ void PaintCanvasVideoRenderer::ResetCache() {
DCHECK(thread_checker_.CalledOnValidThread());
// Clear cached values.
last_image_ = cc::PaintImage();
- last_timestamp_ = kNoTimestamp;
+ last_id_.reset();
}
bool PaintCanvasVideoRenderer::UpdateLastImage(
const scoped_refptr<VideoFrame>& video_frame,
const Context3D& context_3d) {
- if (!last_image_ || video_frame->timestamp() != last_timestamp_ ||
- !last_image_.GetSkImage()->getTextureHandle(true)) {
+ if (!last_image_ || video_frame->unique_id() != last_id_ ||
+ !last_image_.GetSkImage()->getBackendTexture(true).isValid()) {
ResetCache();
auto paint_image_builder =
@@ -1062,7 +1062,7 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
CorrectLastImageDimensions(gfx::RectToSkIRect(video_frame->visible_rect()));
if (!last_image_) // Couldn't create the SkImage.
return false;
- last_timestamp_ = video_frame->timestamp();
+ last_id_ = video_frame->unique_id();
}
last_image_deleting_timer_.Reset();
DCHECK(!!last_image_);
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.h b/chromium/media/renderers/paint_canvas_video_renderer.h
index 821443bc6de..d6b559ceb79 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.h
+++ b/chromium/media/renderers/paint_canvas_video_renderer.h
@@ -10,6 +10,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
+#include "base/optional.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "base/timer/timer.h"
@@ -160,8 +161,10 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// Last image used to draw to the canvas.
cc::PaintImage last_image_;
- // Timestamp of the videoframe used to generate |last_image_|.
- base::TimeDelta last_timestamp_ = media::kNoTimestamp;
+
+ // VideoFrame::unique_id() of the videoframe used to generate |last_image_|.
+ base::Optional<int> last_id_;
+
// If |last_image_| is not used for a while, it's deleted to save memory.
base::DelayTimer last_image_deleting_timer_;
// Stable paint image id to provide to draw image calls.
diff --git a/chromium/media/renderers/remote_playback_client_wrapper.h b/chromium/media/renderers/remote_playback_client_wrapper.h
new file mode 100644
index 00000000000..878c93516bd
--- /dev/null
+++ b/chromium/media/renderers/remote_playback_client_wrapper.h
@@ -0,0 +1,22 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_RENDERERS_REMOTE_PLAYBACK_CLIENT_WRAPPER_H_
+#define MEDIA_RENDERERS_REMOTE_PLAYBACK_CLIENT_WRAPPER_H_
+
+namespace media {
+
+// Wraps a WebRemotePlaybackClient to expose only the methods used by the
+// FlingingRendererClientFactory. This avoids dependencies on the blink layer.
+class RemotePlaybackClientWrapper {
+ public:
+ RemotePlaybackClientWrapper() = default;
+ virtual ~RemotePlaybackClientWrapper() = default;
+
+ virtual std::string GetActivePresentationId() = 0;
+};
+
+} // namespace media
+
+#endif // MEDIA_RENDERERS_REMOTE_PLAYBACK_CLIENT_WRAPPER_H_
diff --git a/chromium/media/renderers/renderer_impl.cc b/chromium/media/renderers/renderer_impl.cc
index 88ff00ca421..5233ddf8975 100644
--- a/chromium/media/renderers/renderer_impl.cc
+++ b/chromium/media/renderers/renderer_impl.cc
@@ -7,6 +7,7 @@
#include <utility>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
@@ -31,8 +32,6 @@ namespace media {
// See |video_underflow_threshold_|.
static const int kDefaultVideoUnderflowThresholdMs = 3000;
-static const int kAudioRestartUnderflowThresholdMs = 2000;
-
class RendererImpl::RendererClientInternal final : public RendererClient {
public:
RendererClientInternal(DemuxerStream::Type type, RendererImpl* renderer)
@@ -95,11 +94,15 @@ RendererImpl::RendererImpl(
video_buffering_state_(BUFFERING_HAVE_NOTHING),
audio_ended_(false),
video_ended_(false),
+ audio_playing_(false),
+ video_playing_(false),
cdm_context_(nullptr),
underflow_disabled_for_testing_(false),
clockless_video_playback_enabled_for_testing_(false),
video_underflow_threshold_(
base::TimeDelta::FromMilliseconds(kDefaultVideoUnderflowThresholdMs)),
+ pending_audio_track_change_(false),
+ pending_video_track_change_(false),
weak_factory_(this) {
weak_this_ = weak_factory_.GetWeakPtr();
DVLOG(1) << __func__;
@@ -187,6 +190,7 @@ void RendererImpl::Flush(const base::Closure& flush_cb) {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(flush_cb_.is_null());
+ DCHECK(!(pending_audio_track_change_ || pending_video_track_change_));
if (state_ == STATE_FLUSHED) {
task_runner_->PostTask(FROM_HERE, flush_cb);
@@ -201,24 +205,8 @@ void RendererImpl::Flush(const base::Closure& flush_cb) {
flush_cb_ = flush_cb;
state_ = STATE_FLUSHING;
- // If we are currently handling a media stream status change, then postpone
- // Flush until after that's done (because stream status changes also flush
- // audio_renderer_/video_renderer_ and they need to be restarted before they
- // can be flushed again). OnStreamRestartCompleted will resume Flush
- // processing after audio/video restart has completed and there are no other
- // pending stream status changes.
- // TODO(dalecurtis, servolk) We should abort the StartPlaying call post Flush
- // to avoid unnecessary work.
- if ((restarting_audio_ || restarting_video_) &&
- pending_flush_for_stream_change_) {
- pending_actions_.push_back(
- base::Bind(&RendererImpl::FlushInternal, weak_this_));
- return;
- }
-
// If a stream restart is pending, this Flush() will complete it. Upon flush
// completion any pending actions will be executed as well.
-
FlushInternal();
}
@@ -234,10 +222,14 @@ void RendererImpl::StartPlayingFrom(base::TimeDelta time) {
time_source_->SetMediaTime(time);
state_ = STATE_PLAYING;
- if (audio_renderer_)
+ if (audio_renderer_) {
+ audio_playing_ = true;
audio_renderer_->StartPlaying();
- if (video_renderer_)
+ }
+ if (video_renderer_) {
+ video_playing_ = true;
video_renderer_->StartPlayingFrom(time);
+ }
}
void RendererImpl::SetPlaybackRate(double playback_rate) {
@@ -274,7 +266,7 @@ base::TimeDelta RendererImpl::GetMediaTime() {
// threads.
{
base::AutoLock lock(restarting_audio_lock_);
- if (restarting_audio_) {
+ if (pending_audio_track_change_) {
DCHECK_NE(kNoTimestamp, restarting_audio_time_);
return restarting_audio_time_;
}
@@ -358,6 +350,7 @@ void RendererImpl::InitializeAudioRenderer() {
// pick the first enabled stream to preserve the existing behavior.
DemuxerStream* audio_stream =
media_resource_->GetFirstStream(DemuxerStream::AUDIO);
+
if (!audio_stream) {
audio_renderer_.reset();
task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK));
@@ -408,6 +401,7 @@ void RendererImpl::InitializeVideoRenderer() {
// pick the first enabled stream to preserve the existing behavior.
DemuxerStream* video_stream =
media_resource_->GetFirstStream(DemuxerStream::VIDEO);
+
if (!video_stream) {
video_renderer_.reset();
task_runner_->PostTask(FROM_HERE, base::Bind(done_cb, PIPELINE_OK));
@@ -444,9 +438,6 @@ void RendererImpl::OnVideoRendererInitializeDone(PipelineStatus status) {
return;
}
- media_resource_->SetStreamStatusChangeCB(
- base::Bind(&RendererImpl::OnStreamStatusChanged, weak_this_));
-
if (audio_renderer_) {
time_source_ = audio_renderer_->GetTimeSource();
} else if (!time_source_) {
@@ -473,19 +464,19 @@ void RendererImpl::FlushInternal() {
FlushAudioRenderer();
}
+// TODO(tmathmeyer) Combine this functionality with track switching flushing.
void RendererImpl::FlushAudioRenderer() {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_FLUSHING);
DCHECK(!flush_cb_.is_null());
- if (!audio_renderer_) {
+ if (!audio_renderer_ || !audio_playing_) {
OnAudioRendererFlushDone();
- return;
+ } else {
+ audio_renderer_->Flush(base::BindRepeating(
+ &RendererImpl::OnAudioRendererFlushDone, weak_this_));
}
-
- audio_renderer_->Flush(
- base::Bind(&RendererImpl::OnAudioRendererFlushDone, weak_this_));
}
void RendererImpl::OnAudioRendererFlushDone() {
@@ -503,9 +494,10 @@ void RendererImpl::OnAudioRendererFlushDone() {
// If we had a deferred video renderer underflow prior to the flush, it should
// have been cleared by the audio renderer changing to BUFFERING_HAVE_NOTHING.
DCHECK(deferred_video_underflow_cb_.IsCancelled());
-
DCHECK_EQ(audio_buffering_state_, BUFFERING_HAVE_NOTHING);
audio_ended_ = false;
+ audio_playing_ = false;
+
FlushVideoRenderer();
}
@@ -515,13 +507,12 @@ void RendererImpl::FlushVideoRenderer() {
DCHECK_EQ(state_, STATE_FLUSHING);
DCHECK(!flush_cb_.is_null());
- if (!video_renderer_) {
+ if (!video_renderer_ || !video_playing_) {
OnVideoRendererFlushDone();
- return;
+ } else {
+ video_renderer_->Flush(base::BindRepeating(
+ &RendererImpl::OnVideoRendererFlushDone, weak_this_));
}
-
- video_renderer_->Flush(
- base::Bind(&RendererImpl::OnVideoRendererFlushDone, weak_this_));
}
void RendererImpl::OnVideoRendererFlushDone() {
@@ -538,84 +529,15 @@ void RendererImpl::OnVideoRendererFlushDone() {
DCHECK_EQ(video_buffering_state_, BUFFERING_HAVE_NOTHING);
video_ended_ = false;
+ video_playing_ = false;
state_ = STATE_FLUSHED;
base::ResetAndReturn(&flush_cb_).Run();
-
- if (!pending_actions_.empty()) {
- base::Closure closure = pending_actions_.front();
- pending_actions_.pop_front();
- closure.Run();
- }
-}
-
-void RendererImpl::OnStreamStatusChanged(DemuxerStream* stream,
- bool enabled,
- base::TimeDelta time) {
- DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK(stream);
- bool video = (stream->type() == DemuxerStream::VIDEO);
- DVLOG(1) << __func__ << (video ? " video" : " audio") << " stream=" << stream
- << " enabled=" << enabled << " time=" << time.InSecondsF();
-
- if ((state_ != STATE_PLAYING && state_ != STATE_FLUSHING &&
- state_ != STATE_FLUSHED) ||
- (audio_ended_ && video_ended_))
- return;
-
- if (restarting_audio_ || restarting_video_ || state_ == STATE_FLUSHING) {
- DVLOG(3) << __func__ << ": postponed stream " << stream
- << " status change handling.";
- pending_actions_.push_back(base::Bind(&RendererImpl::OnStreamStatusChanged,
- weak_this_, stream, enabled, time));
- return;
- }
-
- DCHECK(state_ == STATE_PLAYING || state_ == STATE_FLUSHED);
- if (stream->type() == DemuxerStream::VIDEO) {
- DCHECK(video_renderer_);
- restarting_video_ = true;
- base::Closure handle_track_status_cb =
- base::Bind(stream == current_video_stream_
- ? &RendererImpl::RestartVideoRenderer
- : &RendererImpl::ReinitializeVideoRenderer,
- weak_this_, stream, time);
- if (state_ == STATE_FLUSHED) {
- handle_track_status_cb.Run();
- } else {
- pending_flush_for_stream_change_ = true;
- video_renderer_->Flush(handle_track_status_cb);
- }
- } else if (stream->type() == DemuxerStream::AUDIO) {
- DCHECK(audio_renderer_);
- DCHECK(time_source_);
- {
- base::AutoLock lock(restarting_audio_lock_);
- restarting_audio_time_ = time;
- restarting_audio_ = true;
- }
- base::Closure handle_track_status_cb =
- base::Bind(stream == current_audio_stream_
- ? &RendererImpl::RestartAudioRenderer
- : &RendererImpl::ReinitializeAudioRenderer,
- weak_this_, stream, time);
- if (state_ == STATE_FLUSHED) {
- handle_track_status_cb.Run();
- return;
- }
- // Stop ticking (transition into paused state) in audio renderer before
- // calling Flush, since after Flush we are going to restart playback by
- // calling audio renderer StartPlaying which would fail in playing state.
- if (time_ticking_) {
- time_ticking_ = false;
- time_source_->StopTicking();
- }
- pending_flush_for_stream_change_ = true;
- audio_renderer_->Flush(handle_track_status_cb);
- }
}
-void RendererImpl::ReinitializeAudioRenderer(DemuxerStream* stream,
- base::TimeDelta time) {
+void RendererImpl::ReinitializeAudioRenderer(
+ DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure reinitialize_completed_cb) {
DVLOG(2) << __func__ << " stream=" << stream << " time=" << time.InSecondsF();
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_NE(stream, current_audio_stream_);
@@ -623,25 +545,31 @@ void RendererImpl::ReinitializeAudioRenderer(DemuxerStream* stream,
current_audio_stream_ = stream;
audio_renderer_->Initialize(
stream, cdm_context_, audio_renderer_client_.get(),
- base::Bind(&RendererImpl::OnAudioRendererReinitialized, weak_this_,
- stream, time));
+ base::BindRepeating(&RendererImpl::OnAudioRendererReinitialized,
+ weak_this_, stream, time,
+ base::Passed(&reinitialize_completed_cb)));
}
-void RendererImpl::OnAudioRendererReinitialized(DemuxerStream* stream,
- base::TimeDelta time,
- PipelineStatus status) {
+void RendererImpl::OnAudioRendererReinitialized(
+ DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure reinitialize_completed_cb,
+ PipelineStatus status) {
DVLOG(2) << __func__ << ": status=" << status;
DCHECK_EQ(stream, current_audio_stream_);
if (status != PIPELINE_OK) {
+ std::move(reinitialize_completed_cb).Run();
OnError(status);
return;
}
- RestartAudioRenderer(stream, time);
+ RestartAudioRenderer(stream, time, std::move(reinitialize_completed_cb));
}
-void RendererImpl::ReinitializeVideoRenderer(DemuxerStream* stream,
- base::TimeDelta time) {
+void RendererImpl::ReinitializeVideoRenderer(
+ DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure reinitialize_completed_cb) {
DVLOG(2) << __func__ << " stream=" << stream << " time=" << time.InSecondsF();
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_NE(stream, current_video_stream_);
@@ -650,67 +578,78 @@ void RendererImpl::ReinitializeVideoRenderer(DemuxerStream* stream,
video_renderer_->OnTimeStopped();
video_renderer_->Initialize(
stream, cdm_context_, video_renderer_client_.get(),
- base::Bind(&RendererImpl::GetWallClockTimes, base::Unretained(this)),
- base::Bind(&RendererImpl::OnVideoRendererReinitialized, weak_this_,
- stream, time));
-}
-
-void RendererImpl::OnVideoRendererReinitialized(DemuxerStream* stream,
- base::TimeDelta time,
- PipelineStatus status) {
+ base::BindRepeating(&RendererImpl::GetWallClockTimes,
+ base::Unretained(this)),
+ base::BindRepeating(&RendererImpl::OnVideoRendererReinitialized,
+ weak_this_, stream, time,
+ base::Passed(&reinitialize_completed_cb)));
+}
+
+void RendererImpl::OnVideoRendererReinitialized(
+ DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure reinitialize_completed_cb,
+ PipelineStatus status) {
DVLOG(2) << __func__ << ": status=" << status;
DCHECK_EQ(stream, current_video_stream_);
if (status != PIPELINE_OK) {
+ std::move(reinitialize_completed_cb).Run();
OnError(status);
return;
}
- RestartVideoRenderer(stream, time);
+ RestartVideoRenderer(stream, time, std::move(reinitialize_completed_cb));
}
-void RendererImpl::RestartAudioRenderer(DemuxerStream* stream,
- base::TimeDelta time) {
+void RendererImpl::RestartAudioRenderer(
+ DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure restart_completed_cb) {
DVLOG(2) << __func__ << " stream=" << stream << " time=" << time.InSecondsF();
DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK(state_ == STATE_PLAYING || state_ == STATE_FLUSHED ||
- state_ == STATE_FLUSHING);
- DCHECK(time_source_);
DCHECK(audio_renderer_);
DCHECK_EQ(stream, current_audio_stream_);
+ DCHECK(state_ == STATE_PLAYING || state_ == STATE_FLUSHED ||
+ state_ == STATE_FLUSHING);
- audio_ended_ = false;
if (state_ == STATE_FLUSHED) {
// If we are in the FLUSHED state, then we are done. The audio renderer will
// be restarted by a subsequent RendererImpl::StartPlayingFrom call.
- OnStreamRestartCompleted();
- } else {
- // Stream restart will be completed when the audio renderer decodes enough
- // data and reports HAVE_ENOUGH to HandleRestartedStreamBufferingChanges.
- pending_flush_for_stream_change_ = false;
- audio_renderer_->StartPlaying();
+ std::move(restart_completed_cb).Run();
+ return;
+ }
+
+ audio_renderer_->StartPlaying();
+ {
+ base::AutoLock lock(restarting_audio_lock_);
+ audio_playing_ = true;
+ pending_audio_track_change_ = false;
}
+ std::move(restart_completed_cb).Run();
}
-void RendererImpl::RestartVideoRenderer(DemuxerStream* stream,
- base::TimeDelta time) {
+void RendererImpl::RestartVideoRenderer(
+ DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure restart_completed_cb) {
DVLOG(2) << __func__ << " stream=" << stream << " time=" << time.InSecondsF();
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(video_renderer_);
+ DCHECK_EQ(stream, current_video_stream_);
DCHECK(state_ == STATE_PLAYING || state_ == STATE_FLUSHED ||
state_ == STATE_FLUSHING);
- DCHECK_EQ(stream, current_video_stream_);
- video_ended_ = false;
if (state_ == STATE_FLUSHED) {
// If we are in the FLUSHED state, then we are done. The video renderer will
// be restarted by a subsequent RendererImpl::StartPlayingFrom call.
- OnStreamRestartCompleted();
- } else {
- // Stream restart will be completed when the video renderer decodes enough
- // data and reports HAVE_ENOUGH to HandleRestartedStreamBufferingChanges.
- pending_flush_for_stream_change_ = false;
- video_renderer_->StartPlayingFrom(time);
+ std::move(restart_completed_cb).Run();
+ return;
}
+
+ video_renderer_->StartPlayingFrom(time);
+ video_playing_ = true;
+ pending_video_track_change_ = false;
+ std::move(restart_completed_cb).Run();
}
void RendererImpl::OnStatisticsUpdate(const PipelineStatistics& stats) {
@@ -718,90 +657,6 @@ void RendererImpl::OnStatisticsUpdate(const PipelineStatistics& stats) {
client_->OnStatisticsUpdate(stats);
}
-bool RendererImpl::HandleRestartedStreamBufferingChanges(
- DemuxerStream::Type type,
- BufferingState new_buffering_state) {
- DCHECK(task_runner_->BelongsToCurrentThread());
- // When restarting playback we want to defer the BUFFERING_HAVE_NOTHING for
- // the stream being restarted, to allow continuing uninterrupted playback on
- // the other stream.
- if (type == DemuxerStream::VIDEO && restarting_video_) {
- if (new_buffering_state == BUFFERING_HAVE_ENOUGH) {
- DVLOG(1) << __func__ << " Got BUFFERING_HAVE_ENOUGH for video stream,"
- " resuming playback.";
- task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&RendererImpl::OnStreamRestartCompleted, weak_this_));
- if (state_ == STATE_PLAYING &&
- !deferred_video_underflow_cb_.IsCancelled()) {
- // If deferred_video_underflow_cb_ wasn't triggered, then audio should
- // still be playing, we only need to unpause the video stream.
- DVLOG(4) << "deferred_video_underflow_cb_.Cancel()";
- deferred_video_underflow_cb_.Cancel();
- video_buffering_state_ = new_buffering_state;
- if (playback_rate_ > 0)
- video_renderer_->OnTimeProgressing();
- return true;
- }
- }
- // We don't handle the BUFFERING_HAVE_NOTHING case explicitly here, since
- // the existing logic for deferring video underflow reporting in
- // OnBufferingStateChange is exactly what we need. So fall through to the
- // regular video underflow handling path in OnBufferingStateChange.
- }
-
- if (type == DemuxerStream::AUDIO && restarting_audio_) {
- if (new_buffering_state == BUFFERING_HAVE_NOTHING) {
- if (deferred_video_underflow_cb_.IsCancelled() &&
- deferred_audio_restart_underflow_cb_.IsCancelled()) {
- DVLOG(1) << __func__ << " Deferring BUFFERING_HAVE_NOTHING for "
- "audio stream which is being restarted.";
- audio_buffering_state_ = new_buffering_state;
- deferred_audio_restart_underflow_cb_.Reset(
- base::Bind(&RendererImpl::OnBufferingStateChange, weak_this_, type,
- new_buffering_state));
- task_runner_->PostDelayedTask(
- FROM_HERE, deferred_audio_restart_underflow_cb_.callback(),
- base::TimeDelta::FromMilliseconds(
- kAudioRestartUnderflowThresholdMs));
- return true;
- }
- // Cancel the deferred callback and report the underflow immediately.
- DVLOG(4) << "deferred_audio_restart_underflow_cb_.Cancel()";
- deferred_audio_restart_underflow_cb_.Cancel();
- } else if (new_buffering_state == BUFFERING_HAVE_ENOUGH) {
- DVLOG(1) << __func__ << " Got BUFFERING_HAVE_ENOUGH for audio stream,"
- " resuming playback.";
- deferred_audio_restart_underflow_cb_.Cancel();
- // Now that we have decoded enough audio, pause playback momentarily to
- // ensure video renderer is synchronised with audio.
- PausePlayback();
- task_runner_->PostTask(
- FROM_HERE,
- base::Bind(&RendererImpl::OnStreamRestartCompleted, weak_this_));
- }
- }
- return false;
-}
-
-void RendererImpl::OnStreamRestartCompleted() {
- DVLOG(3) << __func__ << " restarting_audio_=" << restarting_audio_
- << " restarting_video_=" << restarting_video_;
- DCHECK(task_runner_->BelongsToCurrentThread());
- DCHECK(restarting_audio_ || restarting_video_);
- {
- base::AutoLock lock(restarting_audio_lock_);
- restarting_audio_ = false;
- restarting_audio_time_ = kNoTimestamp;
- }
- restarting_video_ = false;
- if (!pending_actions_.empty()) {
- base::Closure closure = pending_actions_.front();
- pending_actions_.pop_front();
- closure.Run();
- }
-}
-
void RendererImpl::OnBufferingStateChange(DemuxerStream::Type type,
BufferingState new_buffering_state) {
DCHECK((type == DemuxerStream::AUDIO) || (type == DemuxerStream::VIDEO));
@@ -816,9 +671,16 @@ void RendererImpl::OnBufferingStateChange(DemuxerStream::Type type,
bool was_waiting_for_enough_data = WaitingForEnoughData();
- if (restarting_audio_ || restarting_video_) {
- if (HandleRestartedStreamBufferingChanges(type, new_buffering_state))
+ if (new_buffering_state == BUFFERING_HAVE_NOTHING) {
+ if ((pending_audio_track_change_ && type == DemuxerStream::AUDIO) ||
+ (pending_video_track_change_ && type == DemuxerStream::VIDEO)) {
+ // Don't pass up a nothing event if it was triggered by a track change.
+ // This would cause the renderer to effectively lie about underflow state.
+ // Even though this might cause an immediate video underflow due to
+      // changing an audio track, all playback is paused when audio is disabled.
+ *buffering_state = new_buffering_state;
return;
+ }
}
// When audio is present and has enough data, defer video underflow callbacks
@@ -869,9 +731,14 @@ void RendererImpl::OnBufferingStateChange(DemuxerStream::Type type,
// Renderer prerolled.
if (was_waiting_for_enough_data && !WaitingForEnoughData()) {
- StartPlayback();
- client_->OnBufferingStateChange(BUFFERING_HAVE_ENOUGH);
- return;
+ // Prevent condition where audio or video is sputtering and flipping back
+ // and forth between NOTHING and ENOUGH mixing with a track change, causing
+ // a StartPlayback to be called while the audio renderer is being flushed.
+ if (!pending_audio_track_change_ && !pending_video_track_change_) {
+ StartPlayback();
+ client_->OnBufferingStateChange(BUFFERING_HAVE_ENOUGH);
+ return;
+ }
}
}
@@ -891,7 +758,8 @@ void RendererImpl::PausePlayback() {
DCHECK(task_runner_->BelongsToCurrentThread());
switch (state_) {
case STATE_PLAYING:
- DCHECK(PlaybackHasEnded() || WaitingForEnoughData() || restarting_audio_)
+ DCHECK(PlaybackHasEnded() || WaitingForEnoughData() ||
+ pending_audio_track_change_)
<< "Playback should only pause due to ending or underflowing or"
" when restarting audio stream";
@@ -912,11 +780,11 @@ void RendererImpl::PausePlayback() {
// An error state may occur at any time.
break;
}
-
if (time_ticking_) {
time_ticking_ = false;
time_source_->StopTicking();
}
+
if (playback_rate_ > 0 && video_renderer_)
video_renderer_->OnTimeStopped();
}
@@ -925,13 +793,17 @@ void RendererImpl::StartPlayback() {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, STATE_PLAYING);
- DCHECK(!time_ticking_);
DCHECK(!WaitingForEnoughData());
- time_ticking_ = true;
- time_source_->StartTicking();
- if (playback_rate_ > 0 && video_renderer_)
+ if (!time_ticking_) {
+ time_ticking_ = true;
+ audio_playing_ = true;
+ time_source_->StartTicking();
+ }
+ if (playback_rate_ > 0 && video_renderer_) {
+ video_playing_ = true;
video_renderer_->OnTimeProgressing();
+ }
}
void RendererImpl::OnRendererEnded(DemuxerStream::Type type) {
@@ -947,8 +819,8 @@ void RendererImpl::OnRendererEnded(DemuxerStream::Type type) {
audio_ended_ = true;
} else {
DCHECK(!video_ended_);
- video_ended_ = true;
DCHECK(video_renderer_);
+ video_ended_ = true;
video_renderer_->OnTimeStopped();
}
@@ -1031,4 +903,92 @@ void RendererImpl::OnVideoOpacityChange(bool opaque) {
client_->OnVideoOpacityChange(opaque);
}
+void RendererImpl::CleanUpTrackChange(base::RepeatingClosure on_finished,
+ bool* pending_change,
+ bool* ended,
+ bool* playing) {
+ {
+ // This lock is required for setting pending_audio_track_change_, and has
+ // no effect when setting pending_video_track_change_.
+ base::AutoLock lock(restarting_audio_lock_);
+ *pending_change = *ended = *playing = false;
+ }
+ std::move(on_finished).Run();
+}
+
+void RendererImpl::OnSelectedVideoTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ DCHECK_LT(enabled_tracks.size(), 2u);
+ DemuxerStream* stream = enabled_tracks.empty() ? nullptr : enabled_tracks[0];
+
+ if (!stream && !video_playing_) {
+ std::move(change_completed_cb).Run();
+ return;
+ }
+
+  // 'fixing' the stream -> restarting if it's the same stream,
+ // reinitializing if it is different.
+ base::RepeatingClosure fix_stream_cb;
+ if (stream && stream != current_video_stream_) {
+ fix_stream_cb = base::BindRepeating(
+ &RendererImpl::ReinitializeVideoRenderer, weak_this_, stream,
+ GetMediaTime(), base::Passed(&change_completed_cb));
+ } else {
+ fix_stream_cb = base::BindRepeating(
+ &RendererImpl::RestartVideoRenderer, weak_this_, current_video_stream_,
+ GetMediaTime(), base::Passed(&change_completed_cb));
+ }
+
+ pending_video_track_change_ = true;
+ video_renderer_->Flush(base::BindRepeating(
+ &RendererImpl::CleanUpTrackChange, weak_this_,
+ base::Passed(&fix_stream_cb), &pending_video_track_change_, &video_ended_,
+ &video_playing_));
+}
+
+void RendererImpl::OnEnabledAudioTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ DCHECK_LT(enabled_tracks.size(), 2u);
+ DemuxerStream* stream = enabled_tracks.empty() ? nullptr : enabled_tracks[0];
+
+ if (!stream && !audio_playing_) {
+ std::move(change_completed_cb).Run();
+ return;
+ }
+
+  // 'fixing' the stream -> restarting if it's the same stream,
+ // reinitializing if it is different.
+ base::RepeatingClosure fix_stream_cb;
+
+ if (stream && stream != current_audio_stream_) {
+ fix_stream_cb = base::BindRepeating(
+ &RendererImpl::ReinitializeAudioRenderer, weak_this_, stream,
+ GetMediaTime(), base::Passed(&change_completed_cb));
+ } else {
+ fix_stream_cb = base::BindRepeating(
+ &RendererImpl::RestartAudioRenderer, weak_this_, current_audio_stream_,
+ GetMediaTime(), base::Passed(&change_completed_cb));
+ }
+
+ {
+ base::AutoLock lock(restarting_audio_lock_);
+ pending_audio_track_change_ = true;
+ restarting_audio_time_ = time_source_->CurrentMediaTime();
+ }
+
+ if (audio_playing_)
+ PausePlayback();
+
+ audio_renderer_->Flush(base::BindRepeating(
+ &RendererImpl::CleanUpTrackChange, weak_this_,
+ base::Passed(&fix_stream_cb), &pending_audio_track_change_, &audio_ended_,
+ &audio_playing_));
+}
+
} // namespace media
diff --git a/chromium/media/renderers/renderer_impl.h b/chromium/media/renderers/renderer_impl.h
index cd73bf26900..d50c92881ea 100644
--- a/chromium/media/renderers/renderer_impl.h
+++ b/chromium/media/renderers/renderer_impl.h
@@ -62,6 +62,12 @@ class MEDIA_EXPORT RendererImpl : public Renderer {
void SetPlaybackRate(double playback_rate) final;
void SetVolume(float volume) final;
base::TimeDelta GetMediaTime() final;
+ void OnSelectedVideoTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) override;
+ void OnEnabledAudioTracksChanged(
+ const std::vector<DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) override;
// Helper functions for testing purposes. Must be called before Initialize().
void DisableUnderflowForTesting();
@@ -106,25 +112,24 @@ class MEDIA_EXPORT RendererImpl : public Renderer {
void FlushVideoRenderer();
void OnVideoRendererFlushDone();
- // This function notifies the renderer that the status of the demuxer |stream|
- // has been changed, the new status is |enabled| and the change occured while
- // playback position was |time|.
- void OnStreamStatusChanged(DemuxerStream* stream,
- bool enabled,
- base::TimeDelta time);
-
// Reinitialize audio/video renderer during a demuxer stream switching. The
// renderer must be flushed first, and when the re-init is completed the
// corresponding callback will be invoked to restart playback.
// The |stream| parameter specifies the new demuxer stream, and the |time|
// parameter specifies the time on media timeline where the switch occured.
- void ReinitializeAudioRenderer(DemuxerStream* stream, base::TimeDelta time);
+ void ReinitializeAudioRenderer(DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure reinitialize_completed_cb);
void OnAudioRendererReinitialized(DemuxerStream* stream,
base::TimeDelta time,
+ base::OnceClosure reinitialize_completed_cb,
PipelineStatus status);
- void ReinitializeVideoRenderer(DemuxerStream* stream, base::TimeDelta time);
+ void ReinitializeVideoRenderer(DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure restart_completed_cb);
void OnVideoRendererReinitialized(DemuxerStream* stream,
base::TimeDelta time,
+ base::OnceClosure restart_completed_cb,
PipelineStatus status);
// Restart audio/video renderer playback after a demuxer stream switch or
@@ -134,8 +139,18 @@ class MEDIA_EXPORT RendererImpl : public Renderer {
// needs to be restarted. It is necessary for demuxers with independent
// streams (e.g. MSE / ChunkDemuxer) to synchronize data reading between those
// streams.
- void RestartAudioRenderer(DemuxerStream* stream, base::TimeDelta time);
- void RestartVideoRenderer(DemuxerStream* stream, base::TimeDelta time);
+ void RestartAudioRenderer(DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure restart_completed_cb);
+ void RestartVideoRenderer(DemuxerStream* stream,
+ base::TimeDelta time,
+ base::OnceClosure restart_completed_cb);
+
+ // Fix state booleans after the stream switching is finished.
+ void CleanUpTrackChange(base::RepeatingClosure on_finished,
+ bool* pending_change,
+ bool* ended,
+ bool* playing);
// Callback executed by filters to update statistics.
void OnStatisticsUpdate(const PipelineStatistics& stats);
@@ -217,6 +232,8 @@ class MEDIA_EXPORT RendererImpl : public Renderer {
// Whether we've received the audio/video ended events.
bool audio_ended_;
bool video_ended_;
+ bool audio_playing_;
+ bool video_playing_;
CdmContext* cdm_context_;
@@ -238,21 +255,10 @@ class MEDIA_EXPORT RendererImpl : public Renderer {
// TODO(servolk): Get rid of the lock and replace restarting_audio_ with
// std::atomic<bool> when atomics are unbanned in Chromium.
base::Lock restarting_audio_lock_;
- bool restarting_audio_ = false;
+ bool pending_audio_track_change_ = false;
base::TimeDelta restarting_audio_time_ = kNoTimestamp;
- bool restarting_video_ = false;
-
- // Flush operations and media track status changes must be serialized to avoid
- // interfering with each other. This list will hold a list of postponed
- // actions that need to be completed after the current async operation is
- // completed.
- std::list<base::Closure> pending_actions_;
-
- // Pending flush indicates that a track change is in the middle of a Flush and
- // that another one can't be scheduled at this time. Instead it should be
- // added to |pending_actions_|.
- bool pending_flush_for_stream_change_ = false;
+ bool pending_video_track_change_ = false;
base::WeakPtr<RendererImpl> weak_this_;
base::WeakPtrFactory<RendererImpl> weak_factory_;
diff --git a/chromium/media/renderers/renderer_impl_unittest.cc b/chromium/media/renderers/renderer_impl_unittest.cc
index c10add5e7b0..5ac6881f1cf 100644
--- a/chromium/media/renderers/renderer_impl_unittest.cc
+++ b/chromium/media/renderers/renderer_impl_unittest.cc
@@ -11,6 +11,7 @@
#include "base/bind_helpers.h"
#include "base/macros.h"
#include "base/message_loop/message_loop.h"
+#include "base/optional.h"
#include "base/run_loop.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -28,6 +29,7 @@ using ::testing::Return;
using ::testing::SaveArg;
using ::testing::StrictMock;
using ::testing::WithArg;
+using ::testing::WithArgs;
namespace media {
@@ -51,7 +53,7 @@ ACTION(PostCallback) {
ACTION(PostQuitWhenIdle) {
base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE, base::MessageLoop::QuitWhenIdleClosure());
+ FROM_HERE, base::RunLoop::QuitCurrentWhenIdleClosureDeprecated());
}
class RendererImplTest : public ::testing::Test {
@@ -68,6 +70,8 @@ class RendererImplTest : public ::testing::Test {
MOCK_METHOD0(OnFlushed, void());
MOCK_METHOD1(OnCdmAttached, void(bool));
MOCK_METHOD1(OnDurationChange, void(base::TimeDelta duration));
+ MOCK_METHOD0(OnVideoTrackChangeComplete, void());
+ MOCK_METHOD0(OnAudioTrackChangeComplete, void());
private:
DISALLOW_COPY_AND_ASSIGN(CallbackHelper);
@@ -90,7 +94,7 @@ class RendererImplTest : public ::testing::Test {
EXPECT_CALL(*demuxer_, GetAllStreams()).WillRepeatedly(Return(streams_));
}
- virtual ~RendererImplTest() { Destroy(); }
+ ~RendererImplTest() override { Destroy(); }
protected:
void Destroy() {
@@ -102,8 +106,6 @@ class RendererImplTest : public ::testing::Test {
DemuxerStream::Type type) {
std::unique_ptr<StrictMock<MockDemuxerStream>> stream(
new StrictMock<MockDemuxerStream>(type));
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .Times(testing::AnyNumber());
return stream;
}
@@ -302,6 +304,37 @@ class RendererImplTest : public ::testing::Test {
base::RunLoop().RunUntilIdle();
}
+ void SetAudioTrackSwitchExpectations() {
+ InSequence track_switch_seq;
+
+  // Called from within OnEnabledAudioTracksChanged
+ EXPECT_CALL(time_source_, CurrentMediaTime());
+ EXPECT_CALL(time_source_, CurrentMediaTime());
+ EXPECT_CALL(time_source_, StopTicking());
+ EXPECT_CALL(*audio_renderer_, Flush(_));
+
+ // Callback into RestartAudioRenderer
+ EXPECT_CALL(*audio_renderer_, StartPlaying());
+
+ // Callback into OnBufferingStateChange
+ EXPECT_CALL(time_source_, StartTicking());
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH));
+ }
+
+ void SetVideoTrackSwitchExpectations() {
+ InSequence track_switch_seq;
+
+  // Called from within OnSelectedVideoTracksChanged
+ EXPECT_CALL(time_source_, CurrentMediaTime());
+ EXPECT_CALL(*video_renderer_, Flush(_));
+
+ // Callback into RestartVideoRenderer
+ EXPECT_CALL(*video_renderer_, StartPlayingFrom(_));
+
+ // Callback into OnBufferingStateChange
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH));
+ }
+
// Fixture members.
base::MessageLoop message_loop_;
StrictMock<CallbackHelper> callbacks_;
@@ -769,365 +802,206 @@ TEST_F(RendererImplTest, VideoUnderflowWithAudioFlush) {
base::RunLoop().RunUntilIdle();
}
-TEST_F(RendererImplTest, StreamStatusNotificationHandling) {
- CreateAudioAndVideoStream();
-
- StreamStatusChangeCB stream_status_change_cb;
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .WillOnce(SaveArg<0>(&stream_status_change_cb));
- SetAudioRendererInitializeExpectations(PIPELINE_OK);
- SetVideoRendererInitializeExpectations(PIPELINE_OK);
- InitializeAndExpect(PIPELINE_OK);
+TEST_F(RendererImplTest, AudioTrackDisableThenEnable) {
+ InitializeWithAudioAndVideo();
Play();
+ Mock::VerifyAndClearExpectations(&time_source_);
- EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH));
-
- // Verify that DemuxerStream status changes cause the corresponding
- // audio/video renderer to be flushed and restarted.
- EXPECT_CALL(time_source_, StopTicking());
- EXPECT_CALL(*audio_renderer_, Flush(_));
- EXPECT_CALL(*audio_renderer_, StartPlaying());
- EXPECT_CALL(time_source_, StartTicking());
- stream_status_change_cb.Run(audio_stream_.get(), false, base::TimeDelta());
+ base::RunLoop disable_wait;
+ SetAudioTrackSwitchExpectations();
+ renderer_impl_->OnEnabledAudioTracksChanged({}, disable_wait.QuitClosure());
+ disable_wait.Run();
- EXPECT_CALL(*video_renderer_, Flush(_));
- EXPECT_CALL(*video_renderer_, StartPlayingFrom(_));
- stream_status_change_cb.Run(video_stream_.get(), false, base::TimeDelta());
- base::RunLoop().RunUntilIdle();
+ base::RunLoop enable_wait;
+ SetAudioTrackSwitchExpectations();
+ renderer_impl_->OnEnabledAudioTracksChanged({streams_[0]},
+ enable_wait.QuitClosure());
+ enable_wait.Run();
}
-// Stream status changes are handled asynchronously by the renderer and may take
-// some time to process. This test verifies that all status changes are
-// processed correctly by the renderer even if status changes of the stream
-// happen much faster than the renderer can process them. In that case the
-// renderer may postpone processing status changes, but still must process all
-// of them eventually.
-TEST_F(RendererImplTest, PostponedStreamStatusNotificationHandling) {
- CreateAudioAndVideoStream();
-
- StreamStatusChangeCB stream_status_change_cb;
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .WillOnce(SaveArg<0>(&stream_status_change_cb));
- SetAudioRendererInitializeExpectations(PIPELINE_OK);
- SetVideoRendererInitializeExpectations(PIPELINE_OK);
- InitializeAndExpect(PIPELINE_OK);
+TEST_F(RendererImplTest, VideoTrackDisableThenEnable) {
+ InitializeWithAudioAndVideo();
Play();
+ Mock::VerifyAndClearExpectations(&time_source_);
- EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH))
- .Times(2);
-
- EXPECT_CALL(time_source_, StopTicking()).Times(2);
- EXPECT_CALL(time_source_, StartTicking()).Times(2);
- EXPECT_CALL(*audio_renderer_, Flush(_)).Times(2);
- EXPECT_CALL(*audio_renderer_, StartPlaying()).Times(2);
- // The first stream status change will be processed immediately. Each status
- // change processing involves Flush + StartPlaying when the Flush is done. The
- // Flush operation is async in this case, so the second status change will be
- // postponed by renderer until after processing the first one is finished. But
- // we must still get two pairs of Flush/StartPlaying calls eventually.
- stream_status_change_cb.Run(audio_stream_.get(), false, base::TimeDelta());
- stream_status_change_cb.Run(audio_stream_.get(), true, base::TimeDelta());
- base::RunLoop().RunUntilIdle();
+ base::RunLoop disable_wait;
+ SetVideoTrackSwitchExpectations();
+ renderer_impl_->OnSelectedVideoTracksChanged({}, disable_wait.QuitClosure());
+ disable_wait.Run();
+
+ base::RunLoop enable_wait;
+ SetVideoTrackSwitchExpectations();
+ renderer_impl_->OnSelectedVideoTracksChanged({streams_[1]},
+ enable_wait.QuitClosure());
+ enable_wait.Run();
- EXPECT_CALL(*video_renderer_, Flush(_)).Times(2);
- EXPECT_CALL(*video_renderer_, StartPlayingFrom(base::TimeDelta())).Times(2);
- // The first stream status change will be processed immediately. Each status
- // change processing involves Flush + StartPlaying when the Flush is done. The
- // Flush operation is async in this case, so the second status change will be
- // postponed by renderer until after processing the first one is finished. But
- // we must still get two pairs of Flush/StartPlaying calls eventually.
- stream_status_change_cb.Run(video_stream_.get(), false, base::TimeDelta());
- stream_status_change_cb.Run(video_stream_.get(), true, base::TimeDelta());
base::RunLoop().RunUntilIdle();
}
-// Verify that a RendererImpl::Flush gets postponed until after stream status
-// change handling is completed.
-TEST_F(RendererImplTest, FlushDuringAudioReinit) {
- CreateAudioAndVideoStream();
-
- StreamStatusChangeCB stream_status_change_cb;
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .WillOnce(SaveArg<0>(&stream_status_change_cb));
- SetAudioRendererInitializeExpectations(PIPELINE_OK);
- SetVideoRendererInitializeExpectations(PIPELINE_OK);
- InitializeAndExpect(PIPELINE_OK);
+TEST_F(RendererImplTest, AudioUnderflowDuringAudioTrackChange) {
+ InitializeWithAudioAndVideo();
Play();
- EXPECT_CALL(time_source_, StopTicking()).Times(testing::AnyNumber());
+ base::RunLoop loop;
+
+ // Underflow should occur immediately with a single audio track.
+ EXPECT_CALL(time_source_, StopTicking());
+
+ // Capture the callback from the audio renderer flush.
base::Closure audio_renderer_flush_cb;
EXPECT_CALL(*audio_renderer_, Flush(_))
.WillOnce(SaveArg<0>(&audio_renderer_flush_cb));
- EXPECT_CALL(*audio_renderer_, StartPlaying());
- // This should start flushing the audio renderer (due to audio stream status
- // change) and should populate the |audio_renderer_flush_cb|.
- stream_status_change_cb.Run(audio_stream_.get(), false, base::TimeDelta());
- EXPECT_TRUE(audio_renderer_flush_cb);
- base::RunLoop().RunUntilIdle();
+ EXPECT_CALL(time_source_, CurrentMediaTime()).Times(2);
+ std::vector<DemuxerStream*> tracks;
+ renderer_impl_->OnEnabledAudioTracksChanged({}, loop.QuitClosure());
- bool flush_done = false;
-
- // Now that audio stream change is being handled the RendererImpl::Flush
- // should be postponed, instead of being executed immediately.
- EXPECT_CALL(callbacks_, OnFlushed()).WillOnce(SetBool(&flush_done, true));
- renderer_impl_->Flush(
- base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
- base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(flush_done);
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH));
- // The renderer_impl_->Flush invoked above should proceed after the first
- // audio renderer flush (initiated by the stream status change) completes.
- SetFlushExpectationsForAVRenderers();
+ EXPECT_CALL(time_source_, StartTicking());
+ EXPECT_CALL(*audio_renderer_, StartPlaying());
+ audio_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_NOTHING);
audio_renderer_flush_cb.Run();
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(flush_done);
+ loop.Run();
}
-// Verify that RendererImpl::Flush is not postponed by waiting for data.
-TEST_F(RendererImplTest, FlushDuringAudioReinitWhileWaiting) {
- CreateAudioAndVideoStream();
+TEST_F(RendererImplTest, VideoUnderflowDuringVideoTrackChange) {
+ InitializeWithAudioAndVideo();
+ Play();
- StreamStatusChangeCB stream_status_change_cb;
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .WillOnce(SaveArg<0>(&stream_status_change_cb));
- SetAudioRendererInitializeExpectations(PIPELINE_OK);
- SetVideoRendererInitializeExpectations(PIPELINE_OK);
- InitializeAndExpect(PIPELINE_OK);
+ base::RunLoop loop;
+
+ // Capture the callback from the video renderer flush.
+ base::Closure video_renderer_flush_cb;
+ {
+ InSequence track_switch_seq;
+ EXPECT_CALL(time_source_, CurrentMediaTime());
+ EXPECT_CALL(*video_renderer_, Flush(_))
+ .WillOnce(SaveArg<0>(&video_renderer_flush_cb));
+ EXPECT_CALL(*video_renderer_, StartPlayingFrom(_));
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH));
+ }
+
+ renderer_impl_->OnSelectedVideoTracksChanged({}, loop.QuitClosure());
+
+ video_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_NOTHING);
+ video_renderer_flush_cb.Run();
+ loop.Run();
+}
+
+TEST_F(RendererImplTest, VideoUnderflowDuringAudioTrackChange) {
+ InitializeWithAudioAndVideo();
Play();
- EXPECT_CALL(time_source_, StopTicking()).Times(testing::AnyNumber());
- base::Closure audio_renderer_flush_cb;
+ base::RunLoop loop;
- // Override the standard buffering state transitions for Flush() and
- // StartPlaying() setup above.
+ // Capture the callback from the audio renderer flush.
+ base::Closure audio_renderer_flush_cb;
EXPECT_CALL(*audio_renderer_, Flush(_))
- .WillOnce(DoAll(
- SetBufferingState(&audio_renderer_client_, BUFFERING_HAVE_NOTHING),
- SaveArg<0>(&audio_renderer_flush_cb)));
- EXPECT_CALL(*audio_renderer_, StartPlaying())
- .WillOnce(RunClosure(base::DoNothing::Repeatedly()));
-
- // This should start flushing the audio renderer (due to audio stream status
- // change) and should populate the |audio_renderer_flush_cb|.
- stream_status_change_cb.Run(audio_stream_.get(), false, base::TimeDelta());
- EXPECT_TRUE(audio_renderer_flush_cb);
- base::RunLoop().RunUntilIdle();
+ .WillOnce(SaveArg<0>(&audio_renderer_flush_cb));
- bool flush_done = false;
+ EXPECT_CALL(time_source_, CurrentMediaTime()).Times(2);
+ EXPECT_CALL(time_source_, StopTicking());
+ renderer_impl_->OnEnabledAudioTracksChanged({}, loop.QuitClosure());
- // Complete the first half of the track change, it should be stuck in the
- // StartPlaying() state after this.
- EXPECT_CALL(callbacks_, OnFlushed()).WillOnce(SetBool(&flush_done, true));
+ EXPECT_CALL(*audio_renderer_, StartPlaying());
+ video_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_NOTHING);
audio_renderer_flush_cb.Run();
- base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(flush_done);
-
- // Ensure that even though we're stuck waiting for have_enough from the
- // audio renderer, that our flush still executes immediately.
- SetFlushExpectationsForAVRenderers();
- renderer_impl_->Flush(
- base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(flush_done);
+ loop.Run();
}
-TEST_F(RendererImplTest, FlushDuringVideoReinit) {
- CreateAudioAndVideoStream();
-
- StreamStatusChangeCB stream_status_change_cb;
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .WillOnce(SaveArg<0>(&stream_status_change_cb));
- SetAudioRendererInitializeExpectations(PIPELINE_OK);
- SetVideoRendererInitializeExpectations(PIPELINE_OK);
- InitializeAndExpect(PIPELINE_OK);
+TEST_F(RendererImplTest, AudioUnderflowDuringVideoTrackChange) {
+ InitializeWithAudioAndVideo();
Play();
- EXPECT_CALL(time_source_, StopTicking()).Times(testing::AnyNumber());
+ base::RunLoop loop;
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING));
+ EXPECT_CALL(time_source_, CurrentMediaTime());
+
+ // Capture the callback from the video renderer flush.
base::Closure video_renderer_flush_cb;
EXPECT_CALL(*video_renderer_, Flush(_))
.WillOnce(SaveArg<0>(&video_renderer_flush_cb));
- EXPECT_CALL(*video_renderer_, StartPlayingFrom(_));
- // This should start flushing the video renderer (due to video stream status
- // change) and should populate the |video_renderer_flush_cb|.
- stream_status_change_cb.Run(video_stream_.get(), false, base::TimeDelta());
- EXPECT_TRUE(video_renderer_flush_cb);
- base::RunLoop().RunUntilIdle();
+ renderer_impl_->OnSelectedVideoTracksChanged({}, loop.QuitClosure());
- bool flush_done = false;
+ EXPECT_CALL(time_source_, StopTicking());
+ EXPECT_CALL(*video_renderer_, StartPlayingFrom(_));
- // Now that video stream change is being handled the RendererImpl::Flush
- // should be postponed, instead of being executed immediately.
- EXPECT_CALL(callbacks_, OnFlushed()).WillOnce(SetBool(&flush_done, true));
- renderer_impl_->Flush(
- base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
- base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(flush_done);
+ audio_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_NOTHING);
- // The renderer_impl_->Flush invoked above should proceed after the first
- // video renderer flush (initiated by the stream status change) completes.
- SetFlushExpectationsForAVRenderers();
video_renderer_flush_cb.Run();
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(flush_done);
+ loop.Run();
}
-// Verify that RendererImpl::Flush is not postponed by waiting for data.
-TEST_F(RendererImplTest, FlushDuringVideoReinitWhileWaiting) {
- CreateAudioAndVideoStream();
-
- StreamStatusChangeCB stream_status_change_cb;
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .WillOnce(SaveArg<0>(&stream_status_change_cb));
- SetAudioRendererInitializeExpectations(PIPELINE_OK);
- SetVideoRendererInitializeExpectations(PIPELINE_OK);
- InitializeAndExpect(PIPELINE_OK);
+TEST_F(RendererImplTest, VideoResumedFromUnderflowDuringAudioTrackChange) {
+ InitializeWithAudioAndVideo();
Play();
- EXPECT_CALL(time_source_, StopTicking()).Times(testing::AnyNumber());
- base::Closure video_renderer_flush_cb;
-
- // Override the standard buffering state transitions for Flush() and
- // StartPlaying() setup above.
- EXPECT_CALL(*video_renderer_, Flush(_))
- .WillOnce(DoAll(
- SetBufferingState(&video_renderer_client_, BUFFERING_HAVE_NOTHING),
- SaveArg<0>(&video_renderer_flush_cb)));
- EXPECT_CALL(*video_renderer_, StartPlayingFrom(_))
- .WillOnce(RunClosure(base::DoNothing::Repeatedly()));
-
- // This should start flushing the video renderer (due to video stream status
- // change) and should populate the |video_renderer_flush_cb|.
- stream_status_change_cb.Run(video_stream_.get(), false, base::TimeDelta());
- EXPECT_TRUE(video_renderer_flush_cb);
- base::RunLoop().RunUntilIdle();
+ // Underflow the renderer.
+ base::RunLoop underflow_wait;
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING))
+ .WillOnce(RunClosure(underflow_wait.QuitClosure()));
+ EXPECT_CALL(time_source_, StopTicking());
+ video_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_NOTHING);
+ underflow_wait.Run();
- bool flush_done = false;
+ // Start a track change.
+ base::Closure audio_renderer_flush_cb;
+ base::RunLoop track_change;
+ {
+ InSequence track_switch_seq;
+ EXPECT_CALL(time_source_, CurrentMediaTime()).Times(2);
+ EXPECT_CALL(*audio_renderer_, Flush(_))
+ .WillOnce(SaveArg<0>(&audio_renderer_flush_cb));
+ }
+ renderer_impl_->OnEnabledAudioTracksChanged({}, track_change.QuitClosure());
- // Complete the first half of the track change, it should be stuck in the
- // StartPlaying() state after this.
- EXPECT_CALL(callbacks_, OnFlushed()).WillOnce(SetBool(&flush_done, true));
- video_renderer_flush_cb.Run();
- base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(flush_done);
+ // Signal that the renderer has enough data to resume from underflow.
+ // Nothing should bubble up, since we are pending audio track change.
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(_)).Times(0);
+ EXPECT_CALL(time_source_, StartTicking()).Times(0);
+ video_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_ENOUGH);
- // Ensure that even though we're stuck waiting for have_enough from the
- // video renderer, that our flush still executes immediately.
- SetFlushExpectationsForAVRenderers();
- renderer_impl_->Flush(
- base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(flush_done);
+ // Finish the track change.
+ EXPECT_CALL(*audio_renderer_, StartPlaying());
+ audio_renderer_flush_cb.Run();
+ track_change.Run();
}
-// Test audio track switching when the RendererImpl is in STATE_FLUSHING/FLUSHED
-TEST_F(RendererImplTest, AudioTrackSwitchDuringFlush) {
- CreateAudioAndVideoStream();
- std::unique_ptr<StrictMock<MockDemuxerStream>> primary_audio_stream =
- std::move(audio_stream_);
- CreateAudioStream();
- std::unique_ptr<StrictMock<MockDemuxerStream>> secondary_audio_stream =
- std::move(audio_stream_);
- audio_stream_ = std::move(primary_audio_stream);
-
- StreamStatusChangeCB stream_status_change_cb;
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .WillOnce(SaveArg<0>(&stream_status_change_cb));
- SetAudioRendererInitializeExpectations(PIPELINE_OK);
- SetVideoRendererInitializeExpectations(PIPELINE_OK);
- InitializeAndExpect(PIPELINE_OK);
+TEST_F(RendererImplTest, AudioResumedFromUnderflowDuringVideoTrackChange) {
+ InitializeWithAudioAndVideo();
Play();
- EXPECT_CALL(time_source_, StopTicking()).Times(testing::AnyNumber());
- EXPECT_CALL(*video_renderer_, Flush(_));
-
- // Initiate RendererImpl::Flush, but postpone its completion by not calling
- // audio renderer flush callback right away, i.e. pretending audio renderer
- // flush takes a while.
- base::Closure audio_renderer_flush_cb;
- EXPECT_CALL(*audio_renderer_, Flush(_))
- .WillOnce(SaveArg<0>(&audio_renderer_flush_cb));
- EXPECT_CALL(callbacks_, OnFlushed());
- renderer_impl_->Flush(
- base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(audio_renderer_flush_cb);
-
- // Now, while the RendererImpl::Flush is pending, perform an audio track
- // switch. The handling of the track switch will be postponed until after
- // RendererImpl::Flush completes.
- stream_status_change_cb.Run(audio_stream_.get(), false, base::TimeDelta());
- stream_status_change_cb.Run(secondary_audio_stream.get(), true,
- base::TimeDelta());
-
- // Ensure that audio track switch occurs after Flush by verifying that the
- // audio renderer is reinitialized with the secondary audio stream.
- EXPECT_CALL(*audio_renderer_,
- Initialize(secondary_audio_stream.get(), _, _, _));
-
- // Complete the audio renderer flush, thus completing the renderer_impl_ Flush
- // initiated above. This will transition the RendererImpl into the FLUSHED
- // state and will process pending track switch, which should result in the
- // reinitialization of the audio renderer for the secondary audio stream.
+ // Underflow the renderer.
+ base::RunLoop underflow_wait;
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(BUFFERING_HAVE_NOTHING))
+ .WillOnce(RunClosure(underflow_wait.QuitClosure()));
+ EXPECT_CALL(time_source_, StopTicking());
audio_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_NOTHING);
- audio_renderer_flush_cb.Run();
- base::RunLoop().RunUntilIdle();
-}
+ underflow_wait.Run();
-// Test video track switching when the RendererImpl is in STATE_FLUSHING/FLUSHED
-TEST_F(RendererImplTest, VideoTrackSwitchDuringFlush) {
- CreateAudioAndVideoStream();
- std::unique_ptr<StrictMock<MockDemuxerStream>> primary_video_stream =
- std::move(video_stream_);
- CreateVideoStream();
- std::unique_ptr<StrictMock<MockDemuxerStream>> secondary_video_stream =
- std::move(video_stream_);
- video_stream_ = std::move(primary_video_stream);
-
- StreamStatusChangeCB stream_status_change_cb;
- EXPECT_CALL(*demuxer_, SetStreamStatusChangeCB(_))
- .WillOnce(SaveArg<0>(&stream_status_change_cb));
- SetAudioRendererInitializeExpectations(PIPELINE_OK);
- SetVideoRendererInitializeExpectations(PIPELINE_OK);
- InitializeAndExpect(PIPELINE_OK);
- Play();
+ // Start a track change.
+ base::Closure video_renderer_flush_cb;
+ base::RunLoop track_change;
+ {
+ InSequence track_switch_seq;
+ EXPECT_CALL(time_source_, CurrentMediaTime());
+ EXPECT_CALL(*video_renderer_, Flush(_))
+ .WillOnce(SaveArg<0>(&video_renderer_flush_cb));
+ }
+ renderer_impl_->OnSelectedVideoTracksChanged({}, track_change.QuitClosure());
- EXPECT_CALL(time_source_, StopTicking()).Times(testing::AnyNumber());
- EXPECT_CALL(*video_renderer_, OnTimeStopped()).Times(testing::AnyNumber());
- EXPECT_CALL(*audio_renderer_, Flush(_));
+ // Signal that the renderer has enough data to resume from underflow.
+ // Nothing should bubble up, since we are pending video track change.
+ EXPECT_CALL(callbacks_, OnBufferingStateChange(_)).Times(0);
+ EXPECT_CALL(time_source_, StartTicking()).Times(0);
+ audio_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_ENOUGH);
- // Initiate RendererImpl::Flush, but postpone its completion by not calling
- // video renderer flush callback right away, i.e. pretending video renderer
- // flush takes a while.
- base::Closure video_renderer_flush_cb;
- EXPECT_CALL(*video_renderer_, Flush(_))
- .WillOnce(SaveArg<0>(&video_renderer_flush_cb));
- EXPECT_CALL(callbacks_, OnFlushed());
- renderer_impl_->Flush(
- base::Bind(&CallbackHelper::OnFlushed, base::Unretained(&callbacks_)));
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(video_renderer_flush_cb);
-
- // Now, while the RendererImpl::Flush is pending, perform a video track
- // switch. The handling of the track switch will be postponed until after
- // RendererImpl::Flush completes.
- stream_status_change_cb.Run(video_stream_.get(), false, base::TimeDelta());
- stream_status_change_cb.Run(secondary_video_stream.get(), true,
- base::TimeDelta());
-
- // Ensure that video track switch occurs after Flush by verifying that the
- // video renderer is reinitialized with the secondary video stream.
- EXPECT_CALL(*video_renderer_,
- Initialize(secondary_video_stream.get(), _, _, _, _));
-
- // Complete the video renderer flush, thus completing the renderer_impl_ Flush
- // initiated above. This will transition the RendererImpl into the FLUSHED
- // state and will process pending track switch, which should result in the
- // reinitialization of the video renderer for the secondary video stream.
- video_renderer_client_->OnBufferingStateChange(BUFFERING_HAVE_NOTHING);
+ // Finish the track change.
+ EXPECT_CALL(*video_renderer_, StartPlayingFrom(_));
video_renderer_flush_cb.Run();
- base::RunLoop().RunUntilIdle();
+ track_change.Run();
}
} // namespace media
diff --git a/chromium/media/renderers/video_renderer_impl.cc b/chromium/media/renderers/video_renderer_impl.cc
index 443ea3045e0..42598c8427e 100644
--- a/chromium/media/renderers/video_renderer_impl.cc
+++ b/chromium/media/renderers/video_renderer_impl.cc
@@ -223,6 +223,7 @@ void VideoRendererImpl::Initialize(
DCHECK(!time_progressing_);
video_frame_stream_.reset(new VideoFrameStream(
+ std::make_unique<VideoFrameStream::StreamTraits>(media_log_),
task_runner_, create_video_decoders_cb_, media_log_));
video_frame_stream_->set_config_change_observer(base::Bind(
&VideoRendererImpl::OnConfigChange, weak_factory_.GetWeakPtr()));
diff --git a/chromium/media/renderers/video_renderer_impl_unittest.cc b/chromium/media/renderers/video_renderer_impl_unittest.cc
index 926d3b31e67..ce74ddd075f 100644
--- a/chromium/media/renderers/video_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/video_renderer_impl_unittest.cc
@@ -15,6 +15,7 @@
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_loop_current.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/stl_util.h"
@@ -102,7 +103,7 @@ class VideoRendererImplTest : public testing::Test {
scoped_refptr<DecoderBuffer>(new DecoderBuffer(0))));
}
- virtual ~VideoRendererImplTest() = default;
+ ~VideoRendererImplTest() override = default;
void Initialize() {
InitializeWithLowDelay(false);
@@ -280,13 +281,13 @@ class VideoRendererImplTest : public testing::Test {
}
void AdvanceWallclockTimeInMs(int time_ms) {
- DCHECK_EQ(&message_loop_, base::MessageLoop::current());
+ DCHECK_EQ(&message_loop_, base::MessageLoopCurrent::Get());
base::AutoLock l(lock_);
tick_clock_.Advance(base::TimeDelta::FromMilliseconds(time_ms));
}
void AdvanceTimeInMs(int time_ms) {
- DCHECK_EQ(&message_loop_, base::MessageLoop::current());
+ DCHECK_EQ(&message_loop_, base::MessageLoopCurrent::Get());
base::AutoLock l(lock_);
time_ += base::TimeDelta::FromMilliseconds(time_ms);
time_source_.StopTicking();
@@ -470,7 +471,7 @@ class VideoRendererImplTest : public testing::Test {
private:
void DecodeRequested(scoped_refptr<DecoderBuffer> buffer,
const VideoDecoder::DecodeCB& decode_cb) {
- DCHECK_EQ(&message_loop_, base::MessageLoop::current());
+ DCHECK_EQ(&message_loop_, base::MessageLoopCurrent::Get());
CHECK(decode_cb_.is_null());
decode_cb_ = decode_cb;
@@ -488,7 +489,7 @@ class VideoRendererImplTest : public testing::Test {
}
void FlushRequested(const base::Closure& callback) {
- DCHECK_EQ(&message_loop_, base::MessageLoop::current());
+ DCHECK_EQ(&message_loop_, base::MessageLoopCurrent::Get());
decode_results_.clear();
if (!decode_cb_.is_null()) {
QueueFrames("abort");
diff --git a/chromium/media/test/BUILD.gn b/chromium/media/test/BUILD.gn
index 4badd3dc056..2e6bdabd49e 100644
--- a/chromium/media/test/BUILD.gn
+++ b/chromium/media/test/BUILD.gn
@@ -44,7 +44,7 @@ source_set("pipeline_integration_test_base") {
"//media:test_support",
"//testing/gmock",
"//testing/gtest",
- "//third_party/libaom:av1_features",
+ "//third_party/libaom:av1_buildflags",
"//url",
]
}
@@ -69,7 +69,7 @@ source_set("pipeline_integration_tests") {
"//media:test_support",
"//media/mojo/clients",
"//services/service_manager/public/cpp:service_test_support",
- "//third_party/libaom:av1_features",
+ "//third_party/libaom:av1_buildflags",
# Needed for the opus_config
"//third_party/opus",
@@ -127,7 +127,7 @@ source_set("mojo_pipeline_integration_tests") {
"//media/mojo/services",
"//services/service_manager/public/cpp:service_test_support",
"//testing/gtest",
- "//third_party/libaom:av1_features",
+ "//third_party/libaom:av1_buildflags",
"//ui/gfx:test_support",
"//ui/gfx/geometry",
@@ -161,6 +161,8 @@ pipeline_integration_fuzzer_variants = [
"WEBM_VP8",
"WEBM_VP9",
"WEBM_OPUS_VP9",
+ "MP4_FLAC",
+ "MP3",
# See below for additional variants depending on build configuration.
]
@@ -168,7 +170,6 @@ pipeline_integration_fuzzer_variants = [
if (proprietary_codecs) {
pipeline_integration_fuzzer_variants += [
"ADTS",
- "MP3",
"MP4_AACLC",
"MP4_AACSBR",
@@ -176,7 +177,6 @@ if (proprietary_codecs) {
# SourceBufferState::Init differentiate kinds of AVC, we use "AVC1" here to
# retain corpus associated with this fuzzer target name.
"MP4_AVC1",
- "MP4_FLAC",
"MP4_AACLC_AVC",
]
if (enable_mse_mpeg2ts_stream_parser) {
diff --git a/chromium/media/video/BUILD.gn b/chromium/media/video/BUILD.gn
index 863d98ed56b..fb74e4154e5 100644
--- a/chromium/media/video/BUILD.gn
+++ b/chromium/media/video/BUILD.gn
@@ -88,6 +88,7 @@ static_library("test_support") {
"//gpu/command_buffer/client:gles2_interface",
"//gpu/command_buffer/common",
"//media/base:test_support",
+ "//services/ui/public/cpp/gpu",
"//testing/gmock",
"//ui/gfx",
]
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
index 688ec3a7279..2bcbac3231c 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -16,6 +16,7 @@
#include "base/barrier_closure.h"
#include "base/bind.h"
+#include "base/callback_helpers.h"
#include "base/containers/circular_deque.h"
#include "base/containers/stack_container.h"
#include "base/location.h"
@@ -31,6 +32,7 @@
#include "media/video/gpu_video_accelerator_factories.h"
#include "third_party/libyuv/include/libyuv.h"
#include "ui/gfx/buffer_format_util.h"
+#include "ui/gfx/color_space.h"
#include "ui/gl/trace_util.h"
namespace media {
@@ -360,27 +362,29 @@ void CopyRowsToI420Buffer(int first_row,
uint8_t* output,
int dest_stride,
base::OnceClosure done) {
+ base::ScopedClosureRunner done_runner(std::move(done));
TRACE_EVENT2("media", "CopyRowsToI420Buffer", "bytes_per_row", bytes_per_row,
"rows", rows);
- if (output) {
- DCHECK_NE(dest_stride, 0);
- DCHECK_LE(bytes_per_row, std::abs(dest_stride));
- DCHECK_LE(bytes_per_row, source_stride);
- DCHECK_GE(bit_depth, 8u);
-
- if (bit_depth == 8) {
- libyuv::CopyPlane(source + source_stride * first_row, source_stride,
- output + dest_stride * first_row, dest_stride,
- bytes_per_row, rows);
- } else {
- const int scale = 0x10000 >> (bit_depth - 8);
- libyuv::Convert16To8Plane(
- reinterpret_cast<const uint16*>(source + source_stride * first_row),
- source_stride / 2, output + dest_stride * first_row, dest_stride,
- scale, bytes_per_row, rows);
- }
+
+ if (!output)
+ return;
+
+ DCHECK_NE(dest_stride, 0);
+ DCHECK_LE(bytes_per_row, std::abs(dest_stride));
+ DCHECK_LE(bytes_per_row, source_stride);
+ DCHECK_GE(bit_depth, 8u);
+
+ if (bit_depth == 8) {
+ libyuv::CopyPlane(source + source_stride * first_row, source_stride,
+ output + dest_stride * first_row, dest_stride,
+ bytes_per_row, rows);
+ } else {
+ const int scale = 0x10000 >> (bit_depth - 8);
+ libyuv::Convert16To8Plane(
+ reinterpret_cast<const uint16*>(source + source_stride * first_row),
+ source_stride / 2, output + dest_stride * first_row, dest_stride, scale,
+ bytes_per_row, rows);
}
- std::move(done).Run();
}
void CopyRowsToNV12Buffer(int first_row,
@@ -392,30 +396,32 @@ void CopyRowsToNV12Buffer(int first_row,
uint8_t* dest_uv,
int dest_stride_uv,
base::OnceClosure done) {
+ base::ScopedClosureRunner done_runner(std::move(done));
TRACE_EVENT2("media", "CopyRowsToNV12Buffer", "bytes_per_row", bytes_per_row,
"rows", rows);
- if (dest_y && dest_uv) {
- DCHECK_NE(dest_stride_y, 0);
- DCHECK_NE(dest_stride_uv, 0);
- DCHECK_LE(bytes_per_row, std::abs(dest_stride_y));
- DCHECK_LE(bytes_per_row, std::abs(dest_stride_uv));
- DCHECK_EQ(0, first_row % 2);
-
- libyuv::I420ToNV12(
- source_frame->visible_data(VideoFrame::kYPlane) +
- first_row * source_frame->stride(VideoFrame::kYPlane),
- source_frame->stride(VideoFrame::kYPlane),
- source_frame->visible_data(VideoFrame::kUPlane) +
- first_row / 2 * source_frame->stride(VideoFrame::kUPlane),
- source_frame->stride(VideoFrame::kUPlane),
- source_frame->visible_data(VideoFrame::kVPlane) +
- first_row / 2 * source_frame->stride(VideoFrame::kVPlane),
- source_frame->stride(VideoFrame::kVPlane),
- dest_y + first_row * dest_stride_y, dest_stride_y,
- dest_uv + first_row / 2 * dest_stride_uv, dest_stride_uv, bytes_per_row,
- rows);
- }
- std::move(done).Run();
+
+ if (!dest_y || !dest_uv)
+ return;
+
+ DCHECK_NE(dest_stride_y, 0);
+ DCHECK_NE(dest_stride_uv, 0);
+ DCHECK_LE(bytes_per_row, std::abs(dest_stride_y));
+ DCHECK_LE(bytes_per_row, std::abs(dest_stride_uv));
+ DCHECK_EQ(0, first_row % 2);
+
+ libyuv::I420ToNV12(
+ source_frame->visible_data(VideoFrame::kYPlane) +
+ first_row * source_frame->stride(VideoFrame::kYPlane),
+ source_frame->stride(VideoFrame::kYPlane),
+ source_frame->visible_data(VideoFrame::kUPlane) +
+ first_row / 2 * source_frame->stride(VideoFrame::kUPlane),
+ source_frame->stride(VideoFrame::kUPlane),
+ source_frame->visible_data(VideoFrame::kVPlane) +
+ first_row / 2 * source_frame->stride(VideoFrame::kVPlane),
+ source_frame->stride(VideoFrame::kVPlane),
+ dest_y + first_row * dest_stride_y, dest_stride_y,
+ dest_uv + first_row / 2 * dest_stride_uv, dest_stride_uv, bytes_per_row,
+ rows);
}
void CopyRowsToUYVYBuffer(int first_row,
@@ -425,25 +431,27 @@ void CopyRowsToUYVYBuffer(int first_row,
uint8_t* output,
int dest_stride,
base::OnceClosure done) {
+ base::ScopedClosureRunner done_runner(std::move(done));
TRACE_EVENT2("media", "CopyRowsToUYVYBuffer", "bytes_per_row", width * 2,
"rows", rows);
- if (output) {
- DCHECK_NE(dest_stride, 0);
- DCHECK_LE(width, std::abs(dest_stride / 2));
- DCHECK_EQ(0, first_row % 2);
- libyuv::I420ToUYVY(
- source_frame->visible_data(VideoFrame::kYPlane) +
- first_row * source_frame->stride(VideoFrame::kYPlane),
- source_frame->stride(VideoFrame::kYPlane),
- source_frame->visible_data(VideoFrame::kUPlane) +
- first_row / 2 * source_frame->stride(VideoFrame::kUPlane),
- source_frame->stride(VideoFrame::kUPlane),
- source_frame->visible_data(VideoFrame::kVPlane) +
- first_row / 2 * source_frame->stride(VideoFrame::kVPlane),
- source_frame->stride(VideoFrame::kVPlane),
- output + first_row * dest_stride, dest_stride, width, rows);
- }
- std::move(done).Run();
+
+ if (!output)
+ return;
+
+ DCHECK_NE(dest_stride, 0);
+ DCHECK_LE(width, std::abs(dest_stride / 2));
+ DCHECK_EQ(0, first_row % 2);
+ libyuv::I420ToUYVY(
+ source_frame->visible_data(VideoFrame::kYPlane) +
+ first_row * source_frame->stride(VideoFrame::kYPlane),
+ source_frame->stride(VideoFrame::kYPlane),
+ source_frame->visible_data(VideoFrame::kUPlane) +
+ first_row / 2 * source_frame->stride(VideoFrame::kUPlane),
+ source_frame->stride(VideoFrame::kUPlane),
+ source_frame->visible_data(VideoFrame::kVPlane) +
+ first_row / 2 * source_frame->stride(VideoFrame::kVPlane),
+ source_frame->stride(VideoFrame::kVPlane),
+ output + first_row * dest_stride, dest_stride, width, rows);
}
void CopyRowsToRGB10Buffer(bool is_argb,
@@ -454,60 +462,54 @@ void CopyRowsToRGB10Buffer(bool is_argb,
uint8_t* output,
int dest_stride,
base::OnceClosure done) {
+ base::ScopedClosureRunner done_runner(std::move(done));
TRACE_EVENT2("media", "CopyRowsToXR30Buffer", "bytes_per_row", width * 2,
"rows", rows);
- if (output) {
- DCHECK_NE(dest_stride, 0);
- DCHECK_LE(width, std::abs(dest_stride / 2));
- DCHECK_EQ(0, first_row % 2);
-
- int color_space = COLOR_SPACE_UNSPECIFIED;
- if (source_frame->metadata()->GetInteger(VideoFrameMetadata::COLOR_SPACE,
- &color_space)) {
- color_space = COLOR_SPACE_UNSPECIFIED;
+ if (!output)
+ return;
+
+ DCHECK_NE(dest_stride, 0);
+ DCHECK_LE(width, std::abs(dest_stride / 2));
+ DCHECK_EQ(0, first_row % 2);
+
+ const uint16_t* y_plane = reinterpret_cast<const uint16_t*>(
+ source_frame->visible_data(VideoFrame::kYPlane) +
+ first_row * source_frame->stride(VideoFrame::kYPlane));
+ const size_t y_plane_stride = source_frame->stride(VideoFrame::kYPlane) / 2;
+ const uint16_t* v_plane = reinterpret_cast<const uint16_t*>(
+ source_frame->visible_data(VideoFrame::kVPlane) +
+ first_row / 2 * source_frame->stride(VideoFrame::kVPlane));
+ const size_t v_plane_stride = source_frame->stride(VideoFrame::kVPlane) / 2;
+ const uint16_t* u_plane = reinterpret_cast<const uint16_t*>(
+ source_frame->visible_data(VideoFrame::kUPlane) +
+ first_row / 2 * source_frame->stride(VideoFrame::kUPlane));
+ const size_t u_plane_stride = source_frame->stride(VideoFrame::kUPlane) / 2;
+ uint8_t* dest_rgb10 = output + first_row * dest_stride;
+
+ SkYUVColorSpace skyuv = kRec709_SkYUVColorSpace;
+ source_frame->ColorSpace().ToSkYUVColorSpace(&skyuv);
+
+ if (skyuv == kRec601_SkYUVColorSpace) {
+ if (is_argb) {
+ libyuv::I010ToAR30(y_plane, y_plane_stride, u_plane, u_plane_stride,
+ v_plane, v_plane_stride, dest_rgb10, dest_stride,
+ width, rows);
+ } else {
+ libyuv::I010ToAB30(y_plane, y_plane_stride, u_plane, u_plane_stride,
+ v_plane, v_plane_stride, dest_rgb10, dest_stride,
+ width, rows);
}
- const uint16_t* y_plane = reinterpret_cast<const uint16_t*>(
- source_frame->visible_data(VideoFrame::kYPlane) +
- first_row * source_frame->stride(VideoFrame::kYPlane));
- const size_t y_plane_stride = source_frame->stride(VideoFrame::kYPlane) / 2;
- const uint16_t* v_plane = reinterpret_cast<const uint16_t*>(
- source_frame->visible_data(VideoFrame::kVPlane) +
- first_row / 2 * source_frame->stride(VideoFrame::kVPlane));
- const size_t v_plane_stride = source_frame->stride(VideoFrame::kVPlane) / 2;
- const uint16_t* u_plane = reinterpret_cast<const uint16_t*>(
- source_frame->visible_data(VideoFrame::kUPlane) +
- first_row / 2 * source_frame->stride(VideoFrame::kUPlane));
- const size_t u_plane_stride = source_frame->stride(VideoFrame::kUPlane) / 2;
- uint8_t* dest_ar30 = output + first_row * dest_stride;
-
- switch (color_space) {
- case COLOR_SPACE_HD_REC709:
- if (is_argb) {
- libyuv::H010ToAR30(y_plane, y_plane_stride, u_plane, u_plane_stride,
- v_plane, v_plane_stride, dest_ar30, dest_stride,
- width, rows);
- } else {
- libyuv::H010ToAB30(y_plane, y_plane_stride, u_plane, u_plane_stride,
- v_plane, v_plane_stride, dest_ar30, dest_stride,
- width, rows);
- }
- break;
- case COLOR_SPACE_UNSPECIFIED:
- case COLOR_SPACE_JPEG:
- case COLOR_SPACE_SD_REC601:
- if (is_argb) {
- libyuv::I010ToAR30(y_plane, y_plane_stride, u_plane, u_plane_stride,
- v_plane, v_plane_stride, dest_ar30, dest_stride,
- width, rows);
- } else {
- libyuv::I010ToAB30(y_plane, y_plane_stride, u_plane, u_plane_stride,
- v_plane, v_plane_stride, dest_ar30, dest_stride,
- width, rows);
- }
- break;
+ } else {
+ if (is_argb) {
+ libyuv::H010ToAR30(y_plane, y_plane_stride, u_plane, u_plane_stride,
+ v_plane, v_plane_stride, dest_rgb10, dest_stride,
+ width, rows);
+ } else {
+ libyuv::H010ToAB30(y_plane, y_plane_stride, u_plane, u_plane_stride,
+ v_plane, v_plane_stride, dest_rgb10, dest_stride,
+ width, rows);
}
}
- std::move(done).Run();
}
gfx::Size CodedSize(const scoped_refptr<VideoFrame>& video_frame,
@@ -901,7 +903,13 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
#endif
break;
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
+ case GpuVideoAcceleratorFactories::OutputFormat::XB30:
allow_overlay = true;
+ // We've converted the YUV to RGB, fix the color space.
+ // TODO(hubbe): The libyuv YUV to RGB conversion may not have
+ // honored the color space conversion 100%. We should either fix
+ // libyuv or find a way for later passes to make up the difference.
+ frame->set_color_space(video_frame->ColorSpace().GetAsRGB());
break;
default:
break;
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
index 026c5918bec..ca6af30c90e 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
@@ -128,6 +128,15 @@ class GpuMemoryBufferVideoFramePoolTest : public ::testing::Test {
return video_frame;
}
+  // Note that the X portion is set to 1 since it may use ARGB instead of
+  // XRGB on some platforms.
+ uint32_t as_xr30(uint32_t r, uint32_t g, uint32_t b) {
+ return (3 << 30) | (r << 20) | (g << 10) | b;
+ }
+ uint32_t as_xb30(uint32_t r, uint32_t g, uint32_t b) {
+ return (3 << 30) | (b << 20) | (g << 10) | r;
+ }
+
protected:
base::SimpleTestTickClock test_clock_;
std::unique_ptr<MockGpuVideoAcceleratorFactories> mock_gpu_factories_;
@@ -331,6 +340,62 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30Frame) {
EXPECT_EQ(1u, gles2_->gen_textures_count());
EXPECT_TRUE(frame->metadata()->IsTrue(
media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+
+ EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
+ mock_gpu_factories_->created_memory_buffers()[0]->Map();
+
+ void* memory = mock_gpu_factories_->created_memory_buffers()[0]->memory(0);
+ EXPECT_EQ(as_xr30(0, 311, 0), *static_cast<uint32_t*>(memory));
+}
+
+TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30FrameBT709) {
+ scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10, 10);
+ software_frame->set_color_space(gfx::ColorSpace::CreateREC709());
+ scoped_refptr<VideoFrame> frame;
+ mock_gpu_factories_->SetVideoFrameOutputFormat(
+ media::GpuVideoAcceleratorFactories::OutputFormat::XR30);
+ gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
+ software_frame, base::BindOnce(MaybeCreateHardwareFrameCallback, &frame));
+
+ RunUntilIdle();
+
+ EXPECT_NE(software_frame.get(), frame.get());
+ EXPECT_EQ(PIXEL_FORMAT_ARGB, frame->format());
+ EXPECT_EQ(1u, frame->NumTextures());
+ EXPECT_EQ(1u, gles2_->gen_textures_count());
+ EXPECT_TRUE(frame->metadata()->IsTrue(
+ media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+
+ EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
+ mock_gpu_factories_->created_memory_buffers()[0]->Map();
+
+ void* memory = mock_gpu_factories_->created_memory_buffers()[0]->memory(0);
+ EXPECT_EQ(as_xr30(0, 311, 0), *static_cast<uint32_t*>(memory));
+}
+
+TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30FrameBT601) {
+ scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10, 10);
+ software_frame->set_color_space(gfx::ColorSpace::CreateREC601());
+ scoped_refptr<VideoFrame> frame;
+ mock_gpu_factories_->SetVideoFrameOutputFormat(
+ media::GpuVideoAcceleratorFactories::OutputFormat::XR30);
+ gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
+ software_frame, base::BindOnce(MaybeCreateHardwareFrameCallback, &frame));
+
+ RunUntilIdle();
+
+ EXPECT_NE(software_frame.get(), frame.get());
+ EXPECT_EQ(PIXEL_FORMAT_ARGB, frame->format());
+ EXPECT_EQ(1u, frame->NumTextures());
+ EXPECT_EQ(1u, gles2_->gen_textures_count());
+ EXPECT_TRUE(frame->metadata()->IsTrue(
+ media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+
+ EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
+ mock_gpu_factories_->created_memory_buffers()[0]->Map();
+
+ void* memory = mock_gpu_factories_->created_memory_buffers()[0]->memory(0);
+ EXPECT_EQ(as_xr30(0, 543, 0), *static_cast<uint32_t*>(memory));
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXB30Frame) {
diff --git a/chromium/media/video/gpu_video_accelerator_factories.h b/chromium/media/video/gpu_video_accelerator_factories.h
index 3d8065c0688..1461b6f13a3 100644
--- a/chromium/media/video/gpu_video_accelerator_factories.h
+++ b/chromium/media/video/gpu_video_accelerator_factories.h
@@ -35,9 +35,9 @@ namespace gpu {
struct SyncToken;
};
-namespace viz {
-class ContextProvider;
-}
+namespace ui {
+class ContextProviderCommandBuffer;
+} // namespace ui
namespace media {
@@ -101,7 +101,9 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
gfx::BufferFormat format,
gfx::BufferUsage usage) = 0;
- virtual bool ShouldUseGpuMemoryBuffersForVideoFrames() const = 0;
+  // |for_media_stream| is true when the frames are used for WebRTC media
+ virtual bool ShouldUseGpuMemoryBuffersForVideoFrames(
+ bool for_media_stream) const = 0;
// The GLContextLock must be taken when calling this.
virtual unsigned ImageTextureTarget(gfx::BufferFormat format) = 0;
@@ -132,7 +134,8 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
virtual VideoEncodeAccelerator::SupportedProfiles
GetVideoEncodeAcceleratorSupportedProfiles() = 0;
- virtual viz::ContextProvider* GetMediaContextProvider() = 0;
+ virtual scoped_refptr<ui::ContextProviderCommandBuffer>
+ GetMediaContextProvider() = 0;
// Sets the current pipeline rendering color space.
virtual void SetRenderingColorSpace(const gfx::ColorSpace& color_space) = 0;
diff --git a/chromium/media/video/h264_parser.cc b/chromium/media/video/h264_parser.cc
index bc842576333..dc1c43f2332 100644
--- a/chromium/media/video/h264_parser.cc
+++ b/chromium/media/video/h264_parser.cc
@@ -16,6 +16,45 @@
namespace media {
+namespace {
+// Converts [|start|, |end|) range with |encrypted_ranges| into a vector of
+// SubsampleEntry. |encrypted_ranges| must be within the range defined by
+// |start| and |end|.
+// It is OK to pass in empty |encrypted_ranges|; this will return a vector
+// with a single SubsampleEntry with clear_bytes set to the size of the buffer.
+std::vector<SubsampleEntry> EncryptedRangesToSubsampleEntry(
+ const uint8_t* start,
+ const uint8_t* end,
+ const Ranges<const uint8_t*>& encrypted_ranges) {
+ std::vector<SubsampleEntry> subsamples;
+ const uint8_t* cur = start;
+ for (size_t i = 0; i < encrypted_ranges.size(); ++i) {
+ SubsampleEntry subsample = {};
+
+ const uint8_t* encrypted_start = encrypted_ranges.start(i);
+ DCHECK_GE(encrypted_start, cur)
+ << "Encrypted range started before the current buffer pointer.";
+ subsample.clear_bytes = encrypted_start - cur;
+
+ const uint8_t* encrypted_end = encrypted_ranges.end(i);
+ subsample.cypher_bytes = encrypted_end - encrypted_start;
+
+ subsamples.push_back(subsample);
+ cur = encrypted_end;
+ DCHECK_LE(cur, end) << "Encrypted range is outside the buffer range.";
+ }
+
+ // If there is more data in the buffer but not covered by encrypted_ranges,
+ // then it must be in the clear.
+ if (cur < end) {
+ SubsampleEntry subsample = {};
+ subsample.clear_bytes = end - cur;
+ subsamples.push_back(subsample);
+ }
+ return subsamples;
+}
+} // namespace
+
bool H264SliceHeader::IsPSlice() const {
return (slice_type % 5 == kPSlice);
}
@@ -221,6 +260,7 @@ void H264Parser::Reset() {
stream_ = NULL;
bytes_left_ = 0;
encrypted_ranges_.clear();
+ previous_nalu_range_.clear();
}
void H264Parser::SetStream(const uint8_t* stream, off_t stream_size) {
@@ -237,6 +277,7 @@ void H264Parser::SetEncryptedStream(
stream_ = stream;
bytes_left_ = stream_size;
+ previous_nalu_range_.clear();
encrypted_ranges_.clear();
const uint8_t* start = stream;
@@ -548,6 +589,8 @@ H264Parser::Result H264Parser::AdvanceToNextNALU(H264NALU* nalu) {
<< " size: " << nalu->size
<< " ref: " << static_cast<int>(nalu->nal_ref_idc);
+ previous_nalu_range_.clear();
+ previous_nalu_range_.Add(nalu->data, nalu->data + nalu->size);
return kOk;
}
@@ -1508,4 +1551,14 @@ H264Parser::Result H264Parser::ParseSEI(H264SEIMessage* sei_msg) {
return kOk;
}
+std::vector<SubsampleEntry> H264Parser::GetCurrentSubsamples() {
+ DCHECK_EQ(previous_nalu_range_.size(), 1u)
+ << "This should only be called after a "
+ "successful call to AdvanceToNextNalu()";
+
+ auto intersection = encrypted_ranges_.IntersectionWith(previous_nalu_range_);
+ return EncryptedRangesToSubsampleEntry(
+ previous_nalu_range_.start(0), previous_nalu_range_.end(0), intersection);
+}
+
} // namespace media
diff --git a/chromium/media/video/h264_parser.h b/chromium/media/video/h264_parser.h
index 021bf89c8fa..141a0c1833a 100644
--- a/chromium/media/video/h264_parser.h
+++ b/chromium/media/video/h264_parser.h
@@ -446,6 +446,12 @@ class MEDIA_EXPORT H264Parser {
// by the caller.
Result ParseSEI(H264SEIMessage* sei_msg);
+ // The return value of this method changes for every successful call to
+ // AdvanceToNextNALU().
+ // This returns the subsample information for the last NALU that was output
+ // from AdvanceToNextNALU().
+ std::vector<SubsampleEntry> GetCurrentSubsamples();
+
private:
// Move the stream pointer to the beginning of the next NALU,
// i.e. pointing at the next start code.
@@ -507,6 +513,10 @@ class MEDIA_EXPORT H264Parser {
// SetEncryptedStream().
Ranges<const uint8_t*> encrypted_ranges_;
+ // This contains the range of the previous NALU found in
+  // AdvanceToNextNALU(). Holds exactly one range.
+ Ranges<const uint8_t*> previous_nalu_range_;
+
DISALLOW_COPY_AND_ASSIGN(H264Parser);
};
diff --git a/chromium/media/video/h264_parser_unittest.cc b/chromium/media/video/h264_parser_unittest.cc
index 44688a0c9fb..7a67fe81e4d 100644
--- a/chromium/media/video/h264_parser_unittest.cc
+++ b/chromium/media/video/h264_parser_unittest.cc
@@ -10,7 +10,9 @@
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
#include "base/optional.h"
+#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
+#include "media/base/subsample_entry.h"
#include "media/base/test_data_util.h"
#include "media/video/h264_parser.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -167,4 +169,109 @@ TEST(H264ParserTest, ParseNALUsFromStreamFile) {
ASSERT_EQ(num_nalus, nalus.size());
}
+// Verify that GetCurrentSubsamples works.
+TEST(H264ParserTest, GetCurrentSubsamplesNormal) {
+ const uint8_t kStream[] = {
+ // First NALU.
+ // Clear bytes = 4.
+ 0x00, 0x00, 0x01, // start code.
+ 0x65, // Nalu type = 5, IDR slice.
+ // Below is bogus data.
+ // Encrypted bytes = 15.
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03,
+ 0x04, 0x05, 0x06,
+ // Clear bytes = 5.
+ 0x07, 0x00, 0x01, 0x02, 0x03,
+ // Encrypted until next NALU. Encrypted bytes = 20.
+ 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ // Note that this is still in the encrypted region but looks like a start
+ // code.
+ 0x00, 0x00, 0x01, 0x03, 0x04, 0x05, 0x06, 0x07,
+ // Second NALU. Completely clear.
+ // Clear bytes = 10.
+ 0x00, 0x00, 0x01, // start code.
+ 0x06, // nalu type = 6, SEI.
+ // Bogus data.
+ 0xff, 0xfe, 0xfd, 0xee, 0x12, 0x33,
+ };
+ std::vector<SubsampleEntry> subsamples;
+ subsamples.emplace_back(4u, 15u);
+ subsamples.emplace_back(5u, 20u);
+ subsamples.emplace_back(10u, 0u);
+ H264Parser parser;
+ parser.SetEncryptedStream(kStream, base::size(kStream), subsamples);
+
+ H264NALU nalu;
+ ASSERT_EQ(H264Parser::kOk, parser.AdvanceToNextNALU(&nalu));
+ auto nalu_subsamples = parser.GetCurrentSubsamples();
+ ASSERT_EQ(2u, nalu_subsamples.size());
+
+ // Note that nalu->data starts from the NALU header, i.e. does not include
+ // the start code.
+ EXPECT_EQ(1u, nalu_subsamples[0].clear_bytes);
+ EXPECT_EQ(15u, nalu_subsamples[0].cypher_bytes);
+ EXPECT_EQ(5u, nalu_subsamples[1].clear_bytes);
+ EXPECT_EQ(20u, nalu_subsamples[1].cypher_bytes);
+
+ // Make sure that it reached the next NALU.
+ EXPECT_EQ(H264Parser::kOk, parser.AdvanceToNextNALU(&nalu));
+ nalu_subsamples = parser.GetCurrentSubsamples();
+ ASSERT_EQ(1u, nalu_subsamples.size());
+
+ EXPECT_EQ(7u, nalu_subsamples[0].clear_bytes);
+ EXPECT_EQ(0u, nalu_subsamples[0].cypher_bytes);
+}
+
+// Verify that subsamples starting at non-NALU boundary also works.
+TEST(H264ParserTest, GetCurrentSubsamplesSubsampleNotStartingAtNaluBoundary) {
+ const uint8_t kStream[] = {
+ // First NALU.
+ // Clear bytes = 4.
+ 0x00, 0x00, 0x01, // start code.
+ 0x65, // Nalu type = 5, IDR slice.
+ // Below is bogus data.
+ // Encrypted bytes = 24.
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03,
+ 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ // Clear bytes = 18. The rest is in the clear. Note that this is not at
+ // a NALU boundary and a NALU starts below.
+ 0xaa, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ // Second NALU. Completely clear.
+ 0x00, 0x00, 0x01, // start code.
+ 0x06, // nalu type = 6, SEI.
+ // Bogus data.
+ 0xff, 0xfe, 0xfd, 0xee, 0x12, 0x33,
+ };
+
+ std::vector<SubsampleEntry> subsamples;
+ subsamples.emplace_back(4u, 24u);
+ subsamples.emplace_back(18, 0);
+ H264Parser parser;
+ parser.SetEncryptedStream(kStream, base::size(kStream), subsamples);
+
+ H264NALU nalu;
+ ASSERT_EQ(H264Parser::kOk, parser.AdvanceToNextNALU(&nalu));
+ auto nalu_subsamples = parser.GetCurrentSubsamples();
+ ASSERT_EQ(2u, nalu_subsamples.size());
+
+ // Note that nalu->data starts from the NALU header, i.e. does not include
+ // the start code.
+ EXPECT_EQ(1u, nalu_subsamples[0].clear_bytes);
+ EXPECT_EQ(24u, nalu_subsamples[0].cypher_bytes);
+
+ // The nalu ends with 8 more clear bytes. The last 10 bytes should be
+ // associated with the next nalu.
+ EXPECT_EQ(8u, nalu_subsamples[1].clear_bytes);
+ EXPECT_EQ(0u, nalu_subsamples[1].cypher_bytes);
+
+ ASSERT_EQ(H264Parser::kOk, parser.AdvanceToNextNALU(&nalu));
+ nalu_subsamples = parser.GetCurrentSubsamples();
+ ASSERT_EQ(1u, nalu_subsamples.size());
+
+ // Although the input had 10 more bytes, since nalu->data starts from the nalu
+ // header, there's only 7 more bytes left.
+ EXPECT_EQ(7u, nalu_subsamples[0].clear_bytes);
+ EXPECT_EQ(0u, nalu_subsamples[0].cypher_bytes);
+}
+
} // namespace media
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.cc b/chromium/media/video/mock_gpu_video_accelerator_factories.cc
index fadb1e23b94..f892569f1e4 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.cc
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.cc
@@ -99,7 +99,10 @@ MockGpuVideoAcceleratorFactories::CreateGpuMemoryBuffer(
gfx::BufferUsage /* usage */) {
if (fail_to_allocate_gpu_memory_buffer_)
return nullptr;
- return std::make_unique<GpuMemoryBufferImpl>(size, format);
+ std::unique_ptr<gfx::GpuMemoryBuffer> ret(
+ new GpuMemoryBufferImpl(size, format));
+ created_memory_buffers_.push_back(ret.get());
+ return ret;
}
std::unique_ptr<base::SharedMemory>
@@ -120,8 +123,8 @@ MockGpuVideoAcceleratorFactories::CreateVideoEncodeAccelerator() {
return base::WrapUnique(DoCreateVideoEncodeAccelerator());
}
-bool MockGpuVideoAcceleratorFactories::ShouldUseGpuMemoryBuffersForVideoFrames()
- const {
+bool MockGpuVideoAcceleratorFactories::ShouldUseGpuMemoryBuffersForVideoFrames(
+ bool for_media_stream) const {
return false;
}
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.h b/chromium/media/video/mock_gpu_video_accelerator_factories.h
index d5fe8a506cb..c46ba23cc52 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.h
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.h
@@ -9,6 +9,7 @@
#include <stdint.h>
#include <memory>
+#include <vector>
#include "base/macros.h"
#include "base/memory/ref_counted.h"
@@ -16,6 +17,7 @@
#include "media/video/gpu_video_accelerator_factories.h"
#include "media/video/video_decode_accelerator.h"
#include "media/video/video_encode_accelerator.h"
+#include "services/ui/public/cpp/gpu/context_provider_command_buffer.h"
#include "testing/gmock/include/gmock/gmock.h"
namespace base {
@@ -54,7 +56,8 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
VideoDecodeAccelerator::Capabilities());
MOCK_METHOD0(GetVideoEncodeAcceleratorSupportedProfiles,
VideoEncodeAccelerator::SupportedProfiles());
- MOCK_METHOD0(GetMediaContextProvider, viz::ContextProvider*());
+ MOCK_METHOD0(GetMediaContextProvider,
+ scoped_refptr<ui::ContextProviderCommandBuffer>());
MOCK_METHOD1(SetRenderingColorSpace, void(const gfx::ColorSpace&));
std::unique_ptr<gfx::GpuMemoryBuffer> CreateGpuMemoryBuffer(
@@ -62,7 +65,8 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
gfx::BufferFormat format,
gfx::BufferUsage usage) override;
- bool ShouldUseGpuMemoryBuffersForVideoFrames() const override;
+ bool ShouldUseGpuMemoryBuffersForVideoFrames(
+ bool for_media_stream) const override;
unsigned ImageTextureTarget(gfx::BufferFormat format) override;
OutputFormat VideoFrameOutputFormat(size_t bit_depth) override {
return video_frame_output_format_;
@@ -90,6 +94,10 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
gpu::gles2::GLES2Interface* GetGLES2Interface() { return gles2_; }
+ const std::vector<gfx::GpuMemoryBuffer*>& created_memory_buffers() {
+ return created_memory_buffers_;
+ }
+
private:
DISALLOW_COPY_AND_ASSIGN(MockGpuVideoAcceleratorFactories);
@@ -99,6 +107,8 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
bool fail_to_allocate_gpu_memory_buffer_ = false;
gpu::gles2::GLES2Interface* gles2_;
+
+ std::vector<gfx::GpuMemoryBuffer*> created_memory_buffers_;
};
} // namespace media
diff --git a/chromium/media/video/mock_video_decode_accelerator.h b/chromium/media/video/mock_video_decode_accelerator.h
index 89978987236..bcbddede81d 100644
--- a/chromium/media/video/mock_video_decode_accelerator.h
+++ b/chromium/media/video/mock_video_decode_accelerator.h
@@ -24,10 +24,12 @@ namespace media {
class MockVideoDecodeAccelerator : public VideoDecodeAccelerator {
public:
MockVideoDecodeAccelerator();
- virtual ~MockVideoDecodeAccelerator();
+ ~MockVideoDecodeAccelerator() override;
MOCK_METHOD2(Initialize, bool(const Config& config, Client* client));
MOCK_METHOD1(Decode, void(const BitstreamBuffer& bitstream_buffer));
+ MOCK_METHOD2(Decode,
+ void(scoped_refptr<DecoderBuffer> buffer, int32_t bitstream_id));
MOCK_METHOD1(AssignPictureBuffers,
void(const std::vector<PictureBuffer>& buffers));
MOCK_METHOD1(ReusePictureBuffer, void(int32_t picture_buffer_id));
diff --git a/chromium/media/video/mock_video_encode_accelerator.h b/chromium/media/video/mock_video_encode_accelerator.h
index 89c928612b9..1557b7e0d5f 100644
--- a/chromium/media/video/mock_video_encode_accelerator.h
+++ b/chromium/media/video/mock_video_encode_accelerator.h
@@ -15,7 +15,7 @@ namespace media {
class MockVideoEncodeAccelerator : public VideoEncodeAccelerator {
public:
MockVideoEncodeAccelerator();
- virtual ~MockVideoEncodeAccelerator();
+ ~MockVideoEncodeAccelerator() override;
MOCK_METHOD0(GetSupportedProfiles,
VideoEncodeAccelerator::SupportedProfiles());
diff --git a/chromium/media/video/picture.cc b/chromium/media/video/picture.cc
index dc1d48eb004..e7ab1dbb9af 100644
--- a/chromium/media/video/picture.cc
+++ b/chromium/media/video/picture.cc
@@ -75,7 +75,7 @@ Picture::Picture(int32_t picture_buffer_id,
color_space_(color_space),
allow_overlay_(allow_overlay),
size_changed_(false),
- surface_texture_(false),
+ texture_owner_(false),
wants_promotion_hint_(false) {}
Picture::Picture(const Picture& other) = default;
diff --git a/chromium/media/video/picture.h b/chromium/media/video/picture.h
index 5da84c279a2..0dedb51ee55 100644
--- a/chromium/media/video/picture.h
+++ b/chromium/media/video/picture.h
@@ -116,11 +116,9 @@ class MEDIA_EXPORT Picture {
void set_size_changed(bool size_changed) { size_changed_ = size_changed; }
- bool surface_texture() const { return surface_texture_; }
+ bool texture_owner() const { return texture_owner_; }
- void set_surface_texture(bool surface_texture) {
- surface_texture_ = surface_texture;
- }
+ void set_texture_owner(bool texture_owner) { texture_owner_ = texture_owner; }
bool wants_promotion_hint() const { return wants_promotion_hint_; }
@@ -135,7 +133,7 @@ class MEDIA_EXPORT Picture {
gfx::ColorSpace color_space_;
bool allow_overlay_;
bool size_changed_;
- bool surface_texture_;
+ bool texture_owner_;
bool wants_promotion_hint_;
};
diff --git a/chromium/media/video/video_decode_accelerator.cc b/chromium/media/video/video_decode_accelerator.cc
index eba47f33d69..adfc4a7dcd5 100644
--- a/chromium/media/video/video_decode_accelerator.cc
+++ b/chromium/media/video/video_decode_accelerator.cc
@@ -31,6 +31,11 @@ void VideoDecodeAccelerator::Client::NotifyInitializationComplete(
VideoDecodeAccelerator::~VideoDecodeAccelerator() = default;
+void VideoDecodeAccelerator::Decode(scoped_refptr<DecoderBuffer> buffer,
+ int32_t bitstream_id) {
+ NOTREACHED() << "By default DecoderBuffer is not supported.";
+}
+
bool VideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread(
const base::WeakPtr<Client>& decode_client,
const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) {
diff --git a/chromium/media/video/video_decode_accelerator.h b/chromium/media/video/video_decode_accelerator.h
index ea058c43d29..b4768003fdc 100644
--- a/chromium/media/video/video_decode_accelerator.h
+++ b/chromium/media/video/video_decode_accelerator.h
@@ -10,12 +10,13 @@
#include <memory>
#include <vector>
-#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "base/unguessable_token.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/cdm_context.h"
+#include "media/base/decoder_buffer.h"
#include "media/base/encryption_scheme.h"
#include "media/base/overlay_info.h"
#include "media/base/surface_manager.h"
@@ -267,6 +268,16 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
// |bitstream_buffer| is the input bitstream that is sent for decoding.
virtual void Decode(const BitstreamBuffer& bitstream_buffer) = 0;
+ // Decodes given decoder buffer that contains at most one frame. Once
+ // decoder is done with processing |buffer| it will call
+ // NotifyEndOfBitstreamBuffer() with the bitstream id.
+ // Parameters:
+ // |buffer| is the input buffer that is sent for decoding.
+ // |bitstream_id| identifies the buffer for PictureReady() and
+ // NotifyEndOfBitstreamBuffer()
+ virtual void Decode(scoped_refptr<DecoderBuffer> buffer,
+ int32_t bitstream_id);
+
// Assigns a set of texture-backed picture buffers to the video decoder.
//
// Ownership of each picture buffer remains with the client, but the client
diff --git a/chromium/media/video/video_encode_accelerator.cc b/chromium/media/video/video_encode_accelerator.cc
index 4b344b7029f..1f6c0dcb778 100644
--- a/chromium/media/video/video_encode_accelerator.cc
+++ b/chromium/media/video/video_encode_accelerator.cc
@@ -24,6 +24,12 @@ void VideoEncodeAccelerator::Flush(FlushCallback flush_callback) {
std::move(flush_callback).Run(false);
}
+void VideoEncodeAccelerator::RequestEncodingParametersChange(
+ const VideoBitrateAllocation& bitrate,
+ uint32_t framerate) {
+ RequestEncodingParametersChange(bitrate.GetSumBps(), framerate);
+}
+
} // namespace media
namespace std {
diff --git a/chromium/media/video/video_encode_accelerator.h b/chromium/media/video/video_encode_accelerator.h
index 0162dde3cc4..ddc1d7736de 100644
--- a/chromium/media/video/video_encode_accelerator.h
+++ b/chromium/media/video/video_encode_accelerator.h
@@ -17,6 +17,7 @@
#include "base/single_thread_task_runner.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/media_export.h"
+#include "media/base/video_bitrate_allocation.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
@@ -142,7 +143,7 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
// |buffer| is the bitstream buffer to use for output.
virtual void UseOutputBitstreamBuffer(const BitstreamBuffer& buffer) = 0;
- // Request a change to the encoding parameters. This is only a request,
+ // Request a change to the encoding parameters. This is only a request,
// fulfilled on a best-effort basis.
// Parameters:
// |bitrate| is the requested new bitrate, in bits per second.
@@ -150,6 +151,16 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
virtual void RequestEncodingParametersChange(uint32_t bitrate,
uint32_t framerate) = 0;
+ // Request a change to the encoding parameters. This is only a request,
+ // fulfilled on a best-effort basis. If not implemented, default behavior is
+ // to get the sum over layers and pass to version with bitrate as uint32_t.
+ // Parameters:
+ // |bitrate| is the requested new bitrate, per spatial and temporal layer.
+ // |framerate| is the requested new framerate, in frames per second.
+ virtual void RequestEncodingParametersChange(
+ const VideoBitrateAllocation& bitrate,
+ uint32_t framerate);
+
// Destroys the encoder: all pending inputs and outputs are dropped
// immediately and the component is freed. This call may asynchronously free
// system resources, but its client-visible effects are synchronous. After