summaryrefslogtreecommitdiff
path: root/chromium/media
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2020-10-12 14:27:29 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2020-10-13 09:35:20 +0000
commitc30a6232df03e1efbd9f3b226777b07e087a1122 (patch)
treee992f45784689f373bcc38d1b79a239ebe17ee23 /chromium/media
parent7b5b123ac58f58ffde0f4f6e488bcd09aa4decd3 (diff)
downloadqtwebengine-chromium-c30a6232df03e1efbd9f3b226777b07e087a1122.tar.gz
BASELINE: Update Chromium to 85.0.4183.14085-based
Change-Id: Iaa42f4680837c57725b1344f108c0196741f6057 Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/media')
-rw-r--r--chromium/media/BUILD.gn6
-rw-r--r--chromium/media/audio/alsa/alsa_input.cc20
-rw-r--r--chromium/media/audio/alsa/alsa_output.cc54
-rw-r--r--chromium/media/audio/alsa/alsa_output.h2
-rw-r--r--chromium/media/audio/alsa/alsa_output_unittest.cc24
-rw-r--r--chromium/media/audio/android/audio_android_unittest.cc1
-rw-r--r--chromium/media/audio/android/opensles_util.h2
-rw-r--r--chromium/media/audio/audio_debug_recording_helper.cc1
-rw-r--r--chromium/media/audio/audio_input_device.cc29
-rw-r--r--chromium/media/audio/audio_input_device.h10
-rw-r--r--chromium/media/audio/audio_input_device_unittest.cc25
-rw-r--r--chromium/media/audio/audio_input_unittest.cc8
-rw-r--r--chromium/media/audio/audio_low_latency_input_output_unittest.cc1
-rw-r--r--chromium/media/audio/audio_manager_base.cc1
-rw-r--r--chromium/media/audio/audio_manager_unittest.cc251
-rw-r--r--chromium/media/audio/audio_output_device.cc3
-rw-r--r--chromium/media/audio/audio_output_device_thread_callback.cc1
-rw-r--r--chromium/media/audio/audio_output_resampler.cc1
-rw-r--r--chromium/media/audio/audio_output_stream_sink.cc12
-rw-r--r--chromium/media/audio/audio_output_unittest.cc6
-rw-r--r--chromium/media/audio/audio_thread_hang_monitor.cc1
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.cc54
-rw-r--r--chromium/media/audio/fake_audio_input_stream.cc1
-rw-r--r--chromium/media/audio/fake_audio_output_stream.cc4
-rw-r--r--chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc5
-rw-r--r--chromium/media/audio/linux/audio_manager_linux.cc1
-rw-r--r--chromium/media/audio/mac/audio_manager_mac.cc98
-rw-r--r--chromium/media/audio/mac/audio_manager_mac.h19
-rw-r--r--chromium/media/audio/null_audio_sink.cc2
-rw-r--r--chromium/media/audio/power_observer_helper.cc1
-rw-r--r--chromium/media/audio/pulse/pulse_output.cc3
-rw-r--r--chromium/media/audio/pulse/pulse_util.cc1
-rw-r--r--chromium/media/audio/wav_audio_handler.cc1
-rw-r--r--chromium/media/audio/win/audio_device_listener_win.cc6
-rw-r--r--chromium/media/audio/win/audio_device_listener_win.h1
-rw-r--r--chromium/media/audio/win/audio_low_latency_output_win_unittest.cc1
-rw-r--r--chromium/media/audio/win/audio_output_win_unittest.cc1
-rw-r--r--chromium/media/audio/win/audio_session_event_listener_win.cc2
-rw-r--r--chromium/media/base/BUILD.gn7
-rw-r--r--chromium/media/base/android/android_cdm_factory.cc12
-rw-r--r--chromium/media/base/android/android_cdm_factory.h1
-rw-r--r--chromium/media/base/android/media_drm_bridge.cc1
-rw-r--r--chromium/media/base/android/media_drm_bridge_factory.cc3
-rw-r--r--chromium/media/base/android/media_drm_bridge_factory.h1
-rw-r--r--chromium/media/base/android/media_player_bridge.cc10
-rw-r--r--chromium/media/base/android/media_player_bridge.h3
-rw-r--r--chromium/media/base/async_destroy_video_decoder.h91
-rw-r--r--chromium/media/base/audio_buffer.cc7
-rw-r--r--chromium/media/base/audio_converter.cc1
-rw-r--r--chromium/media/base/audio_renderer.h4
-rw-r--r--chromium/media/base/audio_renderer_mixer.cc42
-rw-r--r--chromium/media/base/audio_renderer_mixer.h11
-rw-r--r--chromium/media/base/audio_renderer_mixer_input_unittest.cc8
-rw-r--r--chromium/media/base/audio_renderer_mixer_unittest.cc7
-rw-r--r--chromium/media/base/audio_shifter.cc1
-rw-r--r--chromium/media/base/bit_reader_core.h2
-rw-r--r--chromium/media/base/cdm_context.cc2
-rw-r--r--chromium/media/base/cdm_context.h11
-rw-r--r--chromium/media/base/cdm_factory.h1
-rw-r--r--chromium/media/base/cdm_promise.h1
-rw-r--r--chromium/media/base/cdm_session_tracker.h1
-rw-r--r--chromium/media/base/channel_layout.cc2
-rw-r--r--chromium/media/base/data_buffer.h2
-rw-r--r--chromium/media/base/decode_status.cc2
-rw-r--r--chromium/media/base/decoder_buffer.cc22
-rw-r--r--chromium/media/base/decoder_buffer.h4
-rw-r--r--chromium/media/base/fake_audio_renderer_sink.cc2
-rw-r--r--chromium/media/base/format_utils.cc6
-rw-r--r--chromium/media/base/key_systems.cc1
-rw-r--r--chromium/media/base/mac/video_frame_mac.cc1
-rw-r--r--chromium/media/base/mac/videotoolbox_helpers.cc2
-rw-r--r--chromium/media/base/media_observer.h1
-rw-r--r--chromium/media/base/media_switches.cc43
-rw-r--r--chromium/media/base/media_switches.h8
-rw-r--r--chromium/media/base/media_url_demuxer.cc1
-rw-r--r--chromium/media/base/memory_dump_provider_proxy.h1
-rw-r--r--chromium/media/base/mime_util_internal.cc10
-rw-r--r--chromium/media/base/mime_util_unittest.cc4
-rw-r--r--chromium/media/base/mock_filters.cc7
-rw-r--r--chromium/media/base/mock_filters.h7
-rw-r--r--chromium/media/base/output_device_info.cc2
-rw-r--r--chromium/media/base/pipeline.h7
-rw-r--r--chromium/media/base/pipeline_impl.cc38
-rw-r--r--chromium/media/base/pipeline_impl.h1
-rw-r--r--chromium/media/base/pipeline_impl_unittest.cc24
-rw-r--r--chromium/media/base/ranges.h2
-rw-r--r--chromium/media/base/renderer.cc14
-rw-r--r--chromium/media/base/renderer.h13
-rw-r--r--chromium/media/base/renderer_factory_selector.h3
-rw-r--r--chromium/media/base/speech_recognition_client.h10
-rw-r--r--chromium/media/base/status.h4
-rw-r--r--chromium/media/base/status_codes.h9
-rw-r--r--chromium/media/base/supported_types.cc1
-rw-r--r--chromium/media/base/test_helpers.h23
-rw-r--r--chromium/media/base/test_random.h2
-rw-r--r--chromium/media/base/user_input_monitor_linux.cc31
-rw-r--r--chromium/media/base/vector_math.cc14
-rw-r--r--chromium/media/base/video_codecs.cc1
-rw-r--r--chromium/media/base/video_decoder.cc13
-rw-r--r--chromium/media/base/video_decoder.h31
-rw-r--r--chromium/media/base/video_decoder_config.h2
-rw-r--r--chromium/media/base/video_frame.cc9
-rw-r--r--chromium/media/base/video_frame.h10
-rw-r--r--chromium/media/base/video_frame_metadata.cc212
-rw-r--r--chromium/media/base/video_frame_metadata.h324
-rw-r--r--chromium/media/base/video_frame_pool.cc3
-rw-r--r--chromium/media/base/video_frame_unittest.cc253
-rw-r--r--chromium/media/base/video_renderer_sink.h1
-rw-r--r--chromium/media/base/win/BUILD.gn5
-rw-r--r--chromium/media/base/win/mf_cdm_proxy.h (renamed from chromium/media/renderers/win/mf_cdm_proxy.h)6
-rw-r--r--chromium/media/base/win/mf_helpers.cc2
-rw-r--r--chromium/media/base/win/mf_initializer.h2
-rw-r--r--chromium/media/blink/BUILD.gn2
-rw-r--r--chromium/media/blink/cdm_session_adapter.cc10
-rw-r--r--chromium/media/blink/cdm_session_adapter.h8
-rw-r--r--chromium/media/blink/interval_map.h2
-rw-r--r--chromium/media/blink/multibuffer.cc1
-rw-r--r--chromium/media/blink/multibuffer_unittest.cc1
-rw-r--r--chromium/media/blink/url_index.cc1
-rw-r--r--chromium/media/blink/video_frame_compositor.h8
-rw-r--r--chromium/media/blink/webcontentdecryptionmodule_impl.cc7
-rw-r--r--chromium/media/blink/webcontentdecryptionmodule_impl.h4
-rw-r--r--chromium/media/blink/webcontentdecryptionmodulesession_impl.cc20
-rw-r--r--chromium/media/blink/webcontentdecryptionmodulesession_impl.h11
-rw-r--r--chromium/media/blink/webinbandtexttrack_impl.cc14
-rw-r--r--chromium/media/blink/webmediaplayer_impl.cc84
-rw-r--r--chromium/media/blink/webmediaplayer_impl.h26
-rw-r--r--chromium/media/blink/webmediaplayer_impl_unittest.cc188
-rw-r--r--chromium/media/blink/webmediaplayer_params.cc2
-rw-r--r--chromium/media/blink/webmediaplayer_params.h7
-rw-r--r--chromium/media/blink/websourcebuffer_impl.cc8
-rw-r--r--chromium/media/capabilities/video_decode_stats_db.h2
-rw-r--r--chromium/media/capture/BUILD.gn1
-rw-r--r--chromium/media/capture/content/android/thread_safe_capture_oracle.cc15
-rw-r--r--chromium/media/capture/content/video_capture_oracle.cc1
-rw-r--r--chromium/media/capture/mojom/BUILD.gn1
-rw-r--r--chromium/media/capture/mojom/video_capture_types.mojom12
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.cc2
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.h10
-rw-r--r--chromium/media/capture/run_all_unittests.cc1
-rw-r--r--chromium/media/capture/video/android/video_capture_device_android.cc1
-rw-r--r--chromium/media/capture/video/android/video_capture_device_factory_android.cc3
-rw-r--r--chromium/media/capture/video/chromeos/camera_buffer_factory.cc32
-rw-r--r--chromium/media/capture/video/chromeos/camera_buffer_factory.h15
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_context.cc4
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc284
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.h41
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc27
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc48
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.h5
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc7
-rw-r--r--chromium/media/capture/video/chromeos/capture_metadata_dispatcher.h43
-rw-r--r--chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc10
-rw-r--r--chromium/media/capture/video/chromeos/mojom/BUILD.gn1
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera3.mojom1
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.h27
-rw-r--r--chromium/media/capture/video/chromeos/request_manager_unittest.cc10
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.cc36
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.h2
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc9
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.cc30
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.h12
-rw-r--r--chromium/media/capture/video/fake_video_capture_device_factory.cc22
-rw-r--r--chromium/media/capture/video/fake_video_capture_device_unittest.cc23
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc4
-rw-r--r--chromium/media/capture/video/gpu_memory_buffer_utils.cc3
-rw-r--r--chromium/media/capture/video/linux/fake_v4l2_impl.cc19
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.cc25
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.h1
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc32
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm1
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac.mm4
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm15
-rw-r--r--chromium/media/capture/video/video_capture_device.h1
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc9
-rw-r--r--chromium/media/capture/video/video_capture_device_client_unittest.cc3
-rw-r--r--chromium/media/capture/video/video_capture_device_descriptor.cc12
-rw-r--r--chromium/media/capture/video/video_capture_device_descriptor.h15
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc77
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.cc204
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.h3
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc125
-rw-r--r--chromium/media/cast/common/expanded_value_base.h2
-rw-r--r--chromium/media/cast/common/mod_util.h2
-rw-r--r--chromium/media/cast/common/transport_encryption_handler.cc1
-rw-r--r--chromium/media/cast/net/pacing/paced_sender.cc1
-rw-r--r--chromium/media/cast/net/rtcp/receiver_rtcp_session.cc4
-rw-r--r--chromium/media/cast/net/rtcp/rtcp_builder.cc6
-rw-r--r--chromium/media/cast/net/rtcp/sender_rtcp_session.cc1
-rw-r--r--chromium/media/cast/net/rtcp/test_rtcp_packet_builder.cc6
-rw-r--r--chromium/media/cast/net/rtp/cast_message_builder.cc1
-rw-r--r--chromium/media/cast/net/udp_packet_pipe_unittest.cc1
-rw-r--r--chromium/media/cast/sender/audio_encoder.cc1
-rw-r--r--chromium/media/cast/sender/audio_encoder_unittest.cc1
-rw-r--r--chromium/media/cast/sender/audio_sender_unittest.cc1
-rw-r--r--chromium/media/cast/sender/external_video_encoder.cc8
-rw-r--r--chromium/media/cast/sender/frame_sender.cc1
-rw-r--r--chromium/media/cast/sender/h264_vt_encoder.cc15
-rw-r--r--chromium/media/cast/sender/performance_metrics_overlay.cc23
-rw-r--r--chromium/media/cast/sender/size_adaptable_video_encoder_base.cc1
-rw-r--r--chromium/media/cast/sender/video_encoder_unittest.cc1
-rw-r--r--chromium/media/cast/sender/video_sender.cc29
-rw-r--r--chromium/media/cast/sender/video_sender_unittest.cc12
-rw-r--r--chromium/media/cast/sender/vp8_encoder.cc8
-rw-r--r--chromium/media/cdm/aes_decryptor.cc3
-rw-r--r--chromium/media/cdm/aes_decryptor_unittest.cc1
-rw-r--r--chromium/media/cdm/cbcs_decryptor_fuzzer.cc1
-rw-r--r--chromium/media/cdm/cdm_adapter.cc28
-rw-r--r--chromium/media/cdm/cdm_adapter.h6
-rw-r--r--chromium/media/cdm/cdm_adapter_factory.cc11
-rw-r--r--chromium/media/cdm/cdm_adapter_factory.h1
-rw-r--r--chromium/media/cdm/cdm_adapter_unittest.cc5
-rw-r--r--chromium/media/cdm/cdm_auxiliary_helper.cc4
-rw-r--r--chromium/media/cdm/cdm_auxiliary_helper.h5
-rw-r--r--chromium/media/cdm/cdm_host_file.cc5
-rw-r--r--chromium/media/cdm/cdm_host_files.h1
-rw-r--r--chromium/media/cdm/cdm_module.cc1
-rw-r--r--chromium/media/cdm/cdm_wrapper.h2
-rw-r--r--chromium/media/cdm/cenc_decryptor_fuzzer.cc1
-rw-r--r--chromium/media/cdm/default_cdm_factory.cc8
-rw-r--r--chromium/media/cdm/default_cdm_factory.h1
-rw-r--r--chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h1
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc6
-rw-r--r--chromium/media/cdm/library_cdm/mock_library_cdm.cc1
-rw-r--r--chromium/media/ffmpeg/ffmpeg_decoding_loop.cc1
-rw-r--r--chromium/media/filters/BUILD.gn5
-rw-r--r--chromium/media/filters/audio_renderer_algorithm.cc29
-rw-r--r--chromium/media/filters/audio_renderer_algorithm.h15
-rw-r--r--chromium/media/filters/audio_renderer_algorithm_unittest.cc41
-rw-r--r--chromium/media/filters/dav1d_video_decoder.cc74
-rw-r--r--chromium/media/filters/dav1d_video_decoder.h7
-rw-r--r--chromium/media/filters/dav1d_video_decoder_unittest.cc57
-rw-r--r--chromium/media/filters/decoder_selector.cc12
-rw-r--r--chromium/media/filters/decoder_stream_traits.cc22
-rw-r--r--chromium/media/filters/decoder_stream_traits.h1
-rw-r--r--chromium/media/filters/decrypting_audio_decoder.h1
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.cc1
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.h2
-rw-r--r--chromium/media/filters/decrypting_video_decoder.cc2
-rw-r--r--chromium/media/filters/decrypting_video_decoder.h1
-rw-r--r--chromium/media/filters/fake_video_decoder.cc13
-rw-r--r--chromium/media/filters/fake_video_decoder.h5
-rw-r--r--chromium/media/filters/fake_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.cc39
-rw-r--r--chromium/media/filters/ffmpeg_glue.h2
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder.cc3
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/frame_processor.cc60
-rw-r--r--chromium/media/filters/frame_processor.h30
-rw-r--r--chromium/media/filters/frame_processor_unittest.cc408
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder.cc16
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc14
-rw-r--r--chromium/media/filters/gav1_video_decoder.cc32
-rw-r--r--chromium/media/filters/gav1_video_decoder.h3
-rw-r--r--chromium/media/filters/gav1_video_decoder_unittest.cc82
-rw-r--r--chromium/media/filters/ivf_parser.cc2
-rw-r--r--chromium/media/filters/pipeline_controller.cc4
-rw-r--r--chromium/media/filters/pipeline_controller.h5
-rw-r--r--chromium/media/filters/pipeline_controller_unittest.cc9
-rw-r--r--chromium/media/filters/source_buffer_range.cc1
-rw-r--r--chromium/media/filters/source_buffer_range.h9
-rw-r--r--chromium/media/filters/source_buffer_stream.cc40
-rw-r--r--chromium/media/filters/video_decoder_stream_unittest.cc38
-rw-r--r--chromium/media/filters/video_renderer_algorithm.cc19
-rw-r--r--chromium/media/filters/video_renderer_algorithm_unittest.cc21
-rw-r--r--chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc1
-rw-r--r--chromium/media/filters/vp9_parser_fuzzertest.cc1
-rw-r--r--chromium/media/filters/vpx_video_decoder.cc11
-rw-r--r--chromium/media/filters/vpx_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/wsola_internals.cc2
-rw-r--r--chromium/media/filters/wsola_internals.h2
-rw-r--r--chromium/media/formats/common/offset_byte_queue.cc9
-rw-r--r--chromium/media/formats/mp2t/es_adapter_video.cc1
-rw-r--r--chromium/media/formats/mp2t/mp2t_common.h2
-rw-r--r--chromium/media/formats/mp2t/ts_packet.cc1
-rw-r--r--chromium/media/formats/mp4/box_reader.cc6
-rw-r--r--chromium/media/formats/mp4/box_reader_unittest.cc5
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser.cc3
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser_unittest.cc64
-rw-r--r--chromium/media/formats/webm/webm_parser.cc2
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc18
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer.h3
-rw-r--r--chromium/media/fuchsia/camera/fake_fuchsia_camera.cc4
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm_factory.cc6
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h1
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_pool.cc4
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_writer.cc38
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_writer.h22
-rw-r--r--chromium/media/fuchsia/metrics/BUILD.gn28
-rw-r--r--chromium/media/fuchsia/metrics/DEPS3
-rw-r--r--chromium/media/gpu/BUILD.gn4
-rw-r--r--chromium/media/gpu/OWNERS1
-rw-r--r--chromium/media/gpu/android/codec_image.cc34
-rw-r--r--chromium/media/gpu/android/codec_image.h22
-rw-r--r--chromium/media/gpu/android/codec_image_unittest.cc8
-rw-r--r--chromium/media/gpu/android/frame_info_helper.cc229
-rw-r--r--chromium/media/gpu/android/frame_info_helper.h21
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.cc76
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.h37
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder_unittest.cc46
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.cc65
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.h16
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl_unittest.cc120
-rw-r--r--chromium/media/gpu/chromeos/BUILD.gn1
-rw-r--r--chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc28
-rw-r--r--chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h9
-rw-r--r--chromium/media/gpu/chromeos/fourcc.cc13
-rw-r--r--chromium/media/gpu/chromeos/fourcc.h4
-rw-r--r--chromium/media/gpu/chromeos/fourcc_unittests.cc10
-rw-r--r--chromium/media/gpu/chromeos/image_processor.cc7
-rw-r--r--chromium/media/gpu/chromeos/image_processor.h2
-rw-r--r--chromium/media/gpu/chromeos/image_processor_backend.cc2
-rw-r--r--chromium/media/gpu/chromeos/image_processor_backend.h5
-rw-r--r--chromium/media/gpu/chromeos/image_processor_factory.cc11
-rw-r--r--chromium/media/gpu/chromeos/image_processor_factory.h1
-rw-r--r--chromium/media/gpu/chromeos/image_processor_test.cc42
-rw-r--r--chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc82
-rw-r--r--chromium/media/gpu/chromeos/libyuv_image_processor_backend.h2
-rw-r--r--chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc11
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool.cc29
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool.h15
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc115
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_utils.cc14
-rw-r--r--chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc7
-rw-r--r--chromium/media/gpu/chromeos/vd_video_decode_accelerator.h7
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline.cc144
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline.h57
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc229
-rw-r--r--chromium/media/gpu/h264_decoder.cc8
-rw-r--r--chromium/media/gpu/h264_dpb.cc1
-rw-r--r--chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc4
-rw-r--r--chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc4
-rw-r--r--chromium/media/gpu/ipc/service/picture_buffer_manager.cc11
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.cc64
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.h62
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc17
-rw-r--r--chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc8
-rw-r--r--chromium/media/gpu/test/BUILD.gn14
-rw-r--r--chromium/media/gpu/v4l2/BUILD.gn6
-rw-r--r--chromium/media/gpu/v4l2/v4l2_decode_surface.cc5
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.cc104
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.h28
-rw-r--r--chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc18
-rw-r--r--chromium/media/gpu/v4l2/v4l2_image_processor_backend.h3
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vda_helpers.cc7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vda_helpers.h3
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc21
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h10
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder.cc (renamed from chromium/media/gpu/v4l2/v4l2_slice_video_decoder.cc)171
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder.h (renamed from chromium/media/gpu/v4l2/v4l2_slice_video_decoder.h)39
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h16
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc608
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h151
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc6
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h3
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc15
-rw-r--r--chromium/media/gpu/vaapi/BUILD.gn15
-rw-r--r--chromium/media/gpu/vaapi/accelerated_video_encoder.cc17
-rw-r--r--chromium/media/gpu/vaapi/accelerated_video_encoder.h27
-rw-r--r--chromium/media/gpu/vaapi/test_utils.cc12
-rw-r--r--chromium/media/gpu/vaapi/va.sigs3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc18
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_processor_backend.h2
-rw-r--r--chromium/media/gpu/vaapi/vaapi_unittest.cc37
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h1
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.cc43
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.h4
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc134
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h15
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc299
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.cc313
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.h69
-rw-r--r--chromium/media/gpu/vaapi/vp9_encoder.cc151
-rw-r--r--chromium/media/gpu/vaapi/vp9_encoder.h13
-rw-r--r--chromium/media/gpu/vaapi/vp9_encoder_unittest.cc381
-rw-r--r--chromium/media/gpu/vaapi/vp9_rate_control.cc53
-rw-r--r--chromium/media/gpu/vaapi/vp9_rate_control.h38
-rw-r--r--chromium/media/gpu/video_encode_accelerator_perf_tests.cc32
-rw-r--r--chromium/media/gpu/video_encode_accelerator_tests.cc275
-rw-r--r--chromium/media/gpu/video_encode_accelerator_unittest.cc158
-rw-r--r--chromium/media/gpu/vp8_decoder.cc3
-rw-r--r--chromium/media/gpu/vp9_reference_frame_vector.cc22
-rw-r--r--chromium/media/gpu/vp9_reference_frame_vector.h5
-rw-r--r--chromium/media/gpu/windows/av1_guids.h52
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator.cc5
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator.h4
-rw-r--r--chromium/media/gpu/windows/d3d11_h264_accelerator.cc16
-rw-r--r--chromium/media/gpu/windows/d3d11_h264_accelerator.h3
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer.cc13
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer.h15
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_wrapper.cc19
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.cc418
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.h21
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_client.h10
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_accelerator.cc25
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_accelerator.h3
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_picture.cc3
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_picture.h4
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc140
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h18
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers.cc218
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers.h56
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers_unittest.cc261
-rw-r--r--chromium/media/media_options.gni1
-rw-r--r--chromium/media/mojo/clients/BUILD.gn2
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.cc8
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.h6
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_factory.cc14
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_factory.h1
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_unittest.cc8
-rw-r--r--chromium/media/mojo/clients/mojo_renderer.cc1
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_unittest.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_wrapper.cc10
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_wrapper.h6
-rw-r--r--chromium/media/mojo/mojom/BUILD.gn208
-rw-r--r--chromium/media/mojo/mojom/content_decryption_module.mojom5
-rw-r--r--chromium/media/mojo/mojom/frame_interface_factory.mojom5
-rw-r--r--chromium/media/mojo/mojom/media_types.mojom85
-rw-r--r--chromium/media/mojo/mojom/media_types.typemap70
-rw-r--r--chromium/media/mojo/mojom/media_types_enum_mojom_traits.h63
-rw-r--r--chromium/media/mojo/mojom/pipeline_status.typemap11
-rw-r--r--chromium/media/mojo/mojom/remoting.mojom58
-rw-r--r--chromium/media/mojo/mojom/speech_recognition_service.mojom28
-rw-r--r--chromium/media/mojo/mojom/status.typemap24
-rw-r--r--chromium/media/mojo/mojom/traits_test_service.mojom7
-rw-r--r--chromium/media/mojo/mojom/typemaps.gni5
-rw-r--r--chromium/media/mojo/mojom/video_color_space.typemap17
-rw-r--r--chromium/media/mojo/mojom/video_frame.typemap37
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc86
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h148
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc193
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits.cc8
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits.h6
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc17
-rw-r--r--chromium/media/mojo/mojom/video_transformation_mojom_traits.h1
-rw-r--r--chromium/media/mojo/services/BUILD.gn7
-rw-r--r--chromium/media/mojo/services/cdm_service_unittest.cc9
-rw-r--r--chromium/media/mojo/services/deferred_destroy_unique_receiver_set.h1
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.cc4
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.cc55
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.h4
-rw-r--r--chromium/media/mojo/services/media_service_unittest.cc11
-rw-r--r--chromium/media/mojo/services/mojo_audio_decoder_service.h1
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc13
-rw-r--r--chromium/media/mojo/services/mojo_cdm_helper.cc10
-rw-r--r--chromium/media/mojo/services/mojo_cdm_helper.h1
-rw-r--r--chromium/media/mojo/services/mojo_cdm_helper_unittest.cc2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service.cc4
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service.h1
-rw-r--r--chromium/media/mojo/services/mojo_media_client.h4
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.cc2
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.h1
-rw-r--r--chromium/media/mojo/services/playback_events_recorder.cc (renamed from chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder.cc)47
-rw-r--r--chromium/media/mojo/services/playback_events_recorder.h (renamed from chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder.h)19
-rw-r--r--chromium/media/mojo/services/playback_events_recorder_test.cc (renamed from chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder_test.cc)29
-rw-r--r--chromium/media/muxers/webm_muxer.cc8
-rw-r--r--chromium/media/parsers/vp8_bool_decoder.cc11
-rw-r--r--chromium/media/parsers/vp8_bool_decoder.h1
-rw-r--r--chromium/media/parsers/vp8_parser.cc1
-rw-r--r--chromium/media/remoting/BUILD.gn41
-rw-r--r--chromium/media/remoting/courier_renderer.cc120
-rw-r--r--chromium/media/remoting/courier_renderer.h4
-rw-r--r--chromium/media/remoting/courier_renderer_unittest.cc161
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter.cc4
-rw-r--r--chromium/media/remoting/end2end_test_renderer.cc237
-rw-r--r--chromium/media/remoting/end2end_test_renderer.h46
-rw-r--r--chromium/media/remoting/fake_media_resource.cc6
-rw-r--r--chromium/media/remoting/fake_media_resource.h5
-rw-r--r--chromium/media/remoting/integration_test.cc3
-rw-r--r--chromium/media/remoting/media_remoting_rpc.proto10
-rw-r--r--chromium/media/remoting/mock_receiver_controller.cc118
-rw-r--r--chromium/media/remoting/mock_receiver_controller.h96
-rw-r--r--chromium/media/remoting/receiver.cc295
-rw-r--r--chromium/media/remoting/receiver.h119
-rw-r--r--chromium/media/remoting/receiver_controller.cc116
-rw-r--r--chromium/media/remoting/receiver_controller.h70
-rw-r--r--chromium/media/remoting/receiver_unittest.cc471
-rw-r--r--chromium/media/remoting/remoting_constants.h18
-rw-r--r--chromium/media/remoting/remoting_renderer_factory.cc122
-rw-r--r--chromium/media/remoting/remoting_renderer_factory.h72
-rw-r--r--chromium/media/remoting/rpc_broker.h6
-rw-r--r--chromium/media/remoting/stream_provider.cc687
-rw-r--r--chromium/media/remoting/stream_provider.h280
-rw-r--r--chromium/media/remoting/stream_provider_unittest.cc316
-rw-r--r--chromium/media/remoting/test_utils.cc17
-rw-r--r--chromium/media/remoting/test_utils.h19
-rw-r--r--chromium/media/renderers/BUILD.gn1
-rw-r--r--chromium/media/renderers/audio_renderer_impl.cc47
-rw-r--r--chromium/media/renderers/audio_renderer_impl.h17
-rw-r--r--chromium/media/renderers/audio_renderer_impl_unittest.cc22
-rw-r--r--chromium/media/renderers/decrypting_renderer.cc5
-rw-r--r--chromium/media/renderers/decrypting_renderer.h1
-rw-r--r--chromium/media/renderers/decrypting_renderer_unittest.cc2
-rw-r--r--chromium/media/renderers/default_renderer_factory.cc19
-rw-r--r--chromium/media/renderers/default_renderer_factory.h2
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.cc183
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.h19
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer_unittest.cc4
-rw-r--r--chromium/media/renderers/renderer_impl.cc20
-rw-r--r--chromium/media/renderers/renderer_impl.h1
-rw-r--r--chromium/media/renderers/video_overlay_factory.cc1
-rw-r--r--chromium/media/renderers/video_renderer_impl.cc26
-rw-r--r--chromium/media/renderers/video_resource_updater.cc52
-rw-r--r--chromium/media/renderers/video_resource_updater_unittest.cc6
-rw-r--r--chromium/media/renderers/win/media_foundation_protection_manager.h2
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer.cc7
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer.h2
-rw-r--r--chromium/media/renderers/win/media_foundation_source_wrapper.h2
-rw-r--r--chromium/media/renderers/yuv_util.cc245
-rw-r--r--chromium/media/renderers/yuv_util.h7
-rw-r--r--chromium/media/video/fake_gpu_memory_buffer.cc60
-rw-r--r--chromium/media/video/fake_gpu_memory_buffer.h2
-rw-r--r--chromium/media/video/fake_video_encode_accelerator.cc2
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool.cc10
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc40
-rw-r--r--chromium/media/video/gpu_video_accelerator_factories.h54
-rw-r--r--chromium/media/video/h264_bit_reader.cc2
-rw-r--r--chromium/media/video/h264_parser.cc1
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.h4
-rw-r--r--chromium/media/video/picture.cc1
-rw-r--r--chromium/media/video/video_encode_accelerator.cc2
-rw-r--r--chromium/media/video/video_encode_accelerator.h1
-rw-r--r--chromium/media/video/vpx_video_encoder.cc11
-rw-r--r--chromium/media/webcodecs/BUILD.gn39
-rw-r--r--chromium/media/webcodecs/wc_decoder_selector.cc142
-rw-r--r--chromium/media/webcodecs/wc_decoder_selector.h83
-rw-r--r--chromium/media/webcodecs/wc_decoder_selector_unittest.cc240
530 files changed, 12345 insertions, 5641 deletions
diff --git a/chromium/media/BUILD.gn b/chromium/media/BUILD.gn
index 13ec8a79c74..eb3e09595e3 100644
--- a/chromium/media/BUILD.gn
+++ b/chromium/media/BUILD.gn
@@ -157,7 +157,6 @@ test("media_unittests") {
"//media/test:pipeline_integration_tests",
"//media/test:run_all_unittests",
"//media/video:unit_tests",
- "//media/webcodecs:unit_tests",
"//media/webrtc:unit_tests",
]
@@ -182,10 +181,7 @@ test("media_unittests") {
}
if (is_fuchsia) {
- deps += [
- "//media/fuchsia/audio:unittests",
- "//media/fuchsia/metrics:unittests",
- ]
+ deps += [ "//media/fuchsia/audio:unittests" ]
}
if (enable_media_remoting) {
diff --git a/chromium/media/audio/alsa/alsa_input.cc b/chromium/media/audio/alsa/alsa_input.cc
index a70748274ae..ee7c5d0305d 100644
--- a/chromium/media/audio/alsa/alsa_input.cc
+++ b/chromium/media/audio/alsa/alsa_input.cc
@@ -41,10 +41,10 @@ AlsaPcmInputStream::AlsaPcmInputStream(AudioManagerBase* audio_manager,
buffer_duration_(base::TimeDelta::FromMicroseconds(
params.frames_per_buffer() * base::Time::kMicrosecondsPerSecond /
static_cast<float>(params.sample_rate()))),
- callback_(NULL),
- device_handle_(NULL),
- mixer_handle_(NULL),
- mixer_element_handle_(NULL),
+ callback_(nullptr),
+ device_handle_(nullptr),
+ mixer_handle_(nullptr),
+ mixer_element_handle_(nullptr),
read_callback_behind_schedule_(false),
audio_bus_(AudioBus::Create(params)),
capture_thread_("AlsaInput"),
@@ -91,7 +91,7 @@ bool AlsaPcmInputStream::Open() {
}
}
- return device_handle_ != NULL;
+ return device_handle_ != nullptr;
}
void AlsaPcmInputStream::Start(AudioInputCallback* callback) {
@@ -108,7 +108,7 @@ void AlsaPcmInputStream::Start(AudioInputCallback* callback) {
}
if (error < 0) {
- callback_ = NULL;
+ callback_ = nullptr;
} else {
base::Thread::Options options;
options.priority = base::ThreadPriority::REALTIME_AUDIO;
@@ -268,7 +268,7 @@ void AlsaPcmInputStream::Stop() {
if (error < 0)
HandleError("PcmDrop", error);
- callback_ = NULL;
+ callback_ = nullptr;
}
void AlsaPcmInputStream::Close() {
@@ -282,9 +282,9 @@ void AlsaPcmInputStream::Close() {
alsa_util::CloseMixer(wrapper_, mixer_handle_, device_name_);
audio_buffer_.reset();
- device_handle_ = NULL;
- mixer_handle_ = NULL;
- mixer_element_handle_ = NULL;
+ device_handle_ = nullptr;
+ mixer_handle_ = nullptr;
+ mixer_element_handle_ = nullptr;
}
audio_manager_->ReleaseInputStream(this);
diff --git a/chromium/media/audio/alsa/alsa_output.cc b/chromium/media/audio/alsa/alsa_output.cc
index 74ea72b610c..c2cc61d1d8c 100644
--- a/chromium/media/audio/alsa/alsa_output.cc
+++ b/chromium/media/audio/alsa/alsa_output.cc
@@ -75,7 +75,7 @@ static const ChannelLayout kDefaultOutputChannelLayout = CHANNEL_LAYOUT_STEREO;
// http://0pointer.de/blog/projects/guide-to-sound-apis.html
//
// This function makes a best guess at the specific > 2 channel device name
-// based on the number of channels requested. NULL is returned if no device
+// based on the number of channels requested. nullptr is returned if no device
// can be found to match the channel numbers. In this case, using
// kDefaultDevice is probably the best bet.
//
@@ -103,7 +103,7 @@ static const char* GuessSpecificDeviceName(uint32_t channels) {
return "surround40";
default:
- return NULL;
+ return nullptr;
}
}
@@ -165,11 +165,11 @@ AlsaPcmOutputStream::AlsaPcmOutputStream(const std::string& device_name,
wrapper_(wrapper),
manager_(manager),
task_runner_(base::ThreadTaskRunnerHandle::Get()),
- playback_handle_(NULL),
+ playback_handle_(nullptr),
frames_per_packet_(packet_size_ / bytes_per_frame_),
state_(kCreated),
volume_(1.0f),
- source_callback_(NULL),
+ source_callback_(nullptr),
audio_bus_(AudioBus::Create(params)),
tick_clock_(base::DefaultTickClock::GetInstance()) {
DCHECK(manager_->GetTaskRunner()->BelongsToCurrentThread());
@@ -221,7 +221,7 @@ bool AlsaPcmOutputStream::Open() {
}
// Finish initializing the stream if the device was opened successfully.
- if (playback_handle_ == NULL) {
+ if (playback_handle_ == nullptr) {
stop_stream_ = true;
TransitionTo(kInError);
return false;
@@ -260,7 +260,7 @@ void AlsaPcmOutputStream::Close() {
if (alsa_util::CloseDevice(wrapper_, playback_handle_) < 0) {
LOG(WARNING) << "Unable to close audio device. Leaking handle.";
}
- playback_handle_ = NULL;
+ playback_handle_ = nullptr;
// Release the buffer.
buffer_.reset();
@@ -330,7 +330,7 @@ void AlsaPcmOutputStream::Stop() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// Reset the callback, so that it is not called anymore.
- set_source_callback(NULL);
+ set_source_callback(nullptr);
weak_factory_.InvalidateWeakPtrs();
TransitionTo(kIsStopped);
@@ -559,19 +559,19 @@ std::string AlsaPcmOutputStream::FindDeviceForChannels(uint32_t channels) {
return std::string();
std::string guessed_device;
- void** hints = NULL;
+ void** hints = nullptr;
int error = wrapper_->DeviceNameHint(kGetAllDevices,
kPcmInterfaceName,
&hints);
if (error == 0) {
// NOTE: Do not early return from inside this if statement. The
// hints above need to be freed.
- for (void** hint_iter = hints; *hint_iter != NULL; hint_iter++) {
+ for (void** hint_iter = hints; *hint_iter != nullptr; hint_iter++) {
// Only examine devices that are output capable.. Valid values are
- // "Input", "Output", and NULL which means both input and output.
+ // "Input", "Output", and nullptr which means both input and output.
std::unique_ptr<char, base::FreeDeleter> io(
wrapper_->DeviceNameGetHint(*hint_iter, kIoHintName));
- if (io != NULL && strcmp(io.get(), "Input") == 0)
+ if (io != nullptr && strcmp(io.get(), "Input") == 0)
continue;
// Attempt to select the closest device for number of channels.
@@ -585,7 +585,7 @@ std::string AlsaPcmOutputStream::FindDeviceForChannels(uint32_t channels) {
// Destroy the hint now that we're done with it.
wrapper_->DeviceNameFreeHint(hints);
- hints = NULL;
+ hints = nullptr;
} else {
LOG(ERROR) << "Unable to get hints for devices: "
<< wrapper_->StrError(error);
@@ -673,24 +673,22 @@ snd_pcm_t* AlsaPcmOutputStream::AutoSelectDevice(unsigned int latency) {
// 4) Fallback to kDefaultDevice.
// 5) If that fails too, try the "plug:" version of kDefaultDevice.
// 6) Give up.
- snd_pcm_t* handle = NULL;
+ snd_pcm_t* handle = nullptr;
device_name_ = FindDeviceForChannels(channels_);
// Step 1.
if (!device_name_.empty()) {
- if ((handle = alsa_util::OpenPlaybackDevice(wrapper_, device_name_.c_str(),
- channels_, sample_rate_,
- pcm_format_,
- latency)) != NULL) {
+ if ((handle = alsa_util::OpenPlaybackDevice(
+ wrapper_, device_name_.c_str(), channels_, sample_rate_,
+ pcm_format_, latency)) != nullptr) {
return handle;
}
// Step 2.
device_name_ = kPlugPrefix + device_name_;
- if ((handle = alsa_util::OpenPlaybackDevice(wrapper_, device_name_.c_str(),
- channels_, sample_rate_,
- pcm_format_,
- latency)) != NULL) {
+ if ((handle = alsa_util::OpenPlaybackDevice(
+ wrapper_, device_name_.c_str(), channels_, sample_rate_,
+ pcm_format_, latency)) != nullptr) {
return handle;
}
@@ -700,7 +698,7 @@ snd_pcm_t* AlsaPcmOutputStream::AutoSelectDevice(unsigned int latency) {
device_name_ = kPlugPrefix + device_name_;
if ((handle = alsa_util::OpenPlaybackDevice(
wrapper_, device_name_.c_str(), channels_, sample_rate_,
- pcm_format_, latency)) != NULL) {
+ pcm_format_, latency)) != nullptr) {
return handle;
}
}
@@ -722,22 +720,22 @@ snd_pcm_t* AlsaPcmOutputStream::AutoSelectDevice(unsigned int latency) {
// Step 4.
device_name_ = kDefaultDevice;
if ((handle = alsa_util::OpenPlaybackDevice(
- wrapper_, device_name_.c_str(), default_channels, sample_rate_,
- pcm_format_, latency)) != NULL) {
+ wrapper_, device_name_.c_str(), default_channels, sample_rate_,
+ pcm_format_, latency)) != nullptr) {
return handle;
}
// Step 5.
device_name_ = kPlugPrefix + device_name_;
if ((handle = alsa_util::OpenPlaybackDevice(
- wrapper_, device_name_.c_str(), default_channels, sample_rate_,
- pcm_format_, latency)) != NULL) {
+ wrapper_, device_name_.c_str(), default_channels, sample_rate_,
+ pcm_format_, latency)) != nullptr) {
return handle;
}
// Unable to open any device.
device_name_.clear();
- return NULL;
+ return nullptr;
}
bool AlsaPcmOutputStream::CanTransitionTo(InternalState to) {
@@ -800,7 +798,7 @@ void AlsaPcmOutputStream::RunErrorCallback(int code) {
source_callback_->OnError(AudioSourceCallback::ErrorType::kUnknown);
}
-// Changes the AudioSourceCallback to proxy calls to. Pass in NULL to
+// Changes the AudioSourceCallback to proxy calls to. Pass in nullptr to
// release ownership of the currently registered callback.
void AlsaPcmOutputStream::set_source_callback(AudioSourceCallback* callback) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
diff --git a/chromium/media/audio/alsa/alsa_output.h b/chromium/media/audio/alsa/alsa_output.h
index 6070361a032..9922eae8763 100644
--- a/chromium/media/audio/alsa/alsa_output.h
+++ b/chromium/media/audio/alsa/alsa_output.h
@@ -159,7 +159,7 @@ class MEDIA_EXPORT AlsaPcmOutputStream : public AudioOutputStream {
AudioBus* audio_bus);
void RunErrorCallback(int code);
- // Changes the AudioSourceCallback to proxy calls to. Pass in NULL to
+ // Changes the AudioSourceCallback to proxy calls to. Pass in nullptr to
// release ownership of the currently registered callback.
void set_source_callback(AudioSourceCallback* callback);
diff --git a/chromium/media/audio/alsa/alsa_output_unittest.cc b/chromium/media/audio/alsa/alsa_output_unittest.cc
index bca316b71d4..9183721af1b 100644
--- a/chromium/media/audio/alsa/alsa_output_unittest.cc
+++ b/chromium/media/audio/alsa/alsa_output_unittest.cc
@@ -5,6 +5,7 @@
#include <stdint.h>
#include <memory>
+#include "base/logging.h"
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/strings/stringprintf.h"
@@ -177,7 +178,7 @@ char AlsaPcmOutputStreamTest::kSurround70[] = "surround70:CARD=foo,DEV=0";
char AlsaPcmOutputStreamTest::kSurround71[] = "surround71:CARD=foo,DEV=0";
void* AlsaPcmOutputStreamTest::kFakeHints[] = {
kSurround40, kSurround41, kSurround50, kSurround51,
- kSurround70, kSurround71, NULL };
+ kSurround70, kSurround71, nullptr};
char AlsaPcmOutputStreamTest::kGenericSurround50[] = "surround50";
// Custom action to clear a memory buffer.
@@ -315,7 +316,7 @@ TEST_F(AlsaPcmOutputStreamTest, PcmOpenFailed) {
// Ensure internal state is set for a no-op stream if PcmOpen() failes.
EXPECT_TRUE(test_stream->stop_stream_);
- EXPECT_TRUE(test_stream->playback_handle_ == NULL);
+ EXPECT_FALSE(test_stream->playback_handle_);
EXPECT_FALSE(test_stream->buffer_.get());
// Close the stream since we opened it to make destruction happy.
@@ -342,7 +343,7 @@ TEST_F(AlsaPcmOutputStreamTest, PcmSetParamsFailed) {
// Ensure internal state is set for a no-op stream if PcmSetParams() failes.
EXPECT_TRUE(test_stream->stop_stream_);
- EXPECT_TRUE(test_stream->playback_handle_ == NULL);
+ EXPECT_FALSE(test_stream->playback_handle_);
EXPECT_FALSE(test_stream->buffer_.get());
// Close the stream since we opened it to make destruction happy.
@@ -636,13 +637,16 @@ TEST_F(AlsaPcmOutputStreamTest, AutoSelectDevice_DeviceSelect) {
//
// Note that the loop starts at "1", so the first parameter is ignored in
// these arrays.
- const char* kExpectedDeviceName[] = { NULL,
- AlsaPcmOutputStream::kDefaultDevice,
- AlsaPcmOutputStream::kDefaultDevice,
- AlsaPcmOutputStream::kDefaultDevice,
- kSurround40, kSurround50, kSurround51,
- kSurround70, kSurround71,
- AlsaPcmOutputStream::kDefaultDevice };
+ const char* kExpectedDeviceName[] = {nullptr,
+ AlsaPcmOutputStream::kDefaultDevice,
+ AlsaPcmOutputStream::kDefaultDevice,
+ AlsaPcmOutputStream::kDefaultDevice,
+ kSurround40,
+ kSurround50,
+ kSurround51,
+ kSurround70,
+ kSurround71,
+ AlsaPcmOutputStream::kDefaultDevice};
bool kExpectedDownmix[] = { false, false, false, false, false, true,
false, false, false, false };
ChannelLayout kExpectedLayouts[] = { CHANNEL_LAYOUT_NONE,
diff --git a/chromium/media/audio/android/audio_android_unittest.cc b/chromium/media/audio/android/audio_android_unittest.cc
index c3cd2ba0e67..ea3a4024d3e 100644
--- a/chromium/media/audio/android/audio_android_unittest.cc
+++ b/chromium/media/audio/android/audio_android_unittest.cc
@@ -9,6 +9,7 @@
#include "base/android/build_info.h"
#include "base/bind.h"
#include "base/files/file_util.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/path_service.h"
#include "base/run_loop.h"
diff --git a/chromium/media/audio/android/opensles_util.h b/chromium/media/audio/android/opensles_util.h
index 2673d14f3b6..2e8d27055bf 100644
--- a/chromium/media/audio/android/opensles_util.h
+++ b/chromium/media/audio/android/opensles_util.h
@@ -7,7 +7,7 @@
#include <SLES/OpenSLES.h>
-#include "base/logging.h"
+#include "base/check.h"
#include "media/base/media_export.h"
namespace media {
diff --git a/chromium/media/audio/audio_debug_recording_helper.cc b/chromium/media/audio/audio_debug_recording_helper.cc
index bad2c5b2f72..7b0341d3a45 100644
--- a/chromium/media/audio/audio_debug_recording_helper.cc
+++ b/chromium/media/audio/audio_debug_recording_helper.cc
@@ -9,6 +9,7 @@
#include "base/bind.h"
#include "base/files/file.h"
+#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/single_thread_task_runner.h"
#include "media/audio/audio_debug_file_writer.h"
diff --git a/chromium/media/audio/audio_input_device.cc b/chromium/media/audio/audio_input_device.cc
index aad6984307d..b6bc304fbbd 100644
--- a/chromium/media/audio/audio_input_device.cc
+++ b/chromium/media/audio/audio_input_device.cc
@@ -11,6 +11,7 @@
#include "base/bind.h"
#include "base/callback_forward.h"
#include "base/format_macros.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
@@ -98,13 +99,15 @@ class AudioInputDevice::AudioThreadCallback
};
AudioInputDevice::AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc,
- Purpose purpose)
+ Purpose purpose,
+ DeadStreamDetection detect_dead_stream)
: thread_priority_(ThreadPriorityFromPurpose(purpose)),
enable_uma_(purpose == AudioInputDevice::Purpose::kUserInput),
callback_(nullptr),
ipc_(std::move(ipc)),
state_(IDLE),
- agc_is_enabled_(false) {
+ agc_is_enabled_(false),
+ detect_dead_stream_(detect_dead_stream) {
CHECK(ipc_);
// The correctness of the code depends on the relative values assigned in the
@@ -142,9 +145,11 @@ void AudioInputDevice::Stop() {
TRACE_EVENT0("audio", "AudioInputDevice::Stop");
if (enable_uma_) {
- UMA_HISTOGRAM_BOOLEAN(
- "Media.Audio.Capture.DetectedMissingCallbacks",
- alive_checker_ ? alive_checker_->DetectedDead() : false);
+ if (detect_dead_stream_ == DeadStreamDetection::kEnabled) {
+ UMA_HISTOGRAM_BOOLEAN(
+ "Media.Audio.Capture.DetectedMissingCallbacks",
+ alive_checker_ ? alive_checker_->DetectedDead() : false);
+ }
UMA_HISTOGRAM_ENUMERATION("Media.Audio.Capture.StreamCallbackError2",
had_error_);
@@ -247,6 +252,7 @@ void AudioInputDevice::OnStreamCreated(
// also a risk of false positives if we are suspending when starting the stream
// here. See comments in AliveChecker and PowerObserverHelper for details and
// todos.
+ if (detect_dead_stream_ == DeadStreamDetection::kEnabled) {
#if defined(OS_LINUX)
const bool stop_at_first_alive_notification = true;
const bool pause_check_during_suspend = false;
@@ -259,13 +265,19 @@ void AudioInputDevice::OnStreamCreated(
base::TimeDelta::FromSeconds(kCheckMissingCallbacksIntervalSeconds),
base::TimeDelta::FromSeconds(kMissingCallbacksTimeBeforeErrorSeconds),
stop_at_first_alive_notification, pause_check_during_suspend);
+ }
// Unretained is safe since |alive_checker_| outlives |audio_callback_|.
+ base::RepeatingClosure notify_alive_closure =
+ alive_checker_
+ ? base::BindRepeating(&AliveChecker::NotifyAlive,
+ base::Unretained(alive_checker_.get()))
+ : base::DoNothing::Repeatedly();
+
audio_callback_ = std::make_unique<AudioInputDevice::AudioThreadCallback>(
audio_parameters_, std::move(shared_memory_region),
kRequestedSharedMemoryCount, enable_uma_, callback_,
- base::BindRepeating(&AliveChecker::NotifyAlive,
- base::Unretained(alive_checker_.get())));
+ notify_alive_closure);
audio_thread_ = std::make_unique<AudioDeviceThread>(
audio_callback_.get(), std::move(socket_handle), "AudioInputDevice",
thread_priority_);
@@ -274,7 +286,8 @@ void AudioInputDevice::OnStreamCreated(
ipc_->RecordStream();
// Start detecting missing audio data.
- alive_checker_->Start();
+ if (alive_checker_)
+ alive_checker_->Start();
}
void AudioInputDevice::OnError() {
diff --git a/chromium/media/audio/audio_input_device.h b/chromium/media/audio/audio_input_device.h
index 96a5b4d72d5..219b79301d8 100644
--- a/chromium/media/audio/audio_input_device.h
+++ b/chromium/media/audio/audio_input_device.h
@@ -68,11 +68,15 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
public AudioInputIPCDelegate {
public:
enum Purpose : int8_t { kUserInput, kLoopback };
+ enum class DeadStreamDetection : bool { kDisabled = false, kEnabled = true };
// NOTE: Clients must call Initialize() before using.
// |enable_uma| controls logging of UMA stats. It is used to ensure that
// stats are not logged for mirroring service streams.
- AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc, Purpose purpose);
+ // |detect_dead_stream| controls the dead stream detection.
+ AudioInputDevice(std::unique_ptr<AudioInputIPC> ipc,
+ Purpose purpose,
+ DeadStreamDetection detect_dead_stream);
// AudioCapturerSource implementation.
void Initialize(const AudioParameters& params,
@@ -142,6 +146,10 @@ class MEDIA_EXPORT AudioInputDevice : public AudioCapturerSource,
// Stores the Automatic Gain Control state. Default is false.
bool agc_is_enabled_;
+ // Controls the dead stream detection. Only the DSP hotword devices set this
+ // to kDisabled to disable dead stream detection.
+ const DeadStreamDetection detect_dead_stream_;
+
// Checks regularly that the input stream is alive and notifies us if it
// isn't by calling DetectedDeadInputStream(). Must outlive |audio_callback_|.
std::unique_ptr<AliveChecker> alive_checker_;
diff --git a/chromium/media/audio/audio_input_device_unittest.cc b/chromium/media/audio/audio_input_device_unittest.cc
index 2b20c59cf00..d951d344522 100644
--- a/chromium/media/audio/audio_input_device_unittest.cc
+++ b/chromium/media/audio/audio_input_device_unittest.cc
@@ -63,13 +63,17 @@ class MockCaptureCallback : public AudioCapturerSource::CaptureCallback {
} // namespace.
+class AudioInputDeviceTest
+ : public ::testing::TestWithParam<AudioInputDevice::DeadStreamDetection> {};
+
// Regular construction.
-TEST(AudioInputDeviceTest, Noop) {
+TEST_P(AudioInputDeviceTest, Noop) {
base::test::SingleThreadTaskEnvironment task_environment(
base::test::SingleThreadTaskEnvironment::MainThreadType::IO);
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
scoped_refptr<AudioInputDevice> device(new AudioInputDevice(
- base::WrapUnique(input_ipc), AudioInputDevice::Purpose::kUserInput));
+ base::WrapUnique(input_ipc), AudioInputDevice::Purpose::kUserInput,
+ AudioInputDeviceTest::GetParam()));
}
ACTION_P(ReportStateChange, device) {
@@ -77,23 +81,25 @@ ACTION_P(ReportStateChange, device) {
}
// Verify that we get an OnCaptureError() callback if CreateStream fails.
-TEST(AudioInputDeviceTest, FailToCreateStream) {
+TEST_P(AudioInputDeviceTest, FailToCreateStream) {
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
CHANNEL_LAYOUT_STEREO, 48000, 480);
MockCaptureCallback callback;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
scoped_refptr<AudioInputDevice> device(new AudioInputDevice(
- base::WrapUnique(input_ipc), AudioInputDevice::Purpose::kUserInput));
+ base::WrapUnique(input_ipc), AudioInputDevice::Purpose::kUserInput,
+ AudioInputDeviceTest::GetParam()));
device->Initialize(params, &callback);
EXPECT_CALL(*input_ipc, CreateStream(_, _, _, _))
.WillOnce(ReportStateChange(device.get()));
EXPECT_CALL(callback, OnCaptureError(_));
+ EXPECT_CALL(*input_ipc, CloseStream());
device->Start();
device->Stop();
}
-TEST(AudioInputDeviceTest, CreateStream) {
+TEST_P(AudioInputDeviceTest, CreateStream) {
AudioParameters params(AudioParameters::AUDIO_PCM_LOW_LATENCY,
CHANNEL_LAYOUT_STEREO, 48000, 480);
base::MappedReadOnlyRegion shared_memory;
@@ -117,7 +123,8 @@ TEST(AudioInputDeviceTest, CreateStream) {
MockCaptureCallback callback;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
scoped_refptr<AudioInputDevice> device(new AudioInputDevice(
- base::WrapUnique(input_ipc), AudioInputDevice::Purpose::kUserInput));
+ base::WrapUnique(input_ipc), AudioInputDevice::Purpose::kUserInput,
+ AudioInputDeviceTest::GetParam()));
device->Initialize(params, &callback);
EXPECT_CALL(*input_ipc, CreateStream(_, _, _, _))
@@ -134,4 +141,10 @@ TEST(AudioInputDeviceTest, CreateStream) {
device->Stop();
}
+INSTANTIATE_TEST_SUITE_P(
+ AudioInputDeviceGroup,
+ AudioInputDeviceTest,
+ ::testing::Values(AudioInputDevice::DeadStreamDetection::kDisabled,
+ AudioInputDevice::DeadStreamDetection::kEnabled));
+
} // namespace media.
diff --git a/chromium/media/audio/audio_input_unittest.cc b/chromium/media/audio/audio_input_unittest.cc
index 133673a58be..5a38c8d6b89 100644
--- a/chromium/media/audio/audio_input_unittest.cc
+++ b/chromium/media/audio/audio_input_unittest.cc
@@ -108,7 +108,7 @@ class AudioInputTest : public testing::Test {
void CloseAudioInputStreamOnAudioThread() {
RunOnAudioThread(base::BindOnce(&AudioInputStream::Close,
base::Unretained(audio_input_stream_)));
- audio_input_stream_ = NULL;
+ audio_input_stream_ = nullptr;
}
void OpenAndCloseAudioInputStreamOnAudioThread() {
@@ -148,7 +148,7 @@ class AudioInputTest : public testing::Test {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
EXPECT_TRUE(audio_input_stream_->Open());
audio_input_stream_->Close();
- audio_input_stream_ = NULL;
+ audio_input_stream_ = nullptr;
}
void OpenAndStart(AudioInputStream::AudioInputCallback* sink) {
@@ -162,14 +162,14 @@ class AudioInputTest : public testing::Test {
EXPECT_TRUE(audio_input_stream_->Open());
audio_input_stream_->Stop();
audio_input_stream_->Close();
- audio_input_stream_ = NULL;
+ audio_input_stream_ = nullptr;
}
void StopAndClose() {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
audio_input_stream_->Stop();
audio_input_stream_->Close();
- audio_input_stream_ = NULL;
+ audio_input_stream_ = nullptr;
}
// Synchronously runs the provided callback/closure on the audio thread.
diff --git a/chromium/media/audio/audio_low_latency_input_output_unittest.cc b/chromium/media/audio/audio_low_latency_input_output_unittest.cc
index 00250cc4d22..a6e11feb192 100644
--- a/chromium/media/audio/audio_low_latency_input_output_unittest.cc
+++ b/chromium/media/audio/audio_low_latency_input_output_unittest.cc
@@ -11,6 +11,7 @@
#include "base/bind.h"
#include "base/environment.h"
#include "base/files/file_util.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/path_service.h"
#include "base/run_loop.h"
diff --git a/chromium/media/audio/audio_manager_base.cc b/chromium/media/audio/audio_manager_base.cc
index 7bb71fe0cdf..473d9e4979a 100644
--- a/chromium/media/audio/audio_manager_base.cc
+++ b/chromium/media/audio/audio_manager_base.cc
@@ -26,6 +26,7 @@
#include "media/audio/fake_audio_output_stream.h"
#include "media/base/media_switches.h"
+#include "base/logging.h"
#include "media/audio/audio_input_stream_data_interceptor.h"
namespace media {
diff --git a/chromium/media/audio/audio_manager_unittest.cc b/chromium/media/audio/audio_manager_unittest.cc
index 65948647255..f9db1276a2b 100644
--- a/chromium/media/audio/audio_manager_unittest.cc
+++ b/chromium/media/audio/audio_manager_unittest.cc
@@ -102,6 +102,12 @@ struct TestAudioManagerFactory<std::nullptr_t> {
using chromeos::AudioNode;
using chromeos::AudioNodeList;
+const int kDefaultSampleRate = 48000;
+
+const uint64_t kInternalSpeakerId = 10001;
+const uint64_t kInternalSpeakerStableDeviceId = 10001;
+const uint64_t kInternalMicId = 10002;
+const uint64_t kInternalMicStableDeviceId = 10002;
const uint64_t kJabraSpeaker1Id = 30001;
const uint64_t kJabraSpeaker1StableDeviceId = 80001;
const uint64_t kJabraSpeaker2Id = 30002;
@@ -115,6 +121,30 @@ const uint64_t kJabraMic2StableDeviceId = 90002;
const uint64_t kWebcamMicId = 40003;
const uint64_t kWebcamMicStableDeviceId = 90003;
+const AudioNode kInternalSpeaker(false,
+ kInternalSpeakerId,
+ true,
+ kInternalSpeakerStableDeviceId,
+ kInternalSpeakerStableDeviceId ^ 0xFF,
+ "Internal Speaker",
+ "INTERNAL_SPEAKER",
+ "Speaker",
+ false,
+ 0,
+ 2);
+
+const AudioNode kInternalMic(true,
+ kInternalMicId,
+ true,
+ kInternalMicStableDeviceId,
+ kInternalMicStableDeviceId ^ 0xFF,
+ "Internal Mic",
+ "INTERNAL_MIC",
+ "Internal Mic",
+ false,
+ 0,
+ 1);
+
const AudioNode kJabraSpeaker1(false,
kJabraSpeaker1Id,
true,
@@ -124,7 +154,8 @@ const AudioNode kJabraSpeaker1(false,
"USB",
"Jabra Speaker 1",
false,
- 0);
+ 0,
+ 2); // expects CHANNEL_LAYOUT_STEREO
const AudioNode kJabraSpeaker2(false,
kJabraSpeaker2Id,
@@ -135,7 +166,8 @@ const AudioNode kJabraSpeaker2(false,
"USB",
"Jabra Speaker 2",
false,
- 0);
+ 0,
+ 6); // expects CHANNEL_LAYOUT_5_1
const AudioNode kHDMIOutput(false,
kHDMIOutputId,
@@ -146,7 +178,8 @@ const AudioNode kHDMIOutput(false,
"HDMI",
"HDA Intel MID",
false,
- 0);
+ 0,
+ 8); // expects CHANNEL_LAYOUT_7_1
const AudioNode kJabraMic1(true,
kJabraMic1Id,
@@ -157,7 +190,8 @@ const AudioNode kJabraMic1(true,
"USB",
"Jabra Mic 1",
false,
- 0);
+ 0,
+ 1);
const AudioNode kJabraMic2(true,
kJabraMic2Id,
@@ -168,7 +202,8 @@ const AudioNode kJabraMic2(true,
"USB",
"Jabra Mic 2",
false,
- 0);
+ 0,
+ 1);
const AudioNode kUSBCameraMic(true,
kWebcamMicId,
@@ -179,7 +214,8 @@ const AudioNode kUSBCameraMic(true,
"USB",
"Logitech Webcam",
false,
- 0);
+ 0,
+ 1);
#endif // defined(USE_CRAS)
const char kRealDefaultInputDeviceID[] = "input2";
@@ -274,6 +310,27 @@ class AudioManagerTest : public ::testing::Test {
cras_audio_handler_ = chromeos::CrasAudioHandler::Get();
base::RunLoop().RunUntilIdle();
}
+
+ void SetActiveOutputNode(uint64_t node_id) {
+ cras_audio_handler_->SwitchToDevice(
+ *cras_audio_handler_->GetDeviceFromId(node_id), true /* notify */,
+ chromeos::CrasAudioHandler::ACTIVATE_BY_USER /* activate_by */);
+ }
+
+ AudioParameters GetPreferredOutputStreamParameters(
+ ChannelLayout channel_layout, int32_t user_buffer_size = 0) {
+ // Generated AudioParameters should follow the same rule as in
+ // AudioManagerCras::GetPreferredOutputStreamParameters().
+ int sample_rate = kDefaultSampleRate;
+ int32_t buffer_size = user_buffer_size;
+ if (buffer_size == 0) // Not user-provided.
+ cras_audio_handler_->GetDefaultOutputBufferSize(&buffer_size);
+ return AudioParameters(
+ AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout, sample_rate,
+ buffer_size,
+ AudioParameters::HardwareCapabilities(limits::kMinAudioBufferSize,
+ limits::kMaxAudioBufferSize));
+ }
#endif // defined(USE_CRAS)
protected:
@@ -367,6 +424,19 @@ class AudioManagerTest : public ::testing::Test {
LOG(WARNING) << "No input devices detected";
}
}
+
+ // Helper method for (USE_CRAS) which returns |group_id| from |device_id|.
+ std::string getGroupID(const AudioDeviceDescriptions& device_descriptions,
+ const std::string device_id) {
+ AudioDeviceDescriptions::const_iterator it =
+ std::find_if(device_descriptions.begin(), device_descriptions.end(),
+ [&device_id](const auto& audio_device_desc) {
+ return audio_device_desc.unique_id == device_id;
+ });
+
+ EXPECT_NE(it, device_descriptions.end());
+ return it->group_id;
+ }
#endif // defined(USE_CRAS)
bool InputDevicesAvailable() {
@@ -464,6 +534,175 @@ TEST_F(AudioManagerTest, EnumerateOutputDevicesCras) {
device_info_accessor_->GetAudioOutputDeviceDescriptions(&device_descriptions);
CheckDeviceDescriptionsCras(device_descriptions, expectation);
}
+
+TEST_F(AudioManagerTest, CheckOutputStreamParametersCras) {
+ // Setup the devices without internal mic, so that it doesn't exist
+ // beamforming capable mic.
+ AudioNodeList audio_nodes;
+ audio_nodes.push_back(kJabraMic1);
+ audio_nodes.push_back(kJabraMic2);
+ audio_nodes.push_back(kUSBCameraMic);
+ audio_nodes.push_back(kHDMIOutput);
+ audio_nodes.push_back(kJabraSpeaker1);
+ audio_nodes.push_back(kJabraSpeaker2);
+
+ SetUpCrasAudioHandlerWithTestingNodes(audio_nodes);
+
+ ABORT_AUDIO_TEST_IF_NOT(OutputDevicesAvailable());
+
+ DVLOG(2) << "Testing AudioManagerCras.";
+ CreateAudioManagerForTesting<AudioManagerCras>();
+ AudioParameters params, golden_params;
+
+ // channel_layout:
+ // JabraSpeaker1 (2-channel): CHANNEL_LAYOUT_STEREO
+ // JabraSpeaker2 (6-channel): CHANNEL_LAYOUT_5_1
+ // HDMIOutput (8-channel): CHANNEL_LAYOUT_7_1
+
+ // Check GetOutputStreamParameters() with device ID. The returned parameters
+ // should be reflected to the specific output device.
+ params = device_info_accessor_->GetOutputStreamParameters(
+ base::NumberToString(kJabraSpeaker1Id));
+ golden_params = GetPreferredOutputStreamParameters(
+ ChannelLayout::CHANNEL_LAYOUT_STEREO);
+ EXPECT_TRUE(params.Equals(golden_params));
+ params = device_info_accessor_->GetOutputStreamParameters(
+ base::NumberToString(kJabraSpeaker2Id));
+ golden_params = GetPreferredOutputStreamParameters(
+ ChannelLayout::CHANNEL_LAYOUT_5_1);
+ EXPECT_TRUE(params.Equals(golden_params));
+ params = device_info_accessor_->GetOutputStreamParameters(
+ base::NumberToString(kHDMIOutputId));
+ golden_params = GetPreferredOutputStreamParameters(
+ ChannelLayout::CHANNEL_LAYOUT_7_1);
+ EXPECT_TRUE(params.Equals(golden_params));
+
+ // Set user-provided audio buffer size by command line, then check the buffer
+ // size in stream parameters is equal to the user-provided one.
+ int argc = 2;
+ char const *argv0 = "dummy";
+ char const *argv1 = "--audio-buffer-size=2048";
+ const char* argv[] = {argv0, argv1, 0};
+ base::CommandLine::Reset();
+ EXPECT_TRUE(base::CommandLine::Init(argc, argv));
+
+ // Check GetOutputStreamParameters() with default ID. The returned parameters
+ // should reflect the currently active output device.
+ SetActiveOutputNode(kJabraSpeaker1Id);
+ params = device_info_accessor_->GetOutputStreamParameters(
+ AudioDeviceDescription::kDefaultDeviceId);
+ golden_params = GetPreferredOutputStreamParameters(
+ ChannelLayout::CHANNEL_LAYOUT_STEREO, 2048);
+ EXPECT_TRUE(params.Equals(golden_params));
+ SetActiveOutputNode(kJabraSpeaker2Id);
+ params = device_info_accessor_->GetOutputStreamParameters(
+ AudioDeviceDescription::kDefaultDeviceId);
+ golden_params = GetPreferredOutputStreamParameters(
+ ChannelLayout::CHANNEL_LAYOUT_5_1, 2048);
+ EXPECT_TRUE(params.Equals(golden_params));
+ SetActiveOutputNode(kHDMIOutputId);
+ params = device_info_accessor_->GetOutputStreamParameters(
+ AudioDeviceDescription::kDefaultDeviceId);
+ golden_params = GetPreferredOutputStreamParameters(
+ ChannelLayout::CHANNEL_LAYOUT_7_1, 2048);
+ EXPECT_TRUE(params.Equals(golden_params));
+
+ // Check non-default device again.
+ params = device_info_accessor_->GetOutputStreamParameters(
+ base::NumberToString(kJabraSpeaker1Id));
+ golden_params = GetPreferredOutputStreamParameters(
+ ChannelLayout::CHANNEL_LAYOUT_STEREO, 2048);
+ EXPECT_TRUE(params.Equals(golden_params));
+}
+
+TEST_F(AudioManagerTest, LookupDefaultInputDeviceWithProperGroupId) {
+ // Setup devices with external microphone as active device.
+ // Switch active device to the internal microphone.
+ // Check if default device has the same group id as internal microphone.
+ AudioNodeList audio_nodes;
+ audio_nodes.push_back(kInternalMic);
+ audio_nodes.push_back(kJabraMic1);
+ SetUpCrasAudioHandlerWithTestingNodes(audio_nodes);
+
+ ABORT_AUDIO_TEST_IF_NOT(InputDevicesAvailable());
+
+ // Setup expectation with physical devices.
+ std::map<uint64_t, std::string> expectation;
+ expectation[kInternalMic.id] =
+ cras_audio_handler_->GetDeviceFromId(kInternalMic.id)->display_name;
+ expectation[kJabraMic1.id] =
+ cras_audio_handler_->GetDeviceFromId(kJabraMic1.id)->display_name;
+
+ CreateAudioManagerForTesting<AudioManagerCras>();
+ auto previous_default_device_id =
+ device_info_accessor_->GetDefaultInputDeviceID();
+ EXPECT_EQ(base::NumberToString(kJabraMic1.id), previous_default_device_id);
+ AudioDeviceDescriptions device_descriptions;
+ device_info_accessor_->GetAudioInputDeviceDescriptions(&device_descriptions);
+
+ CheckDeviceDescriptions(device_descriptions);
+
+ // Set internal microphone as active.
+ chromeos::AudioDevice internal_microphone(kInternalMic);
+ cras_audio_handler_->SwitchToDevice(
+ internal_microphone, true, chromeos::CrasAudioHandler::ACTIVATE_BY_USER);
+ auto new_default_device_id = device_info_accessor_->GetDefaultInputDeviceID();
+ EXPECT_NE(previous_default_device_id, new_default_device_id);
+
+ auto default_device_group_id =
+ getGroupID(device_descriptions, new_default_device_id);
+ auto mic_group_id =
+ getGroupID(device_descriptions, base::NumberToString(kInternalMic.id));
+
+ EXPECT_EQ(default_device_group_id, mic_group_id);
+ EXPECT_EQ(base::NumberToString(kInternalMic.id), new_default_device_id);
+}
+
+TEST_F(AudioManagerTest, LookupDefaultOutputDeviceWithProperGroupId) {
+ // Setup devices with external speaker as active device.
+ // Switch active device to the internal speaker.
+ // Check if default device has the same group id as internal speaker.
+ AudioNodeList audio_nodes;
+ audio_nodes.push_back(kInternalSpeaker);
+ audio_nodes.push_back(kJabraSpeaker1);
+
+ SetUpCrasAudioHandlerWithTestingNodes(audio_nodes);
+
+ ABORT_AUDIO_TEST_IF_NOT(OutputDevicesAvailable());
+
+ // Setup expectation with physical devices.
+ std::map<uint64_t, std::string> expectation;
+ expectation[kInternalSpeaker.id] =
+ cras_audio_handler_->GetDeviceFromId(kInternalSpeaker.id)->display_name;
+ expectation[kJabraSpeaker1.id] =
+ cras_audio_handler_->GetDeviceFromId(kJabraSpeaker1.id)->display_name;
+
+ CreateAudioManagerForTesting<AudioManagerCras>();
+ auto previous_default_device_id =
+ device_info_accessor_->GetDefaultOutputDeviceID();
+ EXPECT_EQ(base::NumberToString(kJabraSpeaker1.id),
+ previous_default_device_id);
+ AudioDeviceDescriptions device_descriptions;
+ device_info_accessor_->GetAudioOutputDeviceDescriptions(&device_descriptions);
+
+ CheckDeviceDescriptions(device_descriptions);
+
+ // Set internal speaker as active.
+ chromeos::AudioDevice internal_speaker(kInternalSpeaker);
+ cras_audio_handler_->SwitchToDevice(
+ internal_speaker, true, chromeos::CrasAudioHandler::ACTIVATE_BY_USER);
+ auto new_default_device_id =
+ device_info_accessor_->GetDefaultOutputDeviceID();
+ EXPECT_NE(previous_default_device_id, new_default_device_id);
+
+ auto default_device_group_id =
+ getGroupID(device_descriptions, new_default_device_id);
+ auto speaker_group_id = getGroupID(device_descriptions,
+ base::NumberToString(kInternalSpeaker.id));
+
+ EXPECT_EQ(default_device_group_id, speaker_group_id);
+ EXPECT_EQ(base::NumberToString(kInternalSpeaker.id), new_default_device_id);
+}
#else // !defined(USE_CRAS)
TEST_F(AudioManagerTest, HandleDefaultDeviceIDs) {
diff --git a/chromium/media/audio/audio_output_device.cc b/chromium/media/audio/audio_output_device.cc
index 92448e7469f..d18408f0002 100644
--- a/chromium/media/audio/audio_output_device.cc
+++ b/chromium/media/audio/audio_output_device.cc
@@ -11,6 +11,7 @@
#include <utility>
#include "base/callback_helpers.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
#include "base/single_thread_task_runner.h"
@@ -33,7 +34,7 @@ AudioOutputDevice::AudioOutputDevice(
const AudioSinkParameters& sink_params,
base::TimeDelta authorization_timeout)
: io_task_runner_(io_task_runner),
- callback_(NULL),
+ callback_(nullptr),
ipc_(std::move(ipc)),
state_(IDLE),
session_id_(sink_params.session_id),
diff --git a/chromium/media/audio/audio_output_device_thread_callback.cc b/chromium/media/audio/audio_output_device_thread_callback.cc
index c834b0d77d8..37a88086aea 100644
--- a/chromium/media/audio/audio_output_device_thread_callback.cc
+++ b/chromium/media/audio/audio_output_device_thread_callback.cc
@@ -6,6 +6,7 @@
#include <utility>
+#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
#include "base/trace_event/trace_event.h"
diff --git a/chromium/media/audio/audio_output_resampler.cc b/chromium/media/audio/audio_output_resampler.cc
index 7e7c94c00d5..49ed554f8df 100644
--- a/chromium/media/audio/audio_output_resampler.cc
+++ b/chromium/media/audio/audio_output_resampler.cc
@@ -14,6 +14,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/compiler_specific.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_functions.h"
diff --git a/chromium/media/audio/audio_output_stream_sink.cc b/chromium/media/audio/audio_output_stream_sink.cc
index b70b58e13e1..682d05e4813 100644
--- a/chromium/media/audio/audio_output_stream_sink.cc
+++ b/chromium/media/audio/audio_output_stream_sink.cc
@@ -19,10 +19,10 @@ namespace media {
AudioOutputStreamSink::AudioOutputStreamSink()
: initialized_(false),
started_(false),
- render_callback_(NULL),
- active_render_callback_(NULL),
+ render_callback_(nullptr),
+ active_render_callback_(nullptr),
audio_task_runner_(AudioManager::Get()->GetTaskRunner()),
- stream_(NULL) {}
+ stream_(nullptr) {}
AudioOutputStreamSink::~AudioOutputStreamSink() = default;
@@ -137,7 +137,7 @@ void AudioOutputStreamSink::DoStart(const AudioParameters& params) {
}
if (stream_)
stream_->Close();
- stream_ = NULL;
+ stream_ = nullptr;
}
}
@@ -149,7 +149,7 @@ void AudioOutputStreamSink::DoStop() {
DoPause();
stream_->Close();
- stream_ = NULL;
+ stream_ = nullptr;
}
void AudioOutputStreamSink::DoPause() {
@@ -176,7 +176,7 @@ void AudioOutputStreamSink::DoSetVolume(double volume) {
void AudioOutputStreamSink::ClearCallback() {
base::AutoLock al(callback_lock_);
- active_render_callback_ = NULL;
+ active_render_callback_ = nullptr;
}
} // namespace media
diff --git a/chromium/media/audio/audio_output_unittest.cc b/chromium/media/audio/audio_output_unittest.cc
index 99f8bbb78d7..8191243e21f 100644
--- a/chromium/media/audio/audio_output_unittest.cc
+++ b/chromium/media/audio/audio_output_unittest.cc
@@ -41,12 +41,6 @@ class AudioOutputTest : public testing::TestWithParam<bool> {
if (GetParam())
features_.InitAndEnableFeature(features::kUseAAudioDriver);
#endif
-#if defined(OS_LINUX)
- // Due to problems with PulseAudio failing to start, use a fake audio
- // stream. https://crbug.com/1047655#c70
- base::CommandLine::ForCurrentProcess()->AppendSwitch(
- switches::kDisableAudioOutput);
-#endif
base::RunLoop().RunUntilIdle();
}
~AudioOutputTest() override {
diff --git a/chromium/media/audio/audio_thread_hang_monitor.cc b/chromium/media/audio/audio_thread_hang_monitor.cc
index f07dea512e9..c26d3f59590 100644
--- a/chromium/media/audio/audio_thread_hang_monitor.cc
+++ b/chromium/media/audio/audio_thread_hang_monitor.cc
@@ -11,6 +11,7 @@
#include "base/callback.h"
#include "base/debug/dump_without_crashing.h"
#include "base/location.h"
+#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
#include "base/power_monitor/power_monitor.h"
#include "base/process/process.h"
diff --git a/chromium/media/audio/cras/audio_manager_cras.cc b/chromium/media/audio/cras/audio_manager_cras.cc
index fb4f792d5d0..24540cbfa73 100644
--- a/chromium/media/audio/cras/audio_manager_cras.cc
+++ b/chromium/media/audio/cras/audio_manager_cras.cc
@@ -374,29 +374,59 @@ int32_t AudioManagerCras::GetSystemAecGroupIdPerBoard() {
AudioParameters AudioManagerCras::GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) {
+ DCHECK(GetTaskRunner()->BelongsToCurrentThread());
+
ChannelLayout channel_layout = CHANNEL_LAYOUT_STEREO;
int sample_rate = kDefaultSampleRate;
- int buffer_size = GetDefaultOutputBufferSizePerBoard();
+ int buffer_size = GetUserBufferSize();
if (input_params.IsValid()) {
- sample_rate = input_params.sample_rate();
channel_layout = input_params.channel_layout();
- buffer_size =
- std::min(static_cast<int>(limits::kMaxAudioBufferSize),
- std::max(static_cast<int>(limits::kMinAudioBufferSize),
- input_params.frames_per_buffer()));
+ sample_rate = input_params.sample_rate();
+ if (!buffer_size) // Not user-provided.
+ buffer_size =
+ std::min(static_cast<int>(limits::kMaxAudioBufferSize),
+ std::max(static_cast<int>(limits::kMinAudioBufferSize),
+ input_params.frames_per_buffer()));
+ return AudioParameters(
+ AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout, sample_rate,
+ buffer_size,
+ AudioParameters::HardwareCapabilities(limits::kMinAudioBufferSize,
+ limits::kMaxAudioBufferSize));
}
- int user_buffer_size = GetUserBufferSize();
- if (user_buffer_size)
- buffer_size = user_buffer_size;
+ // Get max supported channels from |output_device_id| or the primary active
+ // one if |output_device_id| is the default device.
+ uint64_t preferred_device_id;
+ if (AudioDeviceDescription::IsDefaultDevice(output_device_id)) {
+ preferred_device_id = GetPrimaryActiveOutputNode();
+ } else {
+ if (!base::StringToUint64(output_device_id, &preferred_device_id))
+ preferred_device_id = 0; // 0 represents invalid |output_device_id|.
+ }
- AudioParameters params(
+ if (preferred_device_id) {
+ chromeos::AudioDeviceList devices;
+ GetAudioDevices(&devices);
+ const chromeos::AudioDevice* device =
+ GetDeviceFromId(devices, preferred_device_id);
+ if (device && device->is_input == false) {
+ channel_layout = GuessChannelLayout(
+ static_cast<int>(device->max_supported_channels));
+ // Fall-back to old fashion: always fixed to STEREO layout.
+ if (channel_layout == CHANNEL_LAYOUT_UNSUPPORTED) {
+ channel_layout = CHANNEL_LAYOUT_STEREO;
+ }
+ }
+ }
+
+ if (!buffer_size) // Not user-provided.
+ buffer_size = GetDefaultOutputBufferSizePerBoard();
+
+ return AudioParameters(
AudioParameters::AUDIO_PCM_LOW_LATENCY, channel_layout, sample_rate,
buffer_size,
AudioParameters::HardwareCapabilities(limits::kMinAudioBufferSize,
limits::kMaxAudioBufferSize));
-
- return params;
}
AudioOutputStream* AudioManagerCras::MakeOutputStream(
diff --git a/chromium/media/audio/fake_audio_input_stream.cc b/chromium/media/audio/fake_audio_input_stream.cc
index ce75d1e175e..89af0a9f506 100644
--- a/chromium/media/audio/fake_audio_input_stream.cc
+++ b/chromium/media/audio/fake_audio_input_stream.cc
@@ -12,6 +12,7 @@
#include "base/bind_helpers.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/memory/ref_counted.h"
diff --git a/chromium/media/audio/fake_audio_output_stream.cc b/chromium/media/audio/fake_audio_output_stream.cc
index 04a624e321f..067172b0617 100644
--- a/chromium/media/audio/fake_audio_output_stream.cc
+++ b/chromium/media/audio/fake_audio_output_stream.cc
@@ -23,7 +23,7 @@ FakeAudioOutputStream::FakeAudioOutputStream(AudioManagerBase* manager,
const AudioParameters& params)
: audio_manager_(manager),
fixed_data_delay_(FakeAudioWorker::ComputeFakeOutputDelay(params)),
- callback_(NULL),
+ callback_(nullptr),
fake_worker_(manager->GetWorkerTaskRunner(), params),
audio_bus_(AudioBus::Create(params)) {}
@@ -47,7 +47,7 @@ void FakeAudioOutputStream::Start(AudioSourceCallback* callback) {
void FakeAudioOutputStream::Stop() {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
fake_worker_.Stop();
- callback_ = NULL;
+ callback_ = nullptr;
}
void FakeAudioOutputStream::Close() {
diff --git a/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc b/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
index a3f7a94e91e..2a5f8dcf8da 100644
--- a/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
+++ b/chromium/media/audio/fuchsia/audio_output_stream_fuchsia.cc
@@ -8,7 +8,8 @@
#include <zircon/syscalls.h>
#include "base/bind.h"
-#include "base/fuchsia/default_context.h"
+#include "base/fuchsia/process_context.h"
+#include "base/logging.h"
#include "base/memory/writable_shared_memory_region.h"
#include "media/audio/fuchsia/audio_manager_fuchsia.h"
#include "media/base/audio_sample_types.h"
@@ -38,7 +39,7 @@ bool AudioOutputStreamFuchsia::Open() {
// Connect |audio_renderer_| to the audio service.
fuchsia::media::AudioPtr audio_server =
- base::fuchsia::ComponentContextForCurrentProcess()
+ base::ComponentContextForProcess()
->svc()
->Connect<fuchsia::media::Audio>();
audio_server->CreateAudioRenderer(audio_renderer_.NewRequest());
diff --git a/chromium/media/audio/linux/audio_manager_linux.cc b/chromium/media/audio/linux/audio_manager_linux.cc
index 2986b094580..cb8f271efbd 100644
--- a/chromium/media/audio/linux/audio_manager_linux.cc
+++ b/chromium/media/audio/linux/audio_manager_linux.cc
@@ -5,6 +5,7 @@
#include <memory>
#include "base/command_line.h"
+#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
#include "media/audio/fake_audio_manager.h"
#include "media/base/media_switches.h"
diff --git a/chromium/media/audio/mac/audio_manager_mac.cc b/chromium/media/audio/mac/audio_manager_mac.cc
index 26436b47942..a994f74bb68 100644
--- a/chromium/media/audio/mac/audio_manager_mac.cc
+++ b/chromium/media/audio/mac/audio_manager_mac.cc
@@ -1054,14 +1054,7 @@ bool AudioManagerMac::MaybeChangeBufferSize(AudioDeviceID device_id,
DVLOG_IF(1, result == noErr) << "IO buffer size changed to: " << buffer_size;
// Store the currently used (after a change) I/O buffer frame size.
*io_buffer_frame_size = buffer_size;
-
- // If the size was changed, update the actual output buffer size used for the
- // given device ID.
- if (!is_input && (result == noErr)) {
- output_io_buffer_size_map_[device_id] = buffer_size;
- }
-
- return (result == noErr);
+ return result == noErr;
}
// static
@@ -1192,76 +1185,6 @@ void AudioManagerMac::UnsuppressNoiseReduction(AudioDeviceID device_id) {
}
}
-void AudioManagerMac::IncreaseIOBufferSizeIfPossible(AudioDeviceID device_id) {
- DCHECK(GetTaskRunner()->BelongsToCurrentThread());
- DVLOG(1) << "IncreaseIOBufferSizeIfPossible(id=0x" << std::hex << device_id
- << ")";
- if (in_shutdown_) {
- DVLOG(1) << "Disabled since we are shutting down";
- return;
- }
-
- // Start by getting the actual I/O buffer size. Then scan all active output
- // streams using the specified |device_id| and find the minimum requested
- // buffer size. In addition, store a reference to the audio unit of the first
- // output stream using |device_id|.
- // All active output streams use the same actual I/O buffer size given
- // a unique device ID.
- // TODO(henrika): it would also be possible to use AudioUnitGetProperty(...,
- // kAudioDevicePropertyBufferFrameSize,...) instead of caching the actual
- // buffer size but I have chosen to use the map instead to avoid possibly
- // expensive Core Audio API calls and the risk of failure when asking while
- // closing a stream.
- // TODO(http://crbug.com/961629): There seems to be bugs in the caching.
- const size_t actual_size =
- output_io_buffer_size_map_.find(device_id) !=
- output_io_buffer_size_map_.end()
- ? output_io_buffer_size_map_[device_id]
- : 0; // This leads to trying to update the buffer size below.
- AudioUnit audio_unit;
- size_t min_requested_size = std::numeric_limits<std::size_t>::max();
- for (auto* stream : output_streams_) {
- if (stream->device_id() == device_id) {
- if (min_requested_size == std::numeric_limits<std::size_t>::max()) {
- // Store reference to the first audio unit using the specified ID.
- audio_unit = stream->audio_unit();
- }
- if (stream->requested_buffer_size() < min_requested_size)
- min_requested_size = stream->requested_buffer_size();
- DVLOG(1) << "requested:" << stream->requested_buffer_size()
- << " actual: " << actual_size;
- }
- }
-
- if (min_requested_size == std::numeric_limits<std::size_t>::max()) {
- DVLOG(1) << "No action since there is no active stream for given device id";
- return;
- }
-
- // It is only possible to revert to a larger buffer size if the lowest
- // requested is not in use. Example: if the actual I/O buffer size is 256 and
- // at least one output stream has asked for 256 as its buffer size, we can't
- // start using a larger I/O buffer size.
- DCHECK_GE(min_requested_size, actual_size);
- if (min_requested_size == actual_size) {
- DVLOG(1) << "No action since lowest possible size is already in use: "
- << actual_size;
- return;
- }
-
- // It should now be safe to increase the I/O buffer size to a new (higher)
- // value using the |min_requested_size|. Doing so will save system resources.
- // All active output streams with the same |device_id| are affected by this
- // change but it is only required to apply the change to one of the streams.
- // We ignore the result from MaybeChangeBufferSize(). Logging is done in that
- // function and it could fail if the device was removed during the operation.
- DVLOG(1) << "min_requested_size: " << min_requested_size;
- bool size_was_changed = false;
- size_t io_buffer_frame_size = 0;
- MaybeChangeBufferSize(device_id, audio_unit, 0, min_requested_size,
- &size_was_changed, &io_buffer_frame_size);
-}
-
bool AudioManagerMac::AudioDeviceIsUsedForInput(AudioDeviceID device_id) {
DCHECK(GetTaskRunner()->BelongsToCurrentThread());
if (!basic_input_streams_.empty()) {
@@ -1299,25 +1222,6 @@ void AudioManagerMac::ReleaseOutputStreamUsingRealDevice(
// Start by closing down the specified output stream.
output_streams_.remove(static_cast<AUHALStream*>(stream));
AudioManagerBase::ReleaseOutputStream(stream);
-
- // Prevent attempt to alter buffer size if the released stream was the last
- // output stream.
- if (output_streams_.empty())
- return;
-
- // If the audio device exists (i.e. has not been removed from the system) and
- // is not used for input, see if it is possible to increase the IO buffer size
- // (saves power) given the remaining output audio streams and their buffer
- // size requirements.
- // TODO(grunell): When closing several idle streams
- // (AudioOutputDispatcherImpl::CloseIdleStreams), we should ideally only
- // update the buffer size once after closing all those streams.
- std::vector<AudioObjectID> device_ids =
- core_audio_mac::GetAllAudioDeviceIDs();
- const bool device_exists = std::find(device_ids.begin(), device_ids.end(),
- device_id) != device_ids.end();
- if (device_exists && !AudioDeviceIsUsedForInput(device_id))
- IncreaseIOBufferSizeIfPossible(device_id);
}
void AudioManagerMac::ReleaseInputStream(AudioInputStream* stream) {
diff --git a/chromium/media/audio/mac/audio_manager_mac.h b/chromium/media/audio/mac/audio_manager_mac.h
index c2fb8dec46c..74de0a11bdf 100644
--- a/chromium/media/audio/mac/audio_manager_mac.h
+++ b/chromium/media/audio/mac/audio_manager_mac.h
@@ -169,21 +169,6 @@ class MEDIA_EXPORT AudioManagerMac : public AudioManagerBase {
// Returns true if any active input stream is using the specified |device_id|.
bool AudioDeviceIsUsedForInput(AudioDeviceID device_id);
- // This method is called when an output stream has been released and it takes
- // the given |device_id| and scans all active output streams that are
- // using this id. The goal is to find a new (larger) I/O buffer size which
- // can be applied to all active output streams since doing so will save
- // system resources.
- // Note that, it is only called if no input stream is also using the device.
- // Example: two active output streams where #1 wants 1024 as buffer size but
- // is using 256 since stream #2 wants it. Now, if stream #2 is closed down,
- // the native I/O buffer size will be increased to 1024 instead of 256.
- // Returns true if it was possible to increase the I/O buffer size and
- // false otherwise.
- // TODO(henrika): possibly extend the scheme to also take input streams into
- // account.
- void IncreaseIOBufferSizeIfPossible(AudioDeviceID device_id);
-
std::string GetDefaultDeviceID(bool is_input);
std::unique_ptr<AudioDeviceListenerMac> output_device_listener_;
@@ -208,10 +193,6 @@ class MEDIA_EXPORT AudioManagerMac : public AudioManagerBase {
std::list<AUAudioInputStream*> low_latency_input_streams_;
std::list<AUHALStream*> output_streams_;
- // Maps device IDs and their corresponding actual (I/O) buffer sizes for
- // all output streams using the specific device.
- std::map<AudioDeviceID, size_t> output_io_buffer_size_map_;
-
// Set to true in the destructor. Ensures that methods that touches native
// Core Audio APIs are not executed during shutdown.
bool in_shutdown_;
diff --git a/chromium/media/audio/null_audio_sink.cc b/chromium/media/audio/null_audio_sink.cc
index 56d7c23b9a1..ceda2ae30ac 100644
--- a/chromium/media/audio/null_audio_sink.cc
+++ b/chromium/media/audio/null_audio_sink.cc
@@ -18,7 +18,7 @@ NullAudioSink::NullAudioSink(
: initialized_(false),
started_(false),
playing_(false),
- callback_(NULL),
+ callback_(nullptr),
task_runner_(task_runner) {}
NullAudioSink::~NullAudioSink() = default;
diff --git a/chromium/media/audio/power_observer_helper.cc b/chromium/media/audio/power_observer_helper.cc
index aa0dc32bb01..d8e6d8199bd 100644
--- a/chromium/media/audio/power_observer_helper.cc
+++ b/chromium/media/audio/power_observer_helper.cc
@@ -7,6 +7,7 @@
#include <utility>
#include "base/bind.h"
+#include "base/logging.h"
#include "base/power_monitor/power_monitor.h"
namespace media {
diff --git a/chromium/media/audio/pulse/pulse_output.cc b/chromium/media/audio/pulse/pulse_output.cc
index cdcda5f9c60..2b773a39157 100644
--- a/chromium/media/audio/pulse/pulse_output.cc
+++ b/chromium/media/audio/pulse/pulse_output.cc
@@ -253,6 +253,9 @@ void PulseAudioOutputStream::Stop() {
// outstanding callbacks have completed.
AutoPulseLock auto_lock(pa_mainloop_);
+ if (!source_callback_)
+ return;
+
// Set |source_callback_| to nullptr so all FulfillWriteRequest() calls which
// may occur while waiting on the flush and cork exit immediately.
auto* callback = source_callback_;
diff --git a/chromium/media/audio/pulse/pulse_util.cc b/chromium/media/audio/pulse/pulse_util.cc
index 085036bb967..d8e6ce1ca5c 100644
--- a/chromium/media/audio/pulse/pulse_util.cc
+++ b/chromium/media/audio/pulse/pulse_util.cc
@@ -13,6 +13,7 @@
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
+#include "base/notreached.h"
#include "base/synchronization/waitable_event.h"
#include "build/branding_buildflags.h"
#include "media/audio/audio_device_description.h"
diff --git a/chromium/media/audio/wav_audio_handler.cc b/chromium/media/audio/wav_audio_handler.cc
index e5a8a7eb41d..4d0f5d29a32 100644
--- a/chromium/media/audio/wav_audio_handler.cc
+++ b/chromium/media/audio/wav_audio_handler.cc
@@ -9,6 +9,7 @@
#include "base/logging.h"
#include "base/memory/ptr_util.h"
+#include "base/notreached.h"
#include "base/sys_byteorder.h"
#include "build/build_config.h"
#include "media/base/audio_bus.h"
diff --git a/chromium/media/audio/win/audio_device_listener_win.cc b/chromium/media/audio/win/audio_device_listener_win.cc
index 166a1e53851..cdf7bec9061 100644
--- a/chromium/media/audio/win/audio_device_listener_win.cc
+++ b/chromium/media/audio/win/audio_device_listener_win.cc
@@ -19,7 +19,7 @@ using base::win::ScopedCoMem;
namespace media {
static std::string FlowToString(EDataFlow flow) {
- return flow == eRender ? "eRender" : "eConsole";
+ return flow == eRender ? "eRender" : "eCapture";
}
static std::string RoleToString(ERole role) {
@@ -134,8 +134,10 @@ HRESULT AudioDeviceListenerWin::OnDefaultDeviceChanged(
// it provides a substantially faster resumption of playback.
bool did_run_listener_cb = false;
const base::TimeTicks now = tick_clock_->NowTicks();
- if (flow == eRender && now - last_device_change_time_ > kDeviceChangeLimit) {
+ if (flow == eRender && (now - last_device_change_time_ > kDeviceChangeLimit ||
+ new_device_id.compare(last_device_id_) != 0)) {
last_device_change_time_ = now;
+ last_device_id_ = new_device_id;
listener_cb_.Run();
did_run_listener_cb = true;
}
diff --git a/chromium/media/audio/win/audio_device_listener_win.h b/chromium/media/audio/win/audio_device_listener_win.h
index c40054a723d..4ea3568482c 100644
--- a/chromium/media/audio/win/audio_device_listener_win.h
+++ b/chromium/media/audio/win/audio_device_listener_win.h
@@ -63,6 +63,7 @@ class MEDIA_EXPORT AudioDeviceListenerWin : public IMMNotificationClient {
// Used to rate limit device change events.
base::TimeTicks last_device_change_time_;
+ std::string last_device_id_;
// AudioDeviceListenerWin must be constructed and destructed on one thread.
THREAD_CHECKER(thread_checker_);
diff --git a/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc b/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
index 84b87e21428..c796ab32a0c 100644
--- a/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
@@ -13,6 +13,7 @@
#include "base/environment.h"
#include "base/files/file_util.h"
+#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/path_service.h"
#include "base/run_loop.h"
diff --git a/chromium/media/audio/win/audio_output_win_unittest.cc b/chromium/media/audio/win/audio_output_win_unittest.cc
index 7a4fc8648c4..e3c0377f163 100644
--- a/chromium/media/audio/win/audio_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_output_win_unittest.cc
@@ -10,6 +10,7 @@
#include <memory>
#include "base/base_paths.h"
+#include "base/logging.h"
#include "base/memory/aligned_memory.h"
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
diff --git a/chromium/media/audio/win/audio_session_event_listener_win.cc b/chromium/media/audio/win/audio_session_event_listener_win.cc
index 7d568e7bd35..d344295c077 100644
--- a/chromium/media/audio/win/audio_session_event_listener_win.cc
+++ b/chromium/media/audio/win/audio_session_event_listener_win.cc
@@ -4,6 +4,8 @@
#include "media/audio/win/audio_session_event_listener_win.h"
+#include "base/logging.h"
+
namespace media {
AudioSessionEventListener::AudioSessionEventListener(
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index e5f3cb49ffd..e45cb89a96a 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -37,6 +37,7 @@ jumbo_source_set("base") {
"android_overlay_config.cc",
"android_overlay_config.h",
"android_overlay_mojo_factory.h",
+ "async_destroy_video_decoder.h",
"audio_block_fifo.cc",
"audio_block_fifo.h",
"audio_buffer.cc",
@@ -345,6 +346,7 @@ jumbo_source_set("base") {
]
deps = [
"//base/allocator:buildflags",
+ "//base/util/values:values_util",
"//components/system_media_controls/linux/buildflags",
"//gpu/command_buffer/common",
"//gpu/ipc/common:common",
@@ -411,7 +413,10 @@ jumbo_source_set("base") {
}
if (is_win) {
- public_deps += [ "//media/base/win:d3d11" ]
+ public_deps += [
+ "//media/base/win:d3d11",
+ "//media/base/win:mf_cdm_proxy",
+ ]
}
if (is_chromecast || is_fuchsia) {
diff --git a/chromium/media/base/android/android_cdm_factory.cc b/chromium/media/base/android/android_cdm_factory.cc
index a2c20394f23..faa670ffb1d 100644
--- a/chromium/media/base/android/android_cdm_factory.cc
+++ b/chromium/media/base/android/android_cdm_factory.cc
@@ -43,7 +43,6 @@ AndroidCdmFactory::~AndroidCdmFactory() {
void AndroidCdmFactory::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -56,11 +55,6 @@ void AndroidCdmFactory::Create(
CdmCreatedCB bound_cdm_created_cb =
BindToCurrentLoop(std::move(cdm_created_cb));
- if (security_origin.opaque()) {
- std::move(bound_cdm_created_cb).Run(nullptr, "Invalid origin.");
- return;
- }
-
// Create AesDecryptor here to support External Clear Key key system.
// This is used for testing.
if (base::FeatureList::IsEnabled(media::kExternalClearKeyForTesting) &&
@@ -92,9 +86,9 @@ void AndroidCdmFactory::Create(
creation_id_,
PendingCreation(std::move(factory), std::move(bound_cdm_created_cb)));
- raw_factory->Create(key_system, security_origin, cdm_config,
- session_message_cb, session_closed_cb,
- session_keys_change_cb, session_expiration_update_cb,
+ raw_factory->Create(key_system, cdm_config, session_message_cb,
+ session_closed_cb, session_keys_change_cb,
+ session_expiration_update_cb,
base::BindOnce(&AndroidCdmFactory::OnCdmCreated,
weak_factory_.GetWeakPtr(), creation_id_));
}
diff --git a/chromium/media/base/android/android_cdm_factory.h b/chromium/media/base/android/android_cdm_factory.h
index b32b7935b77..ce241061eff 100644
--- a/chromium/media/base/android/android_cdm_factory.h
+++ b/chromium/media/base/android/android_cdm_factory.h
@@ -29,7 +29,6 @@ class MEDIA_EXPORT AndroidCdmFactory : public CdmFactory {
// CdmFactory implementation.
void Create(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
diff --git a/chromium/media/base/android/media_drm_bridge.cc b/chromium/media/base/android/media_drm_bridge.cc
index 6bdfc3a974d..e0695e964d3 100644
--- a/chromium/media/base/android/media_drm_bridge.cc
+++ b/chromium/media/base/android/media_drm_bridge.cc
@@ -32,6 +32,7 @@
#include "media/base/android/media_drm_bridge_delegate.h"
#include "media/base/android/media_jni_headers/MediaDrmBridge_jni.h"
#include "media/base/cdm_key_information.h"
+#include "media/base/logging_override_if_enabled.h"
#include "media/base/media_drm_key_type.h"
#include "media/base/media_switches.h"
#include "media/base/provision_fetcher.h"
diff --git a/chromium/media/base/android/media_drm_bridge_factory.cc b/chromium/media/base/android/media_drm_bridge_factory.cc
index 52b3e01c271..ee3e4dcdb86 100644
--- a/chromium/media/base/android/media_drm_bridge_factory.cc
+++ b/chromium/media/base/android/media_drm_bridge_factory.cc
@@ -9,7 +9,6 @@
#include "media/base/cdm_config.h"
#include "media/base/content_decryption_module.h"
#include "third_party/widevine/cdm/widevine_cdm_common.h"
-#include "url/origin.h"
namespace media {
@@ -28,7 +27,6 @@ MediaDrmBridgeFactory::~MediaDrmBridgeFactory() {
void MediaDrmBridgeFactory::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -37,7 +35,6 @@ void MediaDrmBridgeFactory::Create(
CdmCreatedCB cdm_created_cb) {
DCHECK(MediaDrmBridge::IsKeySystemSupported(key_system));
DCHECK(MediaDrmBridge::IsAvailable());
- DCHECK(!security_origin.opaque());
DCHECK(scheme_uuid_.empty()) << "This factory can only be used once.";
scheme_uuid_ = MediaDrmBridge::GetUUID(key_system);
diff --git a/chromium/media/base/android/media_drm_bridge_factory.h b/chromium/media/base/android/media_drm_bridge_factory.h
index 4dfa133dcf2..6df430ad62f 100644
--- a/chromium/media/base/android/media_drm_bridge_factory.h
+++ b/chromium/media/base/android/media_drm_bridge_factory.h
@@ -32,7 +32,6 @@ class MEDIA_EXPORT MediaDrmBridgeFactory : public CdmFactory {
// CdmFactory implementation.
void Create(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
diff --git a/chromium/media/base/android/media_player_bridge.cc b/chromium/media/base/android/media_player_bridge.cc
index c0a10cef065..eeab111c274 100644
--- a/chromium/media/base/android/media_player_bridge.cc
+++ b/chromium/media/base/android/media_player_bridge.cc
@@ -74,6 +74,7 @@ MediaPlayerBridge::MediaPlayerBridge(const GURL& url,
bool allow_credentials,
bool is_hls)
: prepared_(false),
+ playback_completed_(false),
pending_play_(false),
should_seek_on_prepare_(false),
url_(url),
@@ -122,7 +123,7 @@ void MediaPlayerBridge::Initialize() {
return;
}
- if (allow_credentials_) {
+ if (allow_credentials_ && !url_.SchemeIsFile()) {
media::MediaResourceGetter* resource_getter =
client_->GetMediaResourceGetter();
@@ -415,7 +416,10 @@ void MediaPlayerBridge::OnMediaError(int error_type) {
}
void MediaPlayerBridge::OnPlaybackComplete() {
- client_->OnPlaybackComplete();
+ if (!playback_completed_) {
+ playback_completed_ = true;
+ client_->OnPlaybackComplete();
+ }
}
void MediaPlayerBridge::OnMediaPrepared() {
@@ -509,6 +513,8 @@ void MediaPlayerBridge::SeekInternal(base::TimeDelta time) {
return;
}
+ playback_completed_ = false;
+
// Note: we do not want to count changes in media time due to seeks as watch
// time, but tracking pending seeks is not completely trivial. Instead seeks
// larger than kWatchTimeReportingInterval * 2 will be discarded by the sanity
diff --git a/chromium/media/base/android/media_player_bridge.h b/chromium/media/base/android/media_player_bridge.h
index 8de59c2c59a..8757e09b24a 100644
--- a/chromium/media/base/android/media_player_bridge.h
+++ b/chromium/media/base/android/media_player_bridge.h
@@ -199,6 +199,9 @@ class MEDIA_EXPORT MediaPlayerBridge {
// Whether the player is prepared for playback.
bool prepared_;
+ // Whether the player completed playback.
+ bool playback_completed_;
+
// Pending play event while player is preparing.
bool pending_play_;
diff --git a/chromium/media/base/async_destroy_video_decoder.h b/chromium/media/base/async_destroy_video_decoder.h
new file mode 100644
index 00000000000..921109b08f1
--- /dev/null
+++ b/chromium/media/base/async_destroy_video_decoder.h
@@ -0,0 +1,91 @@
+// Copyright (c) 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_ASYNC_DESTROY_VIDEO_DECODER_H_
+#define MEDIA_BASE_ASYNC_DESTROY_VIDEO_DECODER_H_
+
+#include <memory>
+#include <type_traits>
+#include "media/base/video_decoder.h"
+
+namespace media {
+
+// Some VideoDecoder implementations must do non-synchronous cleanup before
+// they are destroyed. This wrapper implementation allows a VideoDecoder
+// to schedule its own cleanup tasks before its memory is released.
+// The underlying type must implement a static
+// `DestroyAsync(std::unique_ptr<T>)` function which fires any pending
+// callbacks, stops and destroys the decoder. After this call, external
+// resources (e.g. raw pointers) held by the decoder might be invalidated
+// immediately. So if the decoder is destroyed asynchronously (e.g. DeleteSoon),
+// external resources must be released in this call.
+template <typename T>
+class AsyncDestroyVideoDecoder final : public VideoDecoder {
+ public:
+ explicit AsyncDestroyVideoDecoder(std::unique_ptr<T> wrapped_decoder)
+ : wrapped_decoder_(std::move(wrapped_decoder)) {
+ static_assert(std::is_base_of<VideoDecoder, T>::value,
+ "T must implement 'media::VideoDecoder'");
+ DCHECK(wrapped_decoder_);
+ }
+
+ ~AsyncDestroyVideoDecoder() override {
+ if (wrapped_decoder_)
+ T::DestroyAsync(std::move(wrapped_decoder_));
+ }
+
+ std::string GetDisplayName() const override {
+ DCHECK(wrapped_decoder_);
+ return wrapped_decoder_->GetDisplayName();
+ }
+
+ bool IsPlatformDecoder() const override {
+ DCHECK(wrapped_decoder_);
+ return wrapped_decoder_->IsPlatformDecoder();
+ }
+
+ void Initialize(const VideoDecoderConfig& config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ InitCB init_cb,
+ const OutputCB& output_cb,
+ const WaitingCB& waiting_cb) override {
+ DCHECK(wrapped_decoder_);
+ wrapped_decoder_->Initialize(config, low_delay, cdm_context,
+ std::move(init_cb), output_cb, waiting_cb);
+ }
+
+ void Decode(scoped_refptr<DecoderBuffer> buffer,
+ DecodeCB decode_cb) override {
+ DCHECK(wrapped_decoder_);
+ wrapped_decoder_->Decode(std::move(buffer), std::move(decode_cb));
+ }
+
+ void Reset(base::OnceClosure closure) override {
+ DCHECK(wrapped_decoder_);
+ wrapped_decoder_->Reset(std::move(closure));
+ }
+
+ bool NeedsBitstreamConversion() const override {
+ DCHECK(wrapped_decoder_);
+ return wrapped_decoder_->NeedsBitstreamConversion();
+ }
+
+ bool CanReadWithoutStalling() const override {
+ DCHECK(wrapped_decoder_);
+ return wrapped_decoder_->CanReadWithoutStalling();
+ }
+
+ int GetMaxDecodeRequests() const override {
+ DCHECK(wrapped_decoder_);
+ return wrapped_decoder_->GetMaxDecodeRequests();
+ }
+
+ private:
+ std::unique_ptr<T> wrapped_decoder_;
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_ASYNC_DESTROY_VIDEO_DECODER_H_
diff --git a/chromium/media/base/audio_buffer.cc b/chromium/media/base/audio_buffer.cc
index d0f34c51e5b..622c9154f4f 100644
--- a/chromium/media/base/audio_buffer.cc
+++ b/chromium/media/base/audio_buffer.cc
@@ -7,6 +7,7 @@
#include <cmath>
#include "base/logging.h"
+#include "base/notreached.h"
#include "media/base/audio_bus.h"
#include "media/base/limits.h"
#include "media/base/timestamp_constants.h"
@@ -249,9 +250,9 @@ void AudioBuffer::ReadFrames(int frames_to_copy,
// |dest| must have the same number of channels, and the number of frames
// specified must be in range.
DCHECK(!end_of_stream());
- DCHECK_EQ(dest->channels(), channel_count_);
- DCHECK_LE(source_frame_offset + frames_to_copy, adjusted_frame_count_);
- DCHECK_LE(dest_frame_offset + frames_to_copy, dest->frames());
+ CHECK_EQ(dest->channels(), channel_count_);
+ CHECK_LE(source_frame_offset + frames_to_copy, adjusted_frame_count_);
+ CHECK_LE(dest_frame_offset + frames_to_copy, dest->frames());
dest->set_is_bitstream_format(IsBitstreamFormat());
diff --git a/chromium/media/base/audio_converter.cc b/chromium/media/base/audio_converter.cc
index e54ded8b8fc..8d56906b31f 100644
--- a/chromium/media/base/audio_converter.cc
+++ b/chromium/media/base/audio_converter.cc
@@ -14,6 +14,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "base/logging.h"
#include "base/trace_event/trace_event.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_pull_fifo.h"
diff --git a/chromium/media/base/audio_renderer.h b/chromium/media/base/audio_renderer.h
index eac3c82ffea..533c46150d7 100644
--- a/chromium/media/base/audio_renderer.h
+++ b/chromium/media/base/audio_renderer.h
@@ -65,6 +65,10 @@ class MEDIA_EXPORT AudioRenderer {
// (restore UA default).
virtual void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) = 0;
+ // Sets a flag indicating that the AudioRenderer should use or avoid pitch
+ // preservation when playing back at speeds other than 1.0.
+ virtual void SetPreservesPitch(bool preserves_pitch) = 0;
+
private:
DISALLOW_COPY_AND_ASSIGN(AudioRenderer);
};
diff --git a/chromium/media/base/audio_renderer_mixer.cc b/chromium/media/base/audio_renderer_mixer.cc
index d3c1bd8c7db..d7109300027 100644
--- a/chromium/media/base/audio_renderer_mixer.cc
+++ b/chromium/media/base/audio_renderer_mixer.cc
@@ -24,49 +24,15 @@ namespace media {
enum { kPauseDelaySeconds = 10 };
-// Tracks the maximum value of a counter and logs it into a UMA histogram upon
-// each increase of the maximum. NOT thread-safe, make sure it is used under
-// lock.
-class AudioRendererMixer::UMAMaxValueTracker {
- public:
- UMAMaxValueTracker(UmaLogCallback log_callback)
- : log_callback_(std::move(log_callback)), count_(0), max_count_(0) {}
-
- ~UMAMaxValueTracker() = default;
-
- // Increments the counter, updates the maximum.
- void Increment() {
- ++count_;
- if (max_count_ < count_) {
- max_count_ = count_;
- log_callback_.Run(max_count_);
- }
- }
-
- // Decrements the counter.
- void Decrement() {
- DCHECK_GE(count_, 0);
- --count_;
- }
-
- private:
- const UmaLogCallback log_callback_;
- int count_;
- int max_count_;
- DISALLOW_COPY_AND_ASSIGN(UMAMaxValueTracker);
-};
-
AudioRendererMixer::AudioRendererMixer(const AudioParameters& output_params,
- scoped_refptr<AudioRendererSink> sink,
- UmaLogCallback log_callback)
+ scoped_refptr<AudioRendererSink> sink)
: output_params_(output_params),
audio_sink_(std::move(sink)),
master_converter_(output_params, output_params, true),
pause_delay_(base::TimeDelta::FromSeconds(kPauseDelaySeconds)),
last_play_time_(base::TimeTicks::Now()),
// Initialize |playing_| to true since Start() results in an auto-play.
- playing_(true),
- input_count_tracker_(new UMAMaxValueTracker(std::move(log_callback))) {
+ playing_(true) {
DCHECK(audio_sink_);
// If enabled we will disable the real audio output stream for muted/silent
@@ -126,8 +92,6 @@ void AudioRendererMixer::AddMixerInput(const AudioParameters& input_params,
}
converter->second->AddInput(input);
}
-
- input_count_tracker_->Increment();
}
void AudioRendererMixer::RemoveMixerInput(
@@ -148,8 +112,6 @@ void AudioRendererMixer::RemoveMixerInput(
converters_.erase(converter);
}
}
-
- input_count_tracker_->Decrement();
}
void AudioRendererMixer::AddErrorCallback(AudioRendererMixerInput* input) {
diff --git a/chromium/media/base/audio_renderer_mixer.h b/chromium/media/base/audio_renderer_mixer.h
index b8917dcf532..db82cb3eaa8 100644
--- a/chromium/media/base/audio_renderer_mixer.h
+++ b/chromium/media/base/audio_renderer_mixer.h
@@ -35,11 +35,8 @@ class SilentSinkSuspender;
class MEDIA_EXPORT AudioRendererMixer
: public AudioRendererSink::RenderCallback {
public:
- using UmaLogCallback = base::RepeatingCallback<void(int)>;
-
AudioRendererMixer(const AudioParameters& output_params,
- scoped_refptr<AudioRendererSink> sink,
- UmaLogCallback log_callback);
+ scoped_refptr<AudioRendererSink> sink);
~AudioRendererMixer() override;
// Add or remove a mixer input from mixing; called by AudioRendererMixerInput.
@@ -62,8 +59,6 @@ class MEDIA_EXPORT AudioRendererMixer
}
private:
- class UMAMaxValueTracker;
-
// AudioRendererSink::RenderCallback implementation.
int Render(base::TimeDelta delay,
base::TimeTicks delay_timestamp,
@@ -115,10 +110,6 @@ class MEDIA_EXPORT AudioRendererMixer
base::TimeTicks last_play_time_ GUARDED_BY(lock_);
bool playing_ GUARDED_BY(lock_);
- // Tracks the maximum number of simultaneous mixer inputs and logs it into
- // UMA histogram upon the destruction.
- std::unique_ptr<UMAMaxValueTracker> input_count_tracker_ GUARDED_BY(lock_);
-
DISALLOW_COPY_AND_ASSIGN(AudioRendererMixer);
};
diff --git a/chromium/media/base/audio_renderer_mixer_input_unittest.cc b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
index c94d73b9ecd..39b9c758531 100644
--- a/chromium/media/base/audio_renderer_mixer_input_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
@@ -21,10 +21,6 @@
using testing::_;
-namespace {
-void LogUma(int value) {}
-}
-
namespace media {
static const int kSampleRate = 48000;
@@ -67,8 +63,8 @@ class AudioRendererMixerInputTest : public testing::Test,
EXPECT_CALL(*reinterpret_cast<MockAudioRendererSink*>(sink.get()),
Start());
- mixers_[idx].reset(new AudioRendererMixer(
- audio_parameters_, std::move(sink), base::BindRepeating(&LogUma)));
+ mixers_[idx] = std::make_unique<AudioRendererMixer>(audio_parameters_,
+ std::move(sink));
}
EXPECT_CALL(*this, ReturnMixer(mixers_[idx].get()));
return mixers_[idx].get();
diff --git a/chromium/media/base/audio_renderer_mixer_unittest.cc b/chromium/media/base/audio_renderer_mixer_unittest.cc
index 14cdde6d460..d979c591c72 100644
--- a/chromium/media/base/audio_renderer_mixer_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_unittest.cc
@@ -25,10 +25,6 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-namespace {
-void LogUma(int value) {}
-}
-
namespace media {
// Parameters which control the many input case tests.
@@ -77,8 +73,7 @@ class AudioRendererMixerTest
EXPECT_CALL(*sink_.get(), Start());
EXPECT_CALL(*sink_.get(), Stop());
- mixer_.reset(new AudioRendererMixer(output_parameters_, sink_,
- base::BindRepeating(&LogUma)));
+ mixer_ = std::make_unique<AudioRendererMixer>(output_parameters_, sink_);
mixer_callback_ = sink_->callback();
audio_bus_ = AudioBus::Create(output_parameters_);
diff --git a/chromium/media/base/audio_shifter.cc b/chromium/media/base/audio_shifter.cc
index e6cb0459566..3b7ae5441fc 100644
--- a/chromium/media/base/audio_shifter.cc
+++ b/chromium/media/base/audio_shifter.cc
@@ -10,6 +10,7 @@
#include "base/bind.h"
#include "base/containers/circular_deque.h"
+#include "base/logging.h"
#include "base/numerics/ranges.h"
#include "base/trace_event/trace_event.h"
#include "media/base/audio_bus.h"
diff --git a/chromium/media/base/bit_reader_core.h b/chromium/media/base/bit_reader_core.h
index 1ba856a6986..7a9f7497ec0 100644
--- a/chromium/media/base/bit_reader_core.h
+++ b/chromium/media/base/bit_reader_core.h
@@ -7,7 +7,7 @@
#include <stdint.h>
-#include "base/logging.h"
+#include "base/check_op.h"
#include "base/macros.h"
#include "media/base/media_export.h"
diff --git a/chromium/media/base/cdm_context.cc b/chromium/media/base/cdm_context.cc
index dac22fbd70f..cf298fef0e6 100644
--- a/chromium/media/base/cdm_context.cc
+++ b/chromium/media/base/cdm_context.cc
@@ -48,6 +48,4 @@ FuchsiaCdmContext* CdmContext::GetFuchsiaCdmContext() {
}
#endif
-void IgnoreCdmAttached(bool /* success */) {}
-
} // namespace media
diff --git a/chromium/media/base/cdm_context.h b/chromium/media/base/cdm_context.h
index e346698746e..b3a0d19c149 100644
--- a/chromium/media/base/cdm_context.h
+++ b/chromium/media/base/cdm_context.h
@@ -12,6 +12,7 @@
#include "media/media_buildflags.h"
#if defined(OS_WIN)
+#include <wrl/client.h>
struct IMFCdmProxy;
#endif
@@ -90,7 +91,8 @@ class MEDIA_EXPORT CdmContext {
virtual int GetCdmId() const;
#if defined(OS_WIN)
- using GetMediaFoundationCdmProxyCB = base::OnceCallback<void(IMFCdmProxy*)>;
+ using GetMediaFoundationCdmProxyCB =
+ base::OnceCallback<void(Microsoft::WRL::ComPtr<IMFCdmProxy>)>;
// This allows a CdmContext to expose an IMFTrustedInput instance for use in
// a Media Foundation rendering pipeline. This method is asynchronous because
// the underlying MF-based CDM might not have a native session created yet.
@@ -120,13 +122,6 @@ class MEDIA_EXPORT CdmContext {
DISALLOW_COPY_AND_ASSIGN(CdmContext);
};
-// Callback to notify that the CdmContext has been completely attached to
-// the media pipeline. Parameter indicates whether the operation succeeded.
-typedef base::OnceCallback<void(bool)> CdmAttachedCB;
-
-// A dummy implementation of CdmAttachedCB.
-MEDIA_EXPORT void IgnoreCdmAttached(bool success);
-
// A reference holder to make sure the CdmContext is always valid as long as
// |this| is alive. Typically |this| will hold a reference (directly or
// indirectly) to the host, e.g. a ContentDecryptionModule.
diff --git a/chromium/media/base/cdm_factory.h b/chromium/media/base/cdm_factory.h
index 29d764d8ae0..96fcf3a8678 100644
--- a/chromium/media/base/cdm_factory.h
+++ b/chromium/media/base/cdm_factory.h
@@ -34,7 +34,6 @@ class MEDIA_EXPORT CdmFactory {
// asynchronously.
virtual void Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
diff --git a/chromium/media/base/cdm_promise.h b/chromium/media/base/cdm_promise.h
index 771a6f025cc..5b3d05f5bc7 100644
--- a/chromium/media/base/cdm_promise.h
+++ b/chromium/media/base/cdm_promise.h
@@ -9,6 +9,7 @@
#include <string>
+#include "base/check.h"
#include "base/logging.h"
#include "base/macros.h"
#include "media/base/cdm_key_information.h"
diff --git a/chromium/media/base/cdm_session_tracker.h b/chromium/media/base/cdm_session_tracker.h
index 94dfa5e5f57..2d902640637 100644
--- a/chromium/media/base/cdm_session_tracker.h
+++ b/chromium/media/base/cdm_session_tracker.h
@@ -10,7 +10,6 @@
#include <string>
#include <unordered_set>
-#include "base/logging.h"
#include "base/macros.h"
#include "media/base/content_decryption_module.h"
#include "media/base/media_export.h"
diff --git a/chromium/media/base/channel_layout.cc b/chromium/media/base/channel_layout.cc
index d7378d2619c..5a1738bbe85 100644
--- a/chromium/media/base/channel_layout.cc
+++ b/chromium/media/base/channel_layout.cc
@@ -6,7 +6,9 @@
#include <stddef.h>
+#include "base/check_op.h"
#include "base/logging.h"
+#include "base/notreached.h"
#include "base/stl_util.h"
namespace media {
diff --git a/chromium/media/base/data_buffer.h b/chromium/media/base/data_buffer.h
index b7906b8ffd5..72a0398569c 100644
--- a/chromium/media/base/data_buffer.h
+++ b/chromium/media/base/data_buffer.h
@@ -9,7 +9,7 @@
#include <memory>
-#include "base/logging.h"
+#include "base/check_op.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/time/time.h"
diff --git a/chromium/media/base/decode_status.cc b/chromium/media/base/decode_status.cc
index 97fda7276d0..de30ca7e306 100644
--- a/chromium/media/base/decode_status.cc
+++ b/chromium/media/base/decode_status.cc
@@ -51,7 +51,7 @@ ScopedDecodeTrace::ScopedDecodeTrace(const char* trace_name,
: trace_name_(trace_name) {
DCHECK(trace_name_);
TRACE_EVENT_ASYNC_BEGIN1("media", trace_name_, this, "decoder_buffer",
- buffer.AsHumanReadableString());
+ buffer.AsHumanReadableString(/*verbose=*/true));
}
ScopedDecodeTrace::~ScopedDecodeTrace() {
diff --git a/chromium/media/base/decoder_buffer.cc b/chromium/media/base/decoder_buffer.cc
index 0a593b8d6ef..995e1044db7 100644
--- a/chromium/media/base/decoder_buffer.cc
+++ b/chromium/media/base/decoder_buffer.cc
@@ -164,21 +164,27 @@ bool DecoderBuffer::MatchesForTesting(const DecoderBuffer& buffer) const {
: true;
}
-std::string DecoderBuffer::AsHumanReadableString() const {
+std::string DecoderBuffer::AsHumanReadableString(bool verbose) const {
if (end_of_stream())
return "EOS";
std::ostringstream s;
- s << "timestamp=" << timestamp_.InMicroseconds()
+
+ s << "{timestamp=" << timestamp_.InMicroseconds()
<< " duration=" << duration_.InMicroseconds() << " size=" << size_
- << " side_data_size=" << side_data_size_
<< " is_key_frame=" << is_key_frame_
- << " encrypted=" << (decrypt_config_ != NULL) << " discard_padding (us)=("
- << discard_padding_.first.InMicroseconds() << ", "
- << discard_padding_.second.InMicroseconds() << ")";
+ << " encrypted=" << (decrypt_config_ != nullptr);
+
+ if (verbose) {
+ s << " side_data_size=" << side_data_size_ << " discard_padding (us)=("
+ << discard_padding_.first.InMicroseconds() << ", "
+ << discard_padding_.second.InMicroseconds() << ")";
+
+ if (decrypt_config_)
+ s << " decrypt_config=" << (*decrypt_config_);
+ }
- if (decrypt_config_)
- s << " decrypt=" << (*decrypt_config_);
+ s << "}";
return s.str();
}
diff --git a/chromium/media/base/decoder_buffer.h b/chromium/media/base/decoder_buffer.h
index e1bd6297b47..ea64fa50d0b 100644
--- a/chromium/media/base/decoder_buffer.h
+++ b/chromium/media/base/decoder_buffer.h
@@ -12,7 +12,7 @@
#include <string>
#include <utility>
-#include "base/logging.h"
+#include "base/check.h"
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
#include "base/memory/read_only_shared_memory_region.h"
@@ -190,7 +190,7 @@ class MEDIA_EXPORT DecoderBuffer
bool MatchesForTesting(const DecoderBuffer& buffer) const;
// Returns a human-readable string describing |*this|.
- std::string AsHumanReadableString() const;
+ std::string AsHumanReadableString(bool verbose = false) const;
// Replaces any existing side data with data copied from |side_data|.
void CopySideDataFrom(const uint8_t* side_data, size_t side_data_size);
diff --git a/chromium/media/base/fake_audio_renderer_sink.cc b/chromium/media/base/fake_audio_renderer_sink.cc
index bf5ca25b581..114aa95dcb1 100644
--- a/chromium/media/base/fake_audio_renderer_sink.cc
+++ b/chromium/media/base/fake_audio_renderer_sink.cc
@@ -47,7 +47,7 @@ void FakeAudioRendererSink::Start() {
}
void FakeAudioRendererSink::Stop() {
- callback_ = NULL;
+ callback_ = nullptr;
ChangeState(kStopped);
}
diff --git a/chromium/media/base/format_utils.cc b/chromium/media/base/format_utils.cc
index b86350cea54..be5a8b9c411 100644
--- a/chromium/media/base/format_utils.cc
+++ b/chromium/media/base/format_utils.cc
@@ -34,6 +34,9 @@ base::Optional<VideoPixelFormat> GfxBufferFormatToVideoPixelFormat(
case gfx::BufferFormat::YUV_420_BIPLANAR:
return PIXEL_FORMAT_NV12;
+ case gfx::BufferFormat::P010:
+ return PIXEL_FORMAT_P016LE;
+
default:
DLOG(WARNING) << "Unsupported BufferFormat: "
<< gfx::BufferFormatToString(format);
@@ -62,6 +65,9 @@ base::Optional<gfx::BufferFormat> VideoPixelFormatToGfxBufferFormat(
case PIXEL_FORMAT_XBGR:
return gfx::BufferFormat::RGBX_8888;
+ case PIXEL_FORMAT_P016LE:
+ return gfx::BufferFormat::P010;
+
default:
DLOG(WARNING) << "Unsupported VideoPixelFormat: " << pixel_format;
return base::nullopt;
diff --git a/chromium/media/base/key_systems.cc b/chromium/media/base/key_systems.cc
index eabf9796a93..35dc6e348fe 100644
--- a/chromium/media/base/key_systems.cc
+++ b/chromium/media/base/key_systems.cc
@@ -11,6 +11,7 @@
#include "base/logging.h"
#include "base/no_destructor.h"
+#include "base/notreached.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/threading/thread_checker.h"
diff --git a/chromium/media/base/mac/video_frame_mac.cc b/chromium/media/base/mac/video_frame_mac.cc
index e299bf2fadb..7d3eb1a48ed 100644
--- a/chromium/media/base/mac/video_frame_mac.cc
+++ b/chromium/media/base/mac/video_frame_mac.cc
@@ -9,6 +9,7 @@
#include <algorithm>
+#include "base/logging.h"
#include "media/base/video_frame.h"
namespace media {
diff --git a/chromium/media/base/mac/videotoolbox_helpers.cc b/chromium/media/base/mac/videotoolbox_helpers.cc
index 1cd9c596e18..014cd2a05dd 100644
--- a/chromium/media/base/mac/videotoolbox_helpers.cc
+++ b/chromium/media/base/mac/videotoolbox_helpers.cc
@@ -8,6 +8,8 @@
#include <vector>
#include "base/big_endian.h"
+#include "base/logging.h"
+#include "base/notreached.h"
namespace media {
diff --git a/chromium/media/base/media_observer.h b/chromium/media/base/media_observer.h
index eb0e3969c58..533b60dd728 100644
--- a/chromium/media/base/media_observer.h
+++ b/chromium/media/base/media_observer.h
@@ -5,7 +5,6 @@
#ifndef MEDIA_BASE_MEDIA_OBSERVER_H_
#define MEDIA_BASE_MEDIA_OBSERVER_H_
-#include "media/base/cdm_context.h"
#include "media/base/pipeline_metadata.h"
#include "url/gurl.h"
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index e0d62061b43..162d2e2991a 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -239,6 +239,11 @@ const base::Feature kResumeBackgroundVideo {
#endif
};
+// When enabled, MediaCapabilities will check with GPU Video Accelerator
+// Factories to determine isPowerEfficient = true/false.
+const base::Feature kMediaCapabilitiesQueryGpuFactories{
+ "MediaCapabilitiesQueryGpuFactories", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enable Media Capabilities with finch-parameters.
const base::Feature kMediaCapabilitiesWithParameters{
"MediaCapabilitiesWithParameters", base::FEATURE_ENABLED_BY_DEFAULT};
@@ -261,6 +266,12 @@ const base::Feature kBackgroundVideoPauseOptimization{
const base::Feature kCdmHostVerification{"CdmHostVerification",
base::FEATURE_ENABLED_BY_DEFAULT};
+// Use per-CDM-type, per-user and per-site CDM processes (for library CDM). If
+// disabled, the CDM processes are only per-CDM-type, meaning different sites
+// using the same CDM type would share one CDM process.
+const base::Feature kCdmProcessSiteIsolation{"CdmProcessSiteIsolation",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+
// Make MSE garbage collection algorithm more aggressive when we are under
// moderate or critical memory pressure. This will relieve memory pressure by
// releasing stale data from MSE buffers.
@@ -365,7 +376,7 @@ const base::Feature kGlobalMediaControlsOverlayControls{
"GlobalMediaControlsOverlayControls", base::FEATURE_DISABLED_BY_DEFAULT};
// Show picture-in-picture button in Global Media Controls.
-const base::Feature kGlobalMediaControlsPictureInPicture{
+const base::Feature kGlobalMediaControlsPictureInPicture {
"GlobalMediaControlsPictureInPicture",
#if defined(OS_WIN) || defined(OS_MACOSX) || \
(defined(OS_LINUX) && !defined(OS_CHROMEOS))
@@ -375,6 +386,10 @@ const base::Feature kGlobalMediaControlsPictureInPicture{
#endif
};
+// Enable selection of audio output device in Global Media Controls.
+const base::Feature kGlobalMediaControlsSeamlessTransfer {
+ "GlobalMediaControlsSeamlessTransfer", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enable new cpu load estimator. Intended for evaluation in local
// testing and origin-trial.
// TODO(nisse): Delete once we have switched over to always using the
@@ -408,13 +423,9 @@ const base::Feature kUseR16Texture{"use-r16-texture",
const base::Feature kUnifiedAutoplay{"UnifiedAutoplay",
base::FEATURE_ENABLED_BY_DEFAULT};
-// Enable VA-API hardware encode acceleration for H264 on AMD.
-const base::Feature kVaapiH264AMDEncoder{"VaapiH264AMDEncoder",
- base::FEATURE_ENABLED_BY_DEFAULT};
-
-// Enable VA-API hardware low power encoder for all codecs.
-const base::Feature kVaapiLowPowerEncoder{"VaapiLowPowerEncoder",
- base::FEATURE_DISABLED_BY_DEFAULT};
+// Enable VA-API hardware low power encoder for all codecs on intel Gen9x gpu.
+const base::Feature kVaapiLowPowerEncoderGen9x{
+ "VaapiLowPowerEncoderGen9x", base::FEATURE_DISABLED_BY_DEFAULT};
// Enable VA-API hardware encode acceleration for VP8.
const base::Feature kVaapiVP8Encoder{"VaapiVP8Encoder",
@@ -475,7 +486,7 @@ const base::Feature kWidevineAv1ForceSupportForTesting{
"WidevineAv1ForceSupportForTesting", base::FEATURE_DISABLED_BY_DEFAULT};
// Enables handling of hardware media keys for controlling media.
-const base::Feature kHardwareMediaKeyHandling{
+const base::Feature kHardwareMediaKeyHandling {
"HardwareMediaKeyHandling",
#if defined(OS_CHROMEOS) || defined(OS_WIN) || defined(OS_MACOSX) || \
BUILDFLAG(USE_MPRIS)
@@ -505,6 +516,12 @@ const base::Feature kAutoplayWhitelistSettings{
"AutoplayWhitelistSettings", base::FEATURE_ENABLED_BY_DEFAULT};
#if defined(OS_ANDROID)
+// Should we allow video playback to use an overlay if it's not needed for
+// security? Normally, we'd always want to allow this, except as part of the
+// power testing A/B experiment. https://crbug.com/1081346 .
+const base::Feature kAllowNonSecureOverlays{"AllowNonSecureOverlays",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+
// Enable a gesture to make the media controls expaned into the display cutout.
// TODO(beccahughes): Remove this.
const base::Feature kMediaControlsExpandGesture{
@@ -584,7 +601,7 @@ const base::Feature kMediaFoundationAsyncH264Encoding{
// Enables AV1 decode acceleration for Windows.
const base::Feature MEDIA_EXPORT kMediaFoundationAV1Decoding{
- "MediaFoundationAV1Decoding", base::FEATURE_DISABLED_BY_DEFAULT};
+ "MediaFoundationAV1Decoding", base::FEATURE_ENABLED_BY_DEFAULT};
// Enables MediaFoundation based video capture
const base::Feature kMediaFoundationVideoCapture{
@@ -653,6 +670,10 @@ const base::Feature kMediaEngagementHTTPSOnly{
const base::Feature kMediaFeeds{"MediaFeeds",
base::FEATURE_DISABLED_BY_DEFAULT};
+// Enables fetching Media Feeds periodically in the background.
+const base::Feature kMediaFeedsBackgroundFetching{
+ "MediaFeedsBackgroundFetching", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enables checking Media Feeds against safe search to prevent adult content.
const base::Feature kMediaFeedsSafeSearch{"MediaFeedsSafeSearch",
base::FEATURE_ENABLED_BY_DEFAULT};
@@ -710,7 +731,7 @@ const base::Feature kInternalMediaSession {
};
const base::Feature kKaleidoscope{"Kaleidoscope",
- base::FEATURE_DISABLED_BY_DEFAULT};
+ base::FEATURE_ENABLED_BY_DEFAULT};
const base::Feature kUseFakeDeviceForMediaStream{
"use-fake-device-for-media-stream", base::FEATURE_DISABLED_BY_DEFAULT};
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index 5add050c996..967d16fe80a 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -108,6 +108,7 @@ MEDIA_EXPORT extern const base::Feature kAutoplayWhitelistSettings;
MEDIA_EXPORT extern const base::Feature kBackgroundVideoPauseOptimization;
MEDIA_EXPORT extern const base::Feature kBresenhamCadence;
MEDIA_EXPORT extern const base::Feature kCdmHostVerification;
+MEDIA_EXPORT extern const base::Feature kCdmProcessSiteIsolation;
MEDIA_EXPORT extern const base::Feature kD3D11PrintCodecOnCrash;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoder;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderIgnoreWorkarounds;
@@ -126,17 +127,20 @@ MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsAutoDismiss;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsForCast;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsOverlayControls;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsPictureInPicture;
+MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsSeamlessTransfer;
MEDIA_EXPORT extern const base::Feature kHardwareMediaKeyHandling;
MEDIA_EXPORT extern const base::Feature kHardwareSecureDecryption;
MEDIA_EXPORT extern const base::Feature kInternalMediaSession;
MEDIA_EXPORT extern const base::Feature kKaleidoscope;
MEDIA_EXPORT extern const base::Feature kLiveCaption;
MEDIA_EXPORT extern const base::Feature kLowDelayVideoRenderingOnLiveStream;
+MEDIA_EXPORT extern const base::Feature kMediaCapabilitiesQueryGpuFactories;
MEDIA_EXPORT extern const base::Feature kMediaCapabilitiesWithParameters;
MEDIA_EXPORT extern const base::Feature kMediaCastOverlayButton;
MEDIA_EXPORT extern const base::Feature kMediaEngagementBypassAutoplayPolicies;
MEDIA_EXPORT extern const base::Feature kMediaEngagementHTTPSOnly;
MEDIA_EXPORT extern const base::Feature kMediaFeeds;
+MEDIA_EXPORT extern const base::Feature kMediaFeedsBackgroundFetching;
MEDIA_EXPORT extern const base::Feature kMediaFeedsSafeSearch;
MEDIA_EXPORT extern const base::Feature kMediaInspectorLogging;
MEDIA_EXPORT extern const base::Feature kMediaLearningExperiment;
@@ -164,8 +168,7 @@ MEDIA_EXPORT extern const base::Feature kUseFakeDeviceForMediaStream;
MEDIA_EXPORT extern const base::Feature kUseMediaHistoryStore;
MEDIA_EXPORT extern const base::Feature kUseNewMediaCache;
MEDIA_EXPORT extern const base::Feature kUseR16Texture;
-MEDIA_EXPORT extern const base::Feature kVaapiH264AMDEncoder;
-MEDIA_EXPORT extern const base::Feature kVaapiLowPowerEncoder;
+MEDIA_EXPORT extern const base::Feature kVaapiLowPowerEncoderGen9x;
MEDIA_EXPORT extern const base::Feature kVaapiVP8Encoder;
MEDIA_EXPORT extern const base::Feature kVaapiVP9Encoder;
MEDIA_EXPORT extern const base::Feature kVideoBlitColorAccuracy;
@@ -178,6 +181,7 @@ MEDIA_EXPORT extern const base::Feature kVp9kSVCHWDecoding;
#endif // defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
#if defined(OS_ANDROID)
+MEDIA_EXPORT extern const base::Feature kAllowNonSecureOverlays;
MEDIA_EXPORT extern const base::Feature kMediaControlsExpandGesture;
MEDIA_EXPORT extern const base::Feature kMediaDrmPersistentLicense;
MEDIA_EXPORT extern const base::Feature kMediaDrmPreprovisioning;
diff --git a/chromium/media/base/media_url_demuxer.cc b/chromium/media/base/media_url_demuxer.cc
index ce7af50c5a8..cff132e69e2 100644
--- a/chromium/media/base/media_url_demuxer.cc
+++ b/chromium/media/base/media_url_demuxer.cc
@@ -5,6 +5,7 @@
#include "media/base/media_url_demuxer.h"
#include "base/bind.h"
+#include "base/logging.h"
#include "base/single_thread_task_runner.h"
namespace media {
diff --git a/chromium/media/base/memory_dump_provider_proxy.h b/chromium/media/base/memory_dump_provider_proxy.h
index 72767e003fe..0473c79403d 100644
--- a/chromium/media/base/memory_dump_provider_proxy.h
+++ b/chromium/media/base/memory_dump_provider_proxy.h
@@ -12,7 +12,6 @@
#include <string>
#include <utility>
-#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/base/mime_util_internal.cc b/chromium/media/base/mime_util_internal.cc
index 30347811de0..bf57e184925 100644
--- a/chromium/media/base/mime_util_internal.cc
+++ b/chromium/media/base/mime_util_internal.cc
@@ -8,6 +8,7 @@
#include "base/feature_list.h"
#include "base/logging.h"
#include "base/no_destructor.h"
+#include "base/notreached.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
@@ -285,11 +286,10 @@ void MimeUtil::AddSupportedMediaFormats() {
const CodecSet wav_codecs{PCM};
const CodecSet ogg_audio_codecs{FLAC, OPUS, VORBIS};
-#if !defined(OS_ANDROID)
- CodecSet ogg_video_codecs{THEORA, VP8};
-#else
- CodecSet ogg_video_codecs;
-#endif // !defined(OS_ANDROID)
+ CodecSet ogg_video_codecs{VP8};
+#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
+ ogg_video_codecs.emplace(THEORA);
+#endif // BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
CodecSet ogg_codecs(ogg_audio_codecs);
ogg_codecs.insert(ogg_video_codecs.begin(), ogg_video_codecs.end());
diff --git a/chromium/media/base/mime_util_unittest.cc b/chromium/media/base/mime_util_unittest.cc
index 31258457dc0..819390fe12a 100644
--- a/chromium/media/base/mime_util_unittest.cc
+++ b/chromium/media/base/mime_util_unittest.cc
@@ -148,11 +148,7 @@ TEST(MimeUtilTest, CommonMediaMimeType) {
EXPECT_TRUE(IsSupportedMediaMimeType("audio/ogg"));
EXPECT_TRUE(IsSupportedMediaMimeType("application/ogg"));
-#if defined(OS_ANDROID)
- EXPECT_FALSE(IsSupportedMediaMimeType("video/ogg"));
-#else
EXPECT_TRUE(IsSupportedMediaMimeType("video/ogg"));
-#endif // OS_ANDROID
EXPECT_EQ(kHlsSupported, IsSupportedMediaMimeType("application/x-mpegurl"));
EXPECT_EQ(kHlsSupported, IsSupportedMediaMimeType("Application/X-MPEGURL"));
diff --git a/chromium/media/base/mock_filters.cc b/chromium/media/base/mock_filters.cc
index e16cd6a65a9..109903cbc54 100644
--- a/chromium/media/base/mock_filters.cc
+++ b/chromium/media/base/mock_filters.cc
@@ -172,13 +172,11 @@ MockCdmSessionPromise::~MockCdmSessionPromise() {
}
MockCdm::MockCdm(const std::string& key_system,
- const url::Origin& security_origin,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb)
: key_system_(key_system),
- security_origin_(security_origin),
session_message_cb_(session_message_cb),
session_closed_cb_(session_closed_cb),
session_keys_change_cb_(session_keys_change_cb),
@@ -214,7 +212,6 @@ MockCdmFactory::~MockCdmFactory() = default;
void MockCdmFactory::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& /* cdm_config */,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -236,8 +233,8 @@ void MockCdmFactory::Create(
// get the MockCdm via MockCdmFactory::GetCreatedCdm() and explicitly specify
// expectations using EXPECT_CALL.
scoped_refptr<MockCdm> cdm = new NiceMock<MockCdm>(
- key_system, security_origin, session_message_cb, session_closed_cb,
- session_keys_change_cb, session_expiration_update_cb);
+ key_system, session_message_cb, session_closed_cb, session_keys_change_cb,
+ session_expiration_update_cb);
created_cdm_ = cdm.get();
std::move(cdm_created_cb).Run(std::move(cdm), "");
}
diff --git a/chromium/media/base/mock_filters.h b/chromium/media/base/mock_filters.h
index 1d900919416..8f012bd07d9 100644
--- a/chromium/media/base/mock_filters.h
+++ b/chromium/media/base/mock_filters.h
@@ -114,6 +114,7 @@ class MockPipeline : public Pipeline {
MOCK_CONST_METHOD0(GetVolume, float());
MOCK_METHOD1(SetVolume, void(float));
MOCK_METHOD1(SetLatencyHint, void(base::Optional<base::TimeDelta>));
+ MOCK_METHOD1(SetPreservesPitch, void(bool));
// TODO(sandersd): These should probably have setters too.
MOCK_CONST_METHOD0(GetMediaTime, base::TimeDelta());
@@ -357,6 +358,7 @@ class MockAudioRenderer : public AudioRenderer {
MOCK_METHOD1(SetVolume, void(float volume));
MOCK_METHOD1(SetLatencyHint,
void(base::Optional<base::TimeDelta> latency_hint));
+ MOCK_METHOD1(SetPreservesPitch, void(bool));
private:
DISALLOW_COPY_AND_ASSIGN(MockAudioRenderer);
@@ -378,7 +380,8 @@ class MockRenderer : public Renderer {
RendererClient* client,
PipelineStatusCallback& init_cb));
MOCK_METHOD1(SetLatencyHint, void(base::Optional<base::TimeDelta>));
- void Flush(base::OnceClosure flush_cb) { OnFlush(flush_cb); }
+ MOCK_METHOD1(SetPreservesPitch, void(bool));
+ void Flush(base::OnceClosure flush_cb) override { OnFlush(flush_cb); }
MOCK_METHOD1(OnFlush, void(base::OnceClosure& flush_cb));
MOCK_METHOD1(StartPlayingFrom, void(base::TimeDelta timestamp));
MOCK_METHOD1(SetPlaybackRate, void(double playback_rate));
@@ -575,7 +578,6 @@ class MockCdmSessionPromise : public NewSessionCdmPromise {
class MockCdm : public ContentDecryptionModule {
public:
MockCdm(const std::string& key_system,
- const url::Origin& security_origin,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
@@ -646,7 +648,6 @@ class MockCdmFactory : public CdmFactory {
// created CDM is passed to |cdm_created_cb|, a copy is kept (and available
// using Cdm()). If |key_system| is empty, no CDM will be created.
void Create(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
diff --git a/chromium/media/base/output_device_info.cc b/chromium/media/base/output_device_info.cc
index 3bc4eaa1f8c..b1270471f08 100644
--- a/chromium/media/base/output_device_info.cc
+++ b/chromium/media/base/output_device_info.cc
@@ -4,6 +4,8 @@
#include "media/base/output_device_info.h"
+#include <sstream>
+
namespace media {
// Output device information returned by GetOutputDeviceInfo() methods of
diff --git a/chromium/media/base/pipeline.h b/chromium/media/base/pipeline.h
index fbd6575e3d0..72a22a031c9 100644
--- a/chromium/media/base/pipeline.h
+++ b/chromium/media/base/pipeline.h
@@ -12,7 +12,6 @@
#include "base/time/time.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/buffering_state.h"
-#include "media/base/cdm_context.h"
#include "media/base/media_export.h"
#include "media/base/media_status.h"
#include "media/base/media_track.h"
@@ -27,6 +26,7 @@
namespace media {
+class CdmContext;
class Demuxer;
class MEDIA_EXPORT Pipeline {
@@ -225,6 +225,10 @@ class MEDIA_EXPORT Pipeline {
// can choose its own default.
virtual void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) = 0;
+ // Sets whether pitch adjustment should be applied when the playback rate is
+ // different than 1.0.
+ virtual void SetPreservesPitch(bool preserves_pitch) = 0;
+
// Returns the current media playback time, which progresses from 0 until
// GetMediaDuration().
virtual base::TimeDelta GetMediaTime() const = 0;
@@ -243,6 +247,7 @@ class MEDIA_EXPORT Pipeline {
// Gets the current pipeline statistics.
virtual PipelineStatistics GetStatistics() const = 0;
+ using CdmAttachedCB = base::OnceCallback<void(bool)>;
virtual void SetCdm(CdmContext* cdm_context,
CdmAttachedCB cdm_attached_cb) = 0;
};
diff --git a/chromium/media/base/pipeline_impl.cc b/chromium/media/base/pipeline_impl.cc
index e1d7be53c5c..32bf075a0ec 100644
--- a/chromium/media/base/pipeline_impl.cc
+++ b/chromium/media/base/pipeline_impl.cc
@@ -20,6 +20,7 @@
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/cdm_context.h"
#include "media/base/demuxer.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
@@ -69,6 +70,7 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
void SetPlaybackRate(double playback_rate);
void SetVolume(float volume);
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint);
+ void SetPreservesPitch(bool preserves_pitch);
base::TimeDelta GetMediaTime() const;
Ranges<base::TimeDelta> GetBufferedTimeRanges() const;
bool DidLoadingProgress();
@@ -192,6 +194,9 @@ class PipelineImpl::RendererWrapper : public DemuxerHost,
base::Optional<base::TimeDelta> latency_hint_;
CdmContext* cdm_context_;
+ // By default, apply pitch adjustments.
+ bool preserves_pitch_ = true;
+
// Lock used to serialize |shared_state_|.
// TODO(crbug.com/893739): Add GUARDED_BY annotations.
mutable base::Lock shared_state_lock_;
@@ -325,7 +330,7 @@ void PipelineImpl::RendererWrapper::Stop() {
if (demuxer_) {
demuxer_->Stop();
- demuxer_ = NULL;
+ demuxer_ = nullptr;
}
SetState(kStopped);
@@ -481,6 +486,17 @@ void PipelineImpl::RendererWrapper::SetLatencyHint(
shared_state_.renderer->SetLatencyHint(latency_hint_);
}
+void PipelineImpl::RendererWrapper::SetPreservesPitch(bool preserves_pitch) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ if (preserves_pitch_ == preserves_pitch)
+ return;
+
+ preserves_pitch_ = preserves_pitch;
+ if (shared_state_.renderer)
+ shared_state_.renderer->SetPreservesPitch(preserves_pitch_);
+}
+
base::TimeDelta PipelineImpl::RendererWrapper::GetMediaTime() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
@@ -1034,14 +1050,13 @@ void PipelineImpl::RendererWrapper::InitializeRenderer(
break;
}
- if (cdm_context_) {
- shared_state_.renderer->SetCdm(cdm_context_,
- base::BindOnce(&IgnoreCdmAttached));
- }
+ if (cdm_context_)
+ shared_state_.renderer->SetCdm(cdm_context_, base::DoNothing());
- if (latency_hint_) {
+ if (latency_hint_)
shared_state_.renderer->SetLatencyHint(latency_hint_);
- }
+
+ shared_state_.renderer->SetPreservesPitch(preserves_pitch_);
shared_state_.renderer->Initialize(demuxer_, this, std::move(done_cb));
}
@@ -1359,6 +1374,15 @@ void PipelineImpl::SetLatencyHint(
base::Unretained(renderer_wrapper_.get()), latency_hint));
}
+void PipelineImpl::SetPreservesPitch(bool preserves_pitch) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ media_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&RendererWrapper::SetPreservesPitch,
+ base::Unretained(renderer_wrapper_.get()),
+ preserves_pitch));
+}
+
base::TimeDelta PipelineImpl::GetMediaTime() const {
DCHECK(thread_checker_.CalledOnValidThread());
diff --git a/chromium/media/base/pipeline_impl.h b/chromium/media/base/pipeline_impl.h
index 101de818805..d9444600f6f 100644
--- a/chromium/media/base/pipeline_impl.h
+++ b/chromium/media/base/pipeline_impl.h
@@ -104,6 +104,7 @@ class MEDIA_EXPORT PipelineImpl : public Pipeline {
float GetVolume() const override;
void SetVolume(float volume) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void SetPreservesPitch(bool preserves_pitch) override;
base::TimeDelta GetMediaTime() const override;
Ranges<base::TimeDelta> GetBufferedTimeRanges() const override;
base::TimeDelta GetMediaDuration() const override;
diff --git a/chromium/media/base/pipeline_impl_unittest.cc b/chromium/media/base/pipeline_impl_unittest.cc
index 444f22ca07c..01b1e8b39b3 100644
--- a/chromium/media/base/pipeline_impl_unittest.cc
+++ b/chromium/media/base/pipeline_impl_unittest.cc
@@ -128,6 +128,8 @@ class PipelineImplTest : public ::testing::Test {
.WillRepeatedly(Return(base::TimeDelta()));
EXPECT_CALL(*demuxer_, GetStartTime()).WillRepeatedly(Return(start_time_));
+
+ EXPECT_CALL(*renderer_, SetPreservesPitch(true)).Times(AnyNumber());
}
~PipelineImplTest() override {
@@ -302,6 +304,7 @@ class PipelineImplTest : public ::testing::Test {
// |renderer_| has been deleted, replace it.
scoped_renderer_.reset(new StrictMock<MockRenderer>());
renderer_ = scoped_renderer_.get();
+ EXPECT_CALL(*renderer_, SetPreservesPitch(_)).Times(AnyNumber());
}
void ExpectResume(const base::TimeDelta& seek_time) {
@@ -606,6 +609,10 @@ TEST_F(PipelineImplTest, SuspendResume) {
EXPECT_EQ(stats.video_memory_usage,
pipeline_->GetStatistics().video_memory_usage);
+ // Make sure the preserves pitch flag is preserved between after resuming.
+ EXPECT_CALL(*renderer_, SetPreservesPitch(false)).Times(1);
+ pipeline_->SetPreservesPitch(false);
+
ExpectSuspend();
DoSuspend();
@@ -614,6 +621,8 @@ TEST_F(PipelineImplTest, SuspendResume) {
base::TimeDelta expected = base::TimeDelta::FromSeconds(2000);
ExpectResume(expected);
+ EXPECT_CALL(*renderer_, SetPreservesPitch(false)).Times(1);
+
DoResume(expected);
}
@@ -631,6 +640,21 @@ TEST_F(PipelineImplTest, SetVolume) {
base::RunLoop().RunUntilIdle();
}
+TEST_F(PipelineImplTest, SetPreservesPitch) {
+ CreateAudioStream();
+ SetDemuxerExpectations();
+
+ // The audio renderer preserve pitch by default.
+ EXPECT_CALL(*renderer_, SetPreservesPitch(true));
+ StartPipelineAndExpect(PIPELINE_OK);
+ base::RunLoop().RunUntilIdle();
+
+ // Changes to the preservesPitch flag should be propagated.
+ EXPECT_CALL(*renderer_, SetPreservesPitch(false));
+ pipeline_->SetPreservesPitch(false);
+ base::RunLoop().RunUntilIdle();
+}
+
TEST_F(PipelineImplTest, Properties) {
CreateVideoStream();
const auto kDuration = base::TimeDelta::FromSeconds(100);
diff --git a/chromium/media/base/ranges.h b/chromium/media/base/ranges.h
index ca6d03546aa..d9aa4803d69 100644
--- a/chromium/media/base/ranges.h
+++ b/chromium/media/base/ranges.h
@@ -12,7 +12,7 @@
#include <ostream>
#include <vector>
-#include "base/logging.h"
+#include "base/check_op.h"
#include "base/time/time.h"
#include "media/base/media_export.h"
diff --git a/chromium/media/base/renderer.cc b/chromium/media/base/renderer.cc
index c2e1736fdc0..c1970f95e89 100644
--- a/chromium/media/base/renderer.cc
+++ b/chromium/media/base/renderer.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "media/base/renderer.h"
+#include "base/logging.h"
namespace media {
@@ -10,18 +11,27 @@ Renderer::Renderer() = default;
Renderer::~Renderer() = default;
+void Renderer::SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) {
+ DLOG(WARNING) << "CdmContext is not supported.";
+ std::move(cdm_attached_cb).Run(false);
+}
+
void Renderer::OnSelectedVideoTracksChanged(
const std::vector<DemuxerStream*>& enabled_tracks,
base::OnceClosure change_completed_cb) {
- std::move(change_completed_cb).Run();
DLOG(WARNING) << "Track changes are not supported.";
+ std::move(change_completed_cb).Run();
}
void Renderer::OnEnabledAudioTracksChanged(
const std::vector<DemuxerStream*>& enabled_tracks,
base::OnceClosure change_completed_cb) {
- std::move(change_completed_cb).Run();
DLOG(WARNING) << "Track changes are not supported.";
+ std::move(change_completed_cb).Run();
+}
+
+void Renderer::SetPreservesPitch(bool preserves_pitch) {
+ // Not supported by most renderers.
}
} // namespace media
diff --git a/chromium/media/base/renderer.h b/chromium/media/base/renderer.h
index 7481d000bef..b6e1a73373b 100644
--- a/chromium/media/base/renderer.h
+++ b/chromium/media/base/renderer.h
@@ -11,13 +11,13 @@
#include "base/optional.h"
#include "base/time/time.h"
#include "media/base/buffering_state.h"
-#include "media/base/cdm_context.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_export.h"
#include "media/base/pipeline_status.h"
namespace media {
+class CdmContext;
class MediaResource;
class RendererClient;
@@ -38,9 +38,10 @@ class MEDIA_EXPORT Renderer {
PipelineStatusCallback init_cb) = 0;
// Associates the |cdm_context| with this Renderer for decryption (and
- // decoding) of media data, then fires |cdm_attached_cb| with the result.
- virtual void SetCdm(CdmContext* cdm_context,
- CdmAttachedCB cdm_attached_cb) = 0;
+ // decoding) of media data, then fires |cdm_attached_cb| with whether the
+ // operation succeeded.
+ using CdmAttachedCB = base::OnceCallback<void(bool)>;
+ virtual void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb);
// Specifies a latency hint from the site. Renderers should clamp the hint
// value to reasonable min and max and use the resulting value as a target
@@ -50,6 +51,10 @@ class MEDIA_EXPORT Renderer {
// thresholds.
virtual void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) = 0;
+ // Sets whether pitch adjustment should be applied when the playback rate is
+ // different than 1.0.
+ virtual void SetPreservesPitch(bool preserves_pitch);
+
// The following functions must be called after Initialize().
// Discards any buffered data, executing |flush_cb| when completed.
diff --git a/chromium/media/base/renderer_factory_selector.h b/chromium/media/base/renderer_factory_selector.h
index b6dd257439c..c291fb35d20 100644
--- a/chromium/media/base/renderer_factory_selector.h
+++ b/chromium/media/base/renderer_factory_selector.h
@@ -46,7 +46,8 @@ enum class RendererFactoryType {
kCast = 5, // CastRendererClientFactory
kMediaFoundation = 6, // MediaFoundationRendererClientFactory
kFuchsia = 7, // FuchsiaRendererFactory
- kMaxValue = kFuchsia,
+ kRemoting = 8, // RemotingRendererFactory
+ kMaxValue = kRemoting,
};
class MEDIA_EXPORT RendererFactorySelector {
diff --git a/chromium/media/base/speech_recognition_client.h b/chromium/media/base/speech_recognition_client.h
index 1290cd88c25..8103b6a6eca 100644
--- a/chromium/media/base/speech_recognition_client.h
+++ b/chromium/media/base/speech_recognition_client.h
@@ -7,7 +7,9 @@
#include <memory>
+#include "base/callback.h"
#include "media/base/audio_buffer.h"
+#include "media/base/audio_bus.h"
#include "media/base/media_export.h"
namespace media {
@@ -16,11 +18,19 @@ namespace media {
// captions.
class MEDIA_EXPORT SpeechRecognitionClient {
public:
+ using OnReadyCallback = base::OnceCallback<void()>;
+
virtual ~SpeechRecognitionClient() = default;
virtual void AddAudio(scoped_refptr<AudioBuffer> buffer) = 0;
+ virtual void AddAudio(std::unique_ptr<media::AudioBus> audio_bus,
+ int sample_rate,
+ media::ChannelLayout channel_layout) = 0;
+
virtual bool IsSpeechRecognitionAvailable() = 0;
+
+ virtual void SetOnReadyCallback(OnReadyCallback callback) = 0;
};
} // namespace media
diff --git a/chromium/media/base/status.h b/chromium/media/base/status.h
index c06635b27a8..0e81b54f06a 100644
--- a/chromium/media/base/status.h
+++ b/chromium/media/base/status.h
@@ -179,6 +179,10 @@ class ErrorOr {
// the value in question.
ErrorOr(Status&& error) : error_(std::move(error)) {}
ErrorOr(const Status& error) : error_(error) {}
+ ErrorOr(StatusCode code,
+ const base::Location& location = base::Location::Current())
+ : error_(Status(code, "", location)) {}
+
ErrorOr(T&& value) : value_(std::move(value)) {}
ErrorOr(const T& value) : value_(value) {}
diff --git a/chromium/media/base/status_codes.h b/chromium/media/base/status_codes.h
index 468ff813946..84fd4d50274 100644
--- a/chromium/media/base/status_codes.h
+++ b/chromium/media/base/status_codes.h
@@ -53,6 +53,10 @@ enum class StatusCode : StatusCodeType {
kCantCreateEglStream = 0x00000304,
kCantCreateEglStreamConsumer = 0x00000305,
kCantCreateEglStreamProducer = 0x00000306,
+ kCannotCreateTextureSelector = 0x00000307,
+ kCannotQueryID3D11Multithread = 0x00000308,
+ kCannotGetDecoderConfigCount = 0x00000309,
+ kCannotGetDecoderConfig = 0x0000030A,
// MojoDecoder Errors: 0x04
kMojoDecoderNoWrappedDecoder = 0x00000401,
@@ -69,8 +73,9 @@ enum class StatusCode : StatusCodeType {
kV4l2FailedFileCapabilitiesCheck = 0x00000505,
kV4l2FailedResourceAllocation = 0x00000506,
kV4l2BadFormat = 0x00000507,
- kVaapiReinitializedDuringDecode = 0x00000508,
- kVaapiFailedAcceleratorCreation = 0x00000509,
+ kV4L2FailedToStartStreamQueue = 0x00000508,
+ kVaapiReinitializedDuringDecode = 0x00000509,
+ kVaapiFailedAcceleratorCreation = 0x00000510,
// Encoder Error: 0x06
kEncoderInitializeNeverCompleted = 0x00000601,
diff --git a/chromium/media/base/supported_types.cc b/chromium/media/base/supported_types.cc
index c98ce24e557..f3ccd25f1c4 100644
--- a/chromium/media/base/supported_types.cc
+++ b/chromium/media/base/supported_types.cc
@@ -7,6 +7,7 @@
#include "base/feature_list.h"
#include "base/logging.h"
#include "base/no_destructor.h"
+#include "base/notreached.h"
#include "build/build_config.h"
#include "media/base/media.h"
#include "media/base/media_client.h"
diff --git a/chromium/media/base/test_helpers.h b/chromium/media/base/test_helpers.h
index bffe5960363..4fd250ca230 100644
--- a/chromium/media/base/test_helpers.h
+++ b/chromium/media/base/test_helpers.h
@@ -328,7 +328,14 @@ MATCHER_P2(AudioNonKeyframe, pts_microseconds, dts_microseconds, "") {
base::NumberToString(pts_microseconds) + "us and DTS " +
base::NumberToString(dts_microseconds) +
"us indicated the frame is not a random access point (key "
- "frame). All audio frames are expected to be key frames.");
+ "frame). All audio frames are expected to be key frames for "
+ "the current audio codec.");
+}
+
+MATCHER(AudioNonKeyframeOutOfOrder, "") {
+ return CONTAINS_STRING(arg,
+ "Dependent audio frame with invalid decreasing "
+ "presentation timestamp detected.");
}
MATCHER_P2(SkippingSpliceAtOrBefore,
@@ -451,6 +458,20 @@ MATCHER_P3(DroppedFrameCheckAppendWindow,
base::NumberToString(append_window_end_us) + "us");
}
+MATCHER_P3(DroppedAppendWindowUnusedPreroll,
+ pts_us,
+ delta_us,
+ next_pts_us,
+ "") {
+ return CONTAINS_STRING(
+ arg,
+ "Partial append window trimming dropping unused audio preroll buffer "
+ "with PTS " +
+ base::NumberToString(pts_us) + "us that ends too far (" +
+ base::NumberToString(delta_us) + "us) from next buffer with PTS " +
+ base::NumberToString(next_pts_us) + "us");
+}
+
} // namespace media
#endif // MEDIA_BASE_TEST_HELPERS_H_
diff --git a/chromium/media/base/test_random.h b/chromium/media/base/test_random.h
index f08c6f2cb12..6c5eb51392b 100644
--- a/chromium/media/base/test_random.h
+++ b/chromium/media/base/test_random.h
@@ -7,7 +7,7 @@
#include <stdint.h>
-#include "base/logging.h"
+#include "base/check_op.h"
// Vastly simplified ACM random class meant to only be used for testing.
// This class is meant to generate predictable sequences of pseudorandom
diff --git a/chromium/media/base/user_input_monitor_linux.cc b/chromium/media/base/user_input_monitor_linux.cc
index 00d818f1f68..4ac78970651 100644
--- a/chromium/media/base/user_input_monitor_linux.cc
+++ b/chromium/media/base/user_input_monitor_linux.cc
@@ -96,9 +96,9 @@ class UserInputMonitorLinux : public UserInputMonitorBase {
UserInputMonitorLinuxCore::UserInputMonitorLinuxCore(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
: io_task_runner_(io_task_runner),
- x_control_display_(NULL),
- x_record_display_(NULL),
- x_record_range_(NULL),
+ x_control_display_(nullptr),
+ x_record_display_(nullptr),
+ x_record_range_(nullptr),
x_record_context_(0) {}
UserInputMonitorLinuxCore::~UserInputMonitorLinuxCore() {
@@ -138,8 +138,8 @@ void UserInputMonitorLinuxCore::StartMonitor() {
}
int xr_opcode, xr_event, xr_error;
- if (!XQueryExtension(
- x_control_display_, "RECORD", &xr_opcode, &xr_event, &xr_error)) {
+ if (!XQueryExtension(x_control_display_, "RECORD", &xr_opcode, &xr_event,
+ &xr_error)) {
LOG(ERROR) << "X Record extension not available.";
StopMonitor();
return;
@@ -154,8 +154,8 @@ void UserInputMonitorLinuxCore::StartMonitor() {
return;
}
- x_record_range_->device_events.first = KeyPress;
- x_record_range_->device_events.last = KeyRelease;
+ x_record_range_->device_events.first = x11::KeyEvent::Press;
+ x_record_range_->device_events.last = x11::KeyEvent::Release;
if (x_record_context_) {
XRecordDisableContext(x_control_display_, x_record_context_);
@@ -175,8 +175,7 @@ void UserInputMonitorLinuxCore::StartMonitor() {
return;
}
- if (!XRecordEnableContextAsync(x_record_display_,
- x_record_context_,
+ if (!XRecordEnableContextAsync(x_record_display_, x_record_context_,
&UserInputMonitorLinuxCore::ProcessReply,
reinterpret_cast<XPointer>(this))) {
LOG(ERROR) << "XRecordEnableContextAsync failed.";
@@ -210,7 +209,7 @@ void UserInputMonitorLinuxCore::StopMonitor() {
if (x_record_range_) {
XFree(x_record_range_);
- x_record_range_ = NULL;
+ x_record_range_ = nullptr;
}
// Context must be disabled via the control channel because we can't send
@@ -225,11 +224,11 @@ void UserInputMonitorLinuxCore::StopMonitor() {
}
if (x_record_display_) {
XCloseDisplay(x_record_display_);
- x_record_display_ = NULL;
+ x_record_display_ = nullptr;
}
if (x_control_display_) {
XCloseDisplay(x_control_display_);
- x_control_display_ = NULL;
+ x_control_display_ = nullptr;
}
key_press_count_mapping_.reset();
@@ -249,10 +248,12 @@ void UserInputMonitorLinuxCore::OnXEvent() {
void UserInputMonitorLinuxCore::ProcessXEvent(xEvent* event) {
DCHECK(io_task_runner_->BelongsToCurrentThread());
- DCHECK(event->u.u.type == KeyRelease || event->u.u.type == KeyPress);
+ DCHECK(event->u.u.type == x11::KeyEvent::Release ||
+ event->u.u.type == x11::KeyEvent::Press);
- ui::EventType type =
- (event->u.u.type == KeyPress) ? ui::ET_KEY_PRESSED : ui::ET_KEY_RELEASED;
+ ui::EventType type = (event->u.u.type == x11::KeyEvent::Press)
+ ? ui::ET_KEY_PRESSED
+ : ui::ET_KEY_RELEASED;
KeySym key_sym =
XkbKeycodeToKeysym(x_control_display_, event->u.u.detail, 0, 0);
diff --git a/chromium/media/base/vector_math.cc b/chromium/media/base/vector_math.cc
index fabb796d324..a0333cb6885 100644
--- a/chromium/media/base/vector_math.cc
+++ b/chromium/media/base/vector_math.cc
@@ -8,6 +8,7 @@
#include <algorithm>
#include "base/check_op.h"
+#include "base/memory/aligned_memory.h"
#include "build/build_config.h"
// NaCl does not allow intrinsics.
@@ -40,9 +41,8 @@ namespace media {
namespace vector_math {
void FMAC(const float src[], float scale, int len, float dest[]) {
- // Ensure |src| and |dest| are 16-byte aligned.
- DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(src) & (kRequiredAlignment - 1));
- DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(dest) & (kRequiredAlignment - 1));
+ DCHECK(base::IsAligned(src, kRequiredAlignment));
+ DCHECK(base::IsAligned(dest, kRequiredAlignment));
return FMAC_FUNC(src, scale, len, dest);
}
@@ -52,9 +52,8 @@ void FMAC_C(const float src[], float scale, int len, float dest[]) {
}
void FMUL(const float src[], float scale, int len, float dest[]) {
- // Ensure |src| and |dest| are 16-byte aligned.
- DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(src) & (kRequiredAlignment - 1));
- DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(dest) & (kRequiredAlignment - 1));
+ DCHECK(base::IsAligned(src, kRequiredAlignment));
+ DCHECK(base::IsAligned(dest, kRequiredAlignment));
return FMUL_FUNC(src, scale, len, dest);
}
@@ -65,8 +64,7 @@ void FMUL_C(const float src[], float scale, int len, float dest[]) {
std::pair<float, float> EWMAAndMaxPower(
float initial_value, const float src[], int len, float smoothing_factor) {
- // Ensure |src| is 16-byte aligned.
- DCHECK_EQ(0u, reinterpret_cast<uintptr_t>(src) & (kRequiredAlignment - 1));
+ DCHECK(base::IsAligned(src, kRequiredAlignment));
return EWMAAndMaxPower_FUNC(initial_value, src, len, smoothing_factor);
}
diff --git a/chromium/media/base/video_codecs.cc b/chromium/media/base/video_codecs.cc
index 4c04f29f6b2..fd84206a453 100644
--- a/chromium/media/base/video_codecs.cc
+++ b/chromium/media/base/video_codecs.cc
@@ -5,6 +5,7 @@
#include "media/base/video_codecs.h"
#include "base/logging.h"
+#include "base/notreached.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
diff --git a/chromium/media/base/video_decoder.cc b/chromium/media/base/video_decoder.cc
index 3a7647313fb..626ca0f0182 100644
--- a/chromium/media/base/video_decoder.cc
+++ b/chromium/media/base/video_decoder.cc
@@ -15,10 +15,6 @@ namespace media {
VideoDecoder::VideoDecoder() = default;
-void VideoDecoder::Destroy() {
- delete this;
-}
-
VideoDecoder::~VideoDecoder() = default;
bool VideoDecoder::IsPlatformDecoder() const {
@@ -66,12 +62,3 @@ int VideoDecoder::GetRecommendedThreadCount(int desired_threads) {
}
} // namespace media
-
-namespace std {
-
-void default_delete<media::VideoDecoder>::operator()(
- media::VideoDecoder* ptr) const {
- ptr->Destroy();
-}
-
-} // namespace std
diff --git a/chromium/media/base/video_decoder.h b/chromium/media/base/video_decoder.h
index bbb0b11b151..06488700d0a 100644
--- a/chromium/media/base/video_decoder.h
+++ b/chromium/media/base/video_decoder.h
@@ -39,6 +39,7 @@ class MEDIA_EXPORT VideoDecoder {
using DecodeCB = base::OnceCallback<void(DecodeStatus)>;
VideoDecoder();
+ virtual ~VideoDecoder();
// Returns the name of the decoder for logging and decoder selection purposes.
// This name should be available immediately after construction (e.g. before
@@ -134,39 +135,9 @@ class MEDIA_EXPORT VideoDecoder {
// [|limits::kMinVideoDecodeThreads|, |limits::kMaxVideoDecodeThreads|].
static int GetRecommendedThreadCount(int desired_threads);
- protected:
- // Deletion is only allowed via Destroy().
- virtual ~VideoDecoder();
-
- private:
- friend struct std::default_delete<VideoDecoder>;
-
- // Fires any pending callbacks, stops and destroys the decoder. After this
- // call, external resources (e.g. raw pointers) |this| holds might be
- // invalidated immediately. So if the decoder is destroyed asynchronously
- // (e.g. DeleteSoon), external resources must be released in this call.
- virtual void Destroy();
-
DISALLOW_COPY_AND_ASSIGN(VideoDecoder);
};
} // namespace media
-namespace std {
-
-// Specialize std::default_delete to call Destroy().
-template <>
-struct MEDIA_EXPORT default_delete<media::VideoDecoder> {
- constexpr default_delete() = default;
-
- template <typename U,
- typename = typename std::enable_if<
- std::is_convertible<U*, media::VideoDecoder*>::value>::type>
- default_delete(const default_delete<U>& d) {}
-
- void operator()(media::VideoDecoder* ptr) const;
-};
-
-} // namespace std
-
#endif // MEDIA_BASE_VIDEO_DECODER_H_
diff --git a/chromium/media/base/video_decoder_config.h b/chromium/media/base/video_decoder_config.h
index 405be9f8102..052adc4f6d6 100644
--- a/chromium/media/base/video_decoder_config.h
+++ b/chromium/media/base/video_decoder_config.h
@@ -95,6 +95,8 @@ class MEDIA_EXPORT VideoDecoderConfig {
// in this region are valid.
const gfx::Size& coded_size() const { return coded_size_; }
+ void set_coded_size(const gfx::Size& coded_size) { coded_size_ = coded_size; }
+
// Region of coded_size() that contains image data, also known as the clean
// aperture. Usually, but not always, origin-aligned (top-left).
const gfx::Rect& visible_rect() const { return visible_rect_; }
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index 0bb025a1d5f..a069012e49d 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -338,8 +338,7 @@ scoped_refptr<VideoFrame> VideoFrame::CreateVideoHoleFrame(
scoped_refptr<VideoFrame> frame =
new VideoFrame(*layout, StorageType::STORAGE_OPAQUE,
gfx::Rect(natural_size), natural_size, timestamp);
- frame->metadata()->SetUnguessableToken(VideoFrameMetadata::OVERLAY_PLANE_ID,
- overlay_plane_id);
+ frame->metadata()->overlay_plane_id = overlay_plane_id;
return frame;
}
@@ -366,7 +365,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapNativeTextures(
if (format != PIXEL_FORMAT_ARGB && format != PIXEL_FORMAT_XRGB &&
format != PIXEL_FORMAT_NV12 && format != PIXEL_FORMAT_I420 &&
format != PIXEL_FORMAT_ABGR && format != PIXEL_FORMAT_XR30 &&
- format != PIXEL_FORMAT_XB30) {
+ format != PIXEL_FORMAT_XB30 && format != PIXEL_FORMAT_P016LE) {
DLOG(ERROR) << "Unsupported pixel format: "
<< VideoPixelFormatToString(format);
return nullptr;
@@ -750,7 +749,7 @@ scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() {
}
scoped_refptr<VideoFrame> frame = new VideoFrame(
*layout, STORAGE_UNKNOWN, gfx::Rect(), gfx::Size(), kNoTimestamp);
- frame->metadata()->SetBoolean(VideoFrameMetadata::END_OF_STREAM, true);
+ frame->metadata()->end_of_stream = true;
return frame;
}
@@ -1119,7 +1118,7 @@ gpu::SyncToken VideoFrame::UpdateReleaseSyncToken(SyncTokenClient* client) {
}
std::string VideoFrame::AsHumanReadableString() const {
- if (metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM))
+ if (metadata()->end_of_stream)
return "end of stream";
std::ostringstream s;
diff --git a/chromium/media/base/video_frame.h b/chromium/media/base/video_frame.h
index 271fcdf19fd..51515be694a 100644
--- a/chromium/media/base/video_frame.h
+++ b/chromium/media/base/video_frame.h
@@ -14,8 +14,8 @@
#include <vector>
#include "base/callback.h"
+#include "base/check_op.h"
#include "base/hash/md5.h"
-#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
#include "base/memory/ref_counted.h"
@@ -535,8 +535,16 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
//
// TODO(miu): Move some of the "extra" members of VideoFrame (below) into
// here as a later clean-up step.
+ //
+ // TODO(https://crbug.com/1096727): change the return type to const&.
const VideoFrameMetadata* metadata() const { return &metadata_; }
VideoFrameMetadata* metadata() { return &metadata_; }
+ void set_metadata(const VideoFrameMetadata& metadata) {
+ metadata_ = metadata;
+ }
+
+ // Resets |metadata_|.
+ void clear_metadata() { set_metadata(VideoFrameMetadata()); }
// The time span between the current frame and the first frame of the stream.
// This is the media timestamp, and not the reference time.
diff --git a/chromium/media/base/video_frame_metadata.cc b/chromium/media/base/video_frame_metadata.cc
index ad45ea010c9..2c8475df48c 100644
--- a/chromium/media/base/video_frame_metadata.cc
+++ b/chromium/media/base/video_frame_metadata.cc
@@ -9,194 +9,56 @@
#include <vector>
#include "base/check_op.h"
-#include "base/no_destructor.h"
#include "base/strings/string_number_conversions.h"
-#include "base/value_conversions.h"
+#include "base/util/values/values_util.h"
#include "ui/gfx/geometry/rect.h"
namespace media {
-namespace {
-
-std::vector<std::string> CreateInternalKeys() {
- std::vector<std::string> result(VideoFrameMetadata::NUM_KEYS);
- for (size_t i = 0; i < result.size(); i++)
- result[i] = base::NumberToString(i);
- return result;
-}
-
-// Map enum key to internal StringPiece key used by base::DictionaryValue.
-inline base::StringPiece ToInternalKey(VideoFrameMetadata::Key key) {
- DCHECK_LT(key, VideoFrameMetadata::NUM_KEYS);
- static const base::NoDestructor<std::vector<std::string>> internal_keys(
- CreateInternalKeys());
- return (*internal_keys)[int{key}];
-}
-
-} // namespace
-
VideoFrameMetadata::VideoFrameMetadata() = default;
VideoFrameMetadata::~VideoFrameMetadata() = default;
-bool VideoFrameMetadata::HasKey(Key key) const {
- return dictionary_.HasKey(ToInternalKey(key));
-}
-
-void VideoFrameMetadata::SetBoolean(Key key, bool value) {
- dictionary_.SetKey(ToInternalKey(key), base::Value(value));
-}
-
-void VideoFrameMetadata::SetInteger(Key key, int value) {
- dictionary_.SetKey(ToInternalKey(key), base::Value(value));
-}
-
-void VideoFrameMetadata::SetDouble(Key key, double value) {
- dictionary_.SetKey(ToInternalKey(key), base::Value(value));
-}
-
-void VideoFrameMetadata::SetRotation(Key key, VideoRotation value) {
- DCHECK_EQ(ROTATION, key);
- dictionary_.SetKey(ToInternalKey(key), base::Value(value));
-}
-
-void VideoFrameMetadata::SetString(Key key, const std::string& value) {
- dictionary_.SetKey(
- ToInternalKey(key),
-
- // Using BlobStorage since we don't want the |value| interpreted as having
- // any particular character encoding (e.g., UTF-8) by
- // base::DictionaryValue.
- base::Value(base::Value::BlobStorage(value.begin(), value.end())));
-}
-
-void VideoFrameMetadata::SetTimeDelta(Key key, const base::TimeDelta& value) {
- dictionary_.SetKey(ToInternalKey(key), base::CreateTimeDeltaValue(value));
-}
-
-void VideoFrameMetadata::SetTimeTicks(Key key, const base::TimeTicks& value) {
- // Serialize TimeTicks as TimeDeltas.
- dictionary_.SetKey(ToInternalKey(key),
- base::CreateTimeDeltaValue(value - base::TimeTicks()));
-}
-
-void VideoFrameMetadata::SetUnguessableToken(
- Key key,
- const base::UnguessableToken& value) {
- dictionary_.SetKey(ToInternalKey(key),
- base::CreateUnguessableTokenValue(value));
-}
-
-void VideoFrameMetadata::SetRect(Key key, const gfx::Rect& value) {
- base::Value init[] = {base::Value(value.x()), base::Value(value.y()),
- base::Value(value.width()),
- base::Value(value.height())};
- dictionary_.SetKey(ToInternalKey(key),
- base::Value(base::Value::ListStorage(
- std::make_move_iterator(std::begin(init)),
- std::make_move_iterator(std::end(init)))));
-}
-
-bool VideoFrameMetadata::GetBoolean(Key key, bool* value) const {
- DCHECK(value);
- auto opt_bool = dictionary_.FindBoolKey(ToInternalKey(key));
- if (opt_bool)
- *value = opt_bool.value();
+VideoFrameMetadata::VideoFrameMetadata(const VideoFrameMetadata& other) =
+ default;
- return opt_bool.has_value();
-}
-
-bool VideoFrameMetadata::GetInteger(Key key, int* value) const {
- DCHECK(value);
- auto opt_int = dictionary_.FindIntKey(ToInternalKey(key));
- if (opt_int)
- *value = opt_int.value();
-
- return opt_int.has_value();
-}
-
-bool VideoFrameMetadata::GetDouble(Key key, double* value) const {
- DCHECK(value);
- auto opt_double = dictionary_.FindDoubleKey(ToInternalKey(key));
- if (opt_double)
- *value = opt_double.value();
-
- return opt_double.has_value();
-}
-
-bool VideoFrameMetadata::GetRotation(Key key, VideoRotation* value) const {
- DCHECK_EQ(ROTATION, key);
- DCHECK(value);
- auto opt_int = dictionary_.FindIntKey(ToInternalKey(key));
- if (opt_int)
- *value = static_cast<VideoRotation>(opt_int.value());
- return opt_int.has_value();
-}
-
-bool VideoFrameMetadata::GetString(Key key, std::string* value) const {
- DCHECK(value);
- const base::Value::BlobStorage* const binary_value =
- dictionary_.FindBlobKey(ToInternalKey(key));
-
- if (!!binary_value)
- value->assign(binary_value->begin(), binary_value->end());
-
- return !!binary_value;
-}
-
-bool VideoFrameMetadata::GetTimeDelta(Key key, base::TimeDelta* value) const {
- const base::Value* internal_value = dictionary_.FindKey(ToInternalKey(key));
- if (!internal_value)
- return false;
- return base::GetValueAsTimeDelta(*internal_value, value);
-}
-
-bool VideoFrameMetadata::GetTimeTicks(Key key, base::TimeTicks* value) const {
- // Deserialize TimeTicks from TimeDelta.
- const base::Value* internal_value = dictionary_.FindKey(ToInternalKey(key));
- base::TimeDelta delta;
-
- if (!internal_value || !base::GetValueAsTimeDelta(*internal_value, &delta))
- return false;
-
- *value = base::TimeTicks() + delta;
- return true;
-}
-
-bool VideoFrameMetadata::GetUnguessableToken(
- Key key,
- base::UnguessableToken* value) const {
- const base::Value* internal_value = dictionary_.FindKey(ToInternalKey(key));
- if (!internal_value)
- return false;
- return base::GetValueAsUnguessableToken(*internal_value, value);
-}
-
-bool VideoFrameMetadata::GetRect(Key key, gfx::Rect* value) const {
- const base::Value* internal_value =
- dictionary_.FindListKey(ToInternalKey(key));
- if (!internal_value || internal_value->GetList().size() != 4)
- return false;
- *value = gfx::Rect(internal_value->GetList()[0].GetInt(),
- internal_value->GetList()[1].GetInt(),
- internal_value->GetList()[2].GetInt(),
- internal_value->GetList()[3].GetInt());
- return true;
-}
-
-bool VideoFrameMetadata::IsTrue(Key key) const {
- bool value = false;
- return GetBoolean(key, &value) && value;
-}
-
-void VideoFrameMetadata::MergeInternalValuesFrom(const base::Value& in) {
- // This function CHECKs if |in| is a dictionary.
- dictionary_.MergeDictionary(&in);
-}
+#define MERGE_FIELD(a, source) \
+ if (source->a) \
+ this->a = source->a
void VideoFrameMetadata::MergeMetadataFrom(
const VideoFrameMetadata* metadata_source) {
- dictionary_.MergeDictionary(&metadata_source->dictionary_);
+ MERGE_FIELD(allow_overlay, metadata_source);
+ MERGE_FIELD(capture_begin_time, metadata_source);
+ MERGE_FIELD(capture_end_time, metadata_source);
+ MERGE_FIELD(capture_counter, metadata_source);
+ MERGE_FIELD(capture_update_rect, metadata_source);
+ MERGE_FIELD(copy_required, metadata_source);
+ MERGE_FIELD(end_of_stream, metadata_source);
+ MERGE_FIELD(frame_duration, metadata_source);
+ MERGE_FIELD(frame_rate, metadata_source);
+ MERGE_FIELD(interactive_content, metadata_source);
+ MERGE_FIELD(reference_time, metadata_source);
+ MERGE_FIELD(resource_utilization, metadata_source);
+ MERGE_FIELD(read_lock_fences_enabled, metadata_source);
+ MERGE_FIELD(rotation, metadata_source);
+ MERGE_FIELD(texture_owner, metadata_source);
+ MERGE_FIELD(wants_promotion_hint, metadata_source);
+ MERGE_FIELD(protected_video, metadata_source);
+ MERGE_FIELD(hw_protected, metadata_source);
+ MERGE_FIELD(overlay_plane_id, metadata_source);
+ MERGE_FIELD(power_efficient, metadata_source);
+ MERGE_FIELD(device_scale_factor, metadata_source);
+ MERGE_FIELD(page_scale_factor, metadata_source);
+ MERGE_FIELD(root_scroll_offset_x, metadata_source);
+ MERGE_FIELD(root_scroll_offset_y, metadata_source);
+ MERGE_FIELD(top_controls_visible_height, metadata_source);
+ MERGE_FIELD(decode_begin_time, metadata_source);
+ MERGE_FIELD(decode_end_time, metadata_source);
+ MERGE_FIELD(processing_time, metadata_source);
+ MERGE_FIELD(rtp_timestamp, metadata_source);
+ MERGE_FIELD(receive_time, metadata_source);
+ MERGE_FIELD(wallclock_frame_duration, metadata_source);
}
} // namespace media
diff --git a/chromium/media/base/video_frame_metadata.h b/chromium/media/base/video_frame_metadata.h
index 4e873e4152d..9f16b7fce27 100644
--- a/chromium/media/base/video_frame_metadata.h
+++ b/chromium/media/base/video_frame_metadata.h
@@ -16,177 +16,43 @@
#include "build/build_config.h"
#include "media/base/media_export.h"
#include "media/base/video_transformation.h"
-
-namespace gfx {
-class Rect;
-}
+#include "ui/gfx/geometry/rect.h"
namespace media {
class MEDIA_EXPORT VideoFrameMetadata {
public:
enum Key {
- // Sources of VideoFrames use this marker to indicate that the associated
- // VideoFrame can be overlayed, case in which its contents do not need to be
- // further composited but displayed directly. Use Get/SetBoolean() for
- // this Key.
ALLOW_OVERLAY,
-
- // Video capture begin/end timestamps. Consumers can use these values for
- // dynamic optimizations, logging stats, etc. Use Get/SetTimeTicks() for
- // these keys.
CAPTURE_BEGIN_TIME,
CAPTURE_END_TIME,
-
- // A counter that is increased by the producer of video frames each time
- // it pushes out a new frame. By looking for gaps in this counter, clients
- // can determine whether or not any frames have been dropped on the way from
- // the producer between two consecutively received frames. Note that the
- // counter may start at arbitrary values, so the absolute value of it has no
- // meaning.
CAPTURE_COUNTER,
-
- // A base::ListValue containing 4 integers representing x, y, width, height
- // of the rectangular region of the frame that has changed since the frame
- // with the directly preceding CAPTURE_COUNTER. If that frame was not
- // received, typically because it was dropped during transport from the
- // producer, clients must assume that the entire frame has changed.
- // The rectangle is relative to the full frame data, i.e. [0, 0,
- // coded_size().width(), coded_size().height()]. It does not have to be
- // fully contained within visible_rect().
CAPTURE_UPDATE_RECT,
-
- // Indicates that this frame must be copied to a new texture before use,
- // rather than being used directly. Specifically this is required for
- // WebView because of limitations about sharing surface textures between GL
- // contexts.
COPY_REQUIRED,
-
- // Indicates if the current frame is the End of its current Stream. Use
- // Get/SetBoolean() for this Key.
END_OF_STREAM,
-
- // The estimated duration of this frame (i.e., the amount of time between
- // the media timestamp of this frame and the next). Note that this is not
- // the same information provided by FRAME_RATE as the FRAME_DURATION can
- // vary unpredictably for every frame. Consumers can use this to optimize
- // playback scheduling, make encoding quality decisions, and/or compute
- // frame-level resource utilization stats. Use Get/SetTimeDelta() for this
- // key.
FRAME_DURATION,
-
- // Represents either the fixed frame rate, or the maximum frame rate to
- // expect from a variable-rate source. This value generally remains the
- // same for all frames in the same session. Use Get/SetDouble() for this
- // key.
FRAME_RATE,
-
- // This is a boolean that signals that the video capture engine detects
- // interactive content. One possible optimization that this signal can help
- // with is remote content: adjusting end-to-end latency down to help the
- // user better coordinate their actions.
- //
- // Use Get/SetBoolean for this key.
INTERACTIVE_CONTENT,
-
- // This field represents the local time at which either: 1) the frame was
- // generated, if it was done so locally; or 2) the targeted play-out time
- // of the frame, if it was generated from a remote source. This value is NOT
- // a high-resolution timestamp, and so it should not be used as a
- // presentation time; but, instead, it should be used for buffering playback
- // and for A/V synchronization purposes.
- // Use Get/SetTimeTicks() for this key.
REFERENCE_TIME,
-
- // A feedback signal that indicates the fraction of the tolerable maximum
- // amount of resources that were utilized to process this frame. A producer
- // can check this value after-the-fact, usually via a VideoFrame destruction
- // observer, to determine whether the consumer can handle more or less data
- // volume, and achieve the right quality versus performance trade-off.
- //
- // Use Get/SetDouble() for this key. Values are interpreted as follows:
- // Less than 0.0 is meaningless and should be ignored. 1.0 indicates a
- // maximum sustainable utilization. Greater than 1.0 indicates the consumer
- // is likely to stall or drop frames if the data volume is not reduced.
- //
- // Example: In a system that encodes and transmits video frames over the
- // network, this value can be used to indicate whether sufficient CPU
- // is available for encoding and/or sufficient bandwidth is available for
- // transmission over the network. The maximum of the two utilization
- // measurements would be used as feedback.
RESOURCE_UTILIZATION,
-
- // Sources of VideoFrames use this marker to indicate that an instance of
- // VideoFrameExternalResources produced from the associated video frame
- // should use read lock fences.
READ_LOCK_FENCES_ENABLED,
-
- // Indicates that the frame is rotated.
ROTATION,
-
- // Android only: if set, then this frame is not suitable for overlay, even
- // if ALLOW_OVERLAY is set. However, it allows us to process the overlay
- // to see if it would have been promoted, if it were backed by a SurfaceView
- // instead. This lets us figure out when SurfaceViews are appropriate.
TEXTURE_OWNER,
-
- // Android only: if set, then this frame's resource would like to be
- // notified about its promotability to an overlay.
WANTS_PROMOTION_HINT,
-
- // This video frame comes from protected content.
PROTECTED_VIDEO,
-
- // This video frame is protected by hardware. This option is valid only if
- // PROTECTED_VIDEO is also set to true.
HW_PROTECTED,
-
- // An UnguessableToken that identifies VideoOverlayFactory that created
- // this VideoFrame. It's used by Cast to help with video hole punch.
- // Use Get/SetUnguessableToken() for this key.
OVERLAY_PLANE_ID,
-
- // Whether this frame was decoded in a power efficient way.
POWER_EFFICIENT,
-
- // CompositorFrameMetadata variables associated with this frame. Used for
- // remote debugging.
- // Use Get/SetDouble() for these keys.
- // TODO(crbug.com/832220): Use a customized dictionary value instead of
- // using these keys directly.
DEVICE_SCALE_FACTOR,
PAGE_SCALE_FACTOR,
ROOT_SCROLL_OFFSET_X,
ROOT_SCROLL_OFFSET_Y,
TOP_CONTROLS_VISIBLE_HEIGHT,
-
- // If present, this field represents the local time at which the VideoFrame
- // was decoded from whichever format it was encoded in. Sometimes only
- // DECODE_END_TIME will be present. Use Get/SetTimeTicks() for this key.
DECODE_BEGIN_TIME,
DECODE_END_TIME,
-
- // If present, this field represents the elapsed time from the submission of
- // the encoded packet with the same PTS as this frame to the decoder until
- // the decoded frame was ready for presentation. Stored as base::TimeDelta.
PROCESSING_TIME,
-
- // The RTP timestamp associated with this video frame. Stored as a double
- // since base::DictionaryValue doesn't have a uint32_t type.
- //
- // https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource
RTP_TIMESTAMP,
-
- // For video frames coming from a remote source, this is the time the
- // encoded frame was received by the platform, i.e., the time at
- // which the last packet belonging to this frame was received over the
- // network.
RECEIVE_TIME,
-
- // If present, this field represents the duration this frame is ideally
- // expected to spend on the screen during playback. Unlike FRAME_DURATION
- // this field takes into account current playback rate.
- // Use Get/SetTimeDelta() for this key.
WALLCLOCK_FRAME_DURATION,
NUM_KEYS
@@ -195,47 +61,161 @@ class MEDIA_EXPORT VideoFrameMetadata {
VideoFrameMetadata();
~VideoFrameMetadata();
- bool HasKey(Key key) const;
-
- void Clear() { dictionary_.Clear(); }
-
- // Setters. Overwrites existing value, if present.
- void SetBoolean(Key key, bool value);
- void SetInteger(Key key, int value);
- void SetDouble(Key key, double value);
- void SetRotation(Key key, VideoRotation value);
- void SetString(Key key, const std::string& value);
- void SetTimeDelta(Key key, const base::TimeDelta& value);
- void SetTimeTicks(Key key, const base::TimeTicks& value);
- void SetUnguessableToken(Key key, const base::UnguessableToken& value);
- void SetRect(Key key, const gfx::Rect& value);
-
- // Getters. Returns true if |key| is present, and its value has been set.
- bool GetBoolean(Key key, bool* value) const WARN_UNUSED_RESULT;
- bool GetInteger(Key key, int* value) const WARN_UNUSED_RESULT;
- bool GetDouble(Key key, double* value) const WARN_UNUSED_RESULT;
- bool GetRotation(Key key, VideoRotation* value) const WARN_UNUSED_RESULT;
- bool GetString(Key key, std::string* value) const WARN_UNUSED_RESULT;
- bool GetTimeDelta(Key key, base::TimeDelta* value) const WARN_UNUSED_RESULT;
- bool GetTimeTicks(Key key, base::TimeTicks* value) const WARN_UNUSED_RESULT;
- bool GetUnguessableToken(Key key, base::UnguessableToken* value) const
- WARN_UNUSED_RESULT;
- bool GetRect(Key key, gfx::Rect* value) const WARN_UNUSED_RESULT;
-
- // Convenience method that returns true if |key| exists and is set to true.
- bool IsTrue(Key key) const WARN_UNUSED_RESULT;
-
- // For serialization.
- void MergeInternalValuesFrom(const base::Value& in);
- const base::Value& GetInternalValues() const { return dictionary_; }
+ VideoFrameMetadata(const VideoFrameMetadata& other);
// Merges internal values from |metadata_source|.
void MergeMetadataFrom(const VideoFrameMetadata* metadata_source);
- private:
- base::DictionaryValue dictionary_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoFrameMetadata);
+ // Sources of VideoFrames use this marker to indicate that the associated
+ // VideoFrame can be overlaid, case in which its contents do not need to be
+ // further composited but displayed directly.
+ bool allow_overlay = false;
+
+ // Video capture begin/end timestamps. Consumers can use these values for
+ // dynamic optimizations, logging stats, etc.
+ base::Optional<base::TimeTicks> capture_begin_time;
+ base::Optional<base::TimeTicks> capture_end_time;
+
+ // A counter that is increased by the producer of video frames each time
+ // it pushes out a new frame. By looking for gaps in this counter, clients
+ // can determine whether or not any frames have been dropped on the way from
+ // the producer between two consecutively received frames. Note that the
+ // counter may start at arbitrary values, so the absolute value of it has no
+ // meaning.
+ base::Optional<int> capture_counter;
+
+ // The rectangular region of the frame that has changed since the frame
+ // with the directly preceding CAPTURE_COUNTER. If that frame was not
+ // received, typically because it was dropped during transport from the
+ // producer, clients must assume that the entire frame has changed.
+ // The rectangle is relative to the full frame data, i.e. [0, 0,
+ // coded_size().width(), coded_size().height()]. It does not have to be
+ // fully contained within visible_rect().
+ base::Optional<gfx::Rect> capture_update_rect;
+
+ // Indicates that this frame must be copied to a new texture before use,
+ // rather than being used directly. Specifically this is required for
+ // WebView because of limitations about sharing surface textures between GL
+ // contexts.
+ bool copy_required = false;
+
+ // Indicates if the current frame is the End of its current Stream.
+ bool end_of_stream = false;
+
+ // The estimated duration of this frame (i.e., the amount of time between
+ // the media timestamp of this frame and the next). Note that this is not
+ // the same information provided by FRAME_RATE as the FRAME_DURATION can
+ // vary unpredictably for every frame. Consumers can use this to optimize
+ // playback scheduling, make encoding quality decisions, and/or compute
+ // frame-level resource utilization stats.
+ base::Optional<base::TimeDelta> frame_duration;
+
+ // Represents either the fixed frame rate, or the maximum frame rate to
+ // expect from a variable-rate source. This value generally remains the
+ // same for all frames in the same session.
+ base::Optional<double> frame_rate;
+
+ // This is a boolean that signals that the video capture engine detects
+ // interactive content. One possible optimization that this signal can help
+ // with is remote content: adjusting end-to-end latency down to help the
+ // user better coordinate their actions.
+ bool interactive_content = false;
+
+ // This field represents the local time at which either: 1) the frame was
+ // generated, if it was done so locally; or 2) the targeted play-out time
+ // of the frame, if it was generated from a remote source. This value is NOT
+ // a high-resolution timestamp, and so it should not be used as a
+ // presentation time; but, instead, it should be used for buffering playback
+ // and for A/V synchronization purposes.
+ base::Optional<base::TimeTicks> reference_time;
+
+ // A feedback signal that indicates the fraction of the tolerable maximum
+ // amount of resources that were utilized to process this frame. A producer
+ // can check this value after-the-fact, usually via a VideoFrame destruction
+ // observer, to determine whether the consumer can handle more or less data
+ // volume, and achieve the right quality versus performance trade-off.
+ //
+ // Values are interpreted as follows:
+ // Less than 0.0 is meaningless and should be ignored. 1.0 indicates a
+ // maximum sustainable utilization. Greater than 1.0 indicates the consumer
+ // is likely to stall or drop frames if the data volume is not reduced.
+ //
+ // Example: In a system that encodes and transmits video frames over the
+ // network, this value can be used to indicate whether sufficient CPU
+ // is available for encoding and/or sufficient bandwidth is available for
+ // transmission over the network. The maximum of the two utilization
+ // measurements would be used as feedback.
+ base::Optional<double> resource_utilization;
+
+ // Sources of VideoFrames use this marker to indicate that an instance of
+ // VideoFrameExternalResources produced from the associated video frame
+ // should use read lock fences.
+ bool read_lock_fences_enabled = false;
+
+ // Indicates that the frame is rotated.
+ base::Optional<VideoRotation> rotation;
+
+ // Android only: if set, then this frame is not suitable for overlay, even
+ // if ALLOW_OVERLAY is set. However, it allows us to process the overlay
+ // to see if it would have been promoted, if it were backed by a SurfaceView
+ // instead. This lets us figure out when SurfaceViews are appropriate.
+ bool texture_owner = false;
+
+ // Android only: if set, then this frame's resource would like to be
+ // notified about its promotability to an overlay.
+ bool wants_promotion_hint = false;
+
+ // This video frame comes from protected content.
+ bool protected_video = false;
+
+ // This video frame is protected by hardware. This option is valid only if
+ // PROTECTED_VIDEO is also set to true.
+ bool hw_protected = false;
+
+ // An UnguessableToken that identifies VideoOverlayFactory that created
+ // this VideoFrame. It's used by Cast to help with video hole punch.
+ base::Optional<base::UnguessableToken> overlay_plane_id;
+
+ // Whether this frame was decoded in a power efficient way.
+ bool power_efficient = false;
+
+ // CompositorFrameMetadata variables associated with this frame. Used for
+ // remote debugging.
+ // TODO(crbug.com/832220): Use a customized dictionary value instead of
+ // using these keys directly.
+ base::Optional<double> device_scale_factor;
+ base::Optional<double> page_scale_factor;
+ base::Optional<double> root_scroll_offset_x;
+ base::Optional<double> root_scroll_offset_y;
+ base::Optional<double> top_controls_visible_height;
+
+ // If present, this field represents the local time at which the VideoFrame
+ // was decoded from whichever format it was encoded in. Sometimes only
+ // DECODE_END_TIME will be present.
+ base::Optional<base::TimeTicks> decode_begin_time;
+ base::Optional<base::TimeTicks> decode_end_time;
+
+ // If present, this field represents the elapsed time from the submission of
+ // the encoded packet with the same PTS as this frame to the decoder until
+ // the decoded frame was ready for presentation.
+ base::Optional<base::TimeDelta> processing_time;
+
+ // The RTP timestamp associated with this video frame. Stored as a double
+ // since base::DictionaryValue doesn't have a uint32_t type.
+ //
+ // https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource
+ base::Optional<double> rtp_timestamp;
+
+ // For video frames coming from a remote source, this is the time the
+ // encoded frame was received by the platform, i.e., the time at
+ // which the last packet belonging to this frame was received over the
+ // network.
+ base::Optional<base::TimeTicks> receive_time;
+
+ // If present, this field represents the duration this frame is ideally
+ // expected to spend on the screen during playback. Unlike FRAME_DURATION
+ // this field takes into account current playback rate.
+ base::Optional<base::TimeDelta> wallclock_frame_duration;
};
} // namespace media
diff --git a/chromium/media/base/video_frame_pool.cc b/chromium/media/base/video_frame_pool.cc
index 1d029f054b6..4541324af7b 100644
--- a/chromium/media/base/video_frame_pool.cc
+++ b/chromium/media/base/video_frame_pool.cc
@@ -6,6 +6,7 @@
#include "base/bind.h"
#include "base/containers/circular_deque.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/synchronization/lock.h"
@@ -93,7 +94,7 @@ scoped_refptr<VideoFrame> VideoFramePool::PoolImpl::CreateFrame(
natural_size)) {
frame = pool_frame;
frame->set_timestamp(timestamp);
- frame->metadata()->Clear();
+ frame->clear_metadata();
break;
}
}
diff --git a/chromium/media/base/video_frame_unittest.cc b/chromium/media/base/video_frame_unittest.cc
index 17a1b972c64..7f9b105990b 100644
--- a/chromium/media/base/video_frame_unittest.cc
+++ b/chromium/media/base/video_frame_unittest.cc
@@ -47,6 +47,99 @@ void CreateTestY16Frame(const gfx::Size& coded_size,
}
}
}
+
+// Returns a VideoFrameMetadata object with a value for each field.
+media::VideoFrameMetadata GetFullVideoFrameMetadata() {
+ // Assign a non-default, distinct (when possible), value to all fields, and
+ // make sure values are preserved across serialization.
+ media::VideoFrameMetadata metadata;
+
+ // ints
+ metadata.capture_counter = 123;
+
+ // gfx::Rects
+ metadata.capture_update_rect = gfx::Rect(12, 34, 360, 480);
+
+ // media::VideoRotations
+ metadata.rotation = media::VideoRotation::VIDEO_ROTATION_90;
+
+ // bools
+ metadata.allow_overlay = true;
+ metadata.copy_required = true;
+ metadata.end_of_stream = true;
+ metadata.texture_owner = true;
+ metadata.wants_promotion_hint = true;
+ metadata.protected_video = true;
+ metadata.hw_protected = true;
+ metadata.power_efficient = true;
+ metadata.read_lock_fences_enabled = true;
+ metadata.interactive_content = true;
+
+ // base::UnguessableTokens
+ metadata.overlay_plane_id = base::UnguessableToken::Create();
+
+ // doubles
+ metadata.device_scale_factor = 2.0;
+ metadata.page_scale_factor = 2.1;
+ metadata.root_scroll_offset_x = 100.2;
+ metadata.root_scroll_offset_y = 200.1;
+ metadata.top_controls_visible_height = 25.5;
+ metadata.resource_utilization = 95.8;
+ metadata.frame_rate = 29.94;
+ metadata.rtp_timestamp = 1.0;
+
+ // base::TimeTicks
+ base::TimeTicks now = base::TimeTicks::Now();
+ metadata.receive_time = now + base::TimeDelta::FromMilliseconds(10);
+ metadata.capture_begin_time = now + base::TimeDelta::FromMilliseconds(20);
+ metadata.capture_end_time = now + base::TimeDelta::FromMilliseconds(30);
+ metadata.decode_begin_time = now + base::TimeDelta::FromMilliseconds(40);
+ metadata.decode_end_time = now + base::TimeDelta::FromMilliseconds(50);
+ metadata.reference_time = now + base::TimeDelta::FromMilliseconds(60);
+
+ // base::TimeDeltas
+ metadata.processing_time = base::TimeDelta::FromMilliseconds(500);
+ metadata.frame_duration = base::TimeDelta::FromMilliseconds(16);
+ metadata.wallclock_frame_duration = base::TimeDelta::FromMilliseconds(17);
+
+ return metadata;
+}
+
+void VerifyVideoFrameMetadataEquality(const media::VideoFrameMetadata& a,
+ const media::VideoFrameMetadata& b) {
+ EXPECT_EQ(a.allow_overlay, b.allow_overlay);
+ EXPECT_EQ(a.capture_begin_time, b.capture_begin_time);
+ EXPECT_EQ(a.capture_end_time, b.capture_end_time);
+ EXPECT_EQ(a.capture_counter, b.capture_counter);
+ EXPECT_EQ(a.capture_update_rect, b.capture_update_rect);
+ EXPECT_EQ(a.copy_required, b.copy_required);
+ EXPECT_EQ(a.end_of_stream, b.end_of_stream);
+ EXPECT_EQ(a.frame_duration, b.frame_duration);
+ EXPECT_EQ(a.frame_rate, b.frame_rate);
+ EXPECT_EQ(a.interactive_content, b.interactive_content);
+ EXPECT_EQ(a.reference_time, b.reference_time);
+ EXPECT_EQ(a.resource_utilization, b.resource_utilization);
+ EXPECT_EQ(a.read_lock_fences_enabled, b.read_lock_fences_enabled);
+ EXPECT_EQ(a.rotation, b.rotation);
+ EXPECT_EQ(a.texture_owner, b.texture_owner);
+ EXPECT_EQ(a.wants_promotion_hint, b.wants_promotion_hint);
+ EXPECT_EQ(a.protected_video, b.protected_video);
+ EXPECT_EQ(a.hw_protected, b.hw_protected);
+ EXPECT_EQ(a.overlay_plane_id, b.overlay_plane_id);
+ EXPECT_EQ(a.power_efficient, b.power_efficient);
+ EXPECT_EQ(a.device_scale_factor, b.device_scale_factor);
+ EXPECT_EQ(a.page_scale_factor, b.page_scale_factor);
+ EXPECT_EQ(a.root_scroll_offset_x, b.root_scroll_offset_x);
+ EXPECT_EQ(a.root_scroll_offset_y, b.root_scroll_offset_y);
+ EXPECT_EQ(a.top_controls_visible_height, b.top_controls_visible_height);
+ EXPECT_EQ(a.decode_begin_time, b.decode_begin_time);
+ EXPECT_EQ(a.decode_end_time, b.decode_end_time);
+ EXPECT_EQ(a.processing_time, b.processing_time);
+ EXPECT_EQ(a.rtp_timestamp, b.rtp_timestamp);
+ EXPECT_EQ(a.receive_time, b.receive_time);
+ EXPECT_EQ(a.wallclock_frame_duration, b.wallclock_frame_duration);
+}
+
} // namespace
namespace media {
@@ -198,8 +291,7 @@ TEST(VideoFrame, CreateFrame) {
// Test an empty frame.
frame = VideoFrame::CreateEOSFrame();
- EXPECT_TRUE(
- frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_TRUE(frame->metadata()->end_of_stream);
}
TEST(VideoFrame, CreateZeroInitializedFrame) {
@@ -235,8 +327,7 @@ TEST(VideoFrame, CreateBlackFrame) {
// Test basic properties.
EXPECT_EQ(0, frame->timestamp().InMicroseconds());
- EXPECT_FALSE(
- frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_FALSE(frame->metadata()->end_of_stream);
// Test |frame| properties.
EXPECT_EQ(PIXEL_FORMAT_I420, frame->format());
@@ -278,8 +369,7 @@ TEST(VideoFrame, WrapVideoFrame) {
gfx::Rect visible_rect(1, 1, 1, 1);
gfx::Size natural_size = visible_rect.size();
- wrapped_frame->metadata()->SetTimeDelta(
- media::VideoFrameMetadata::FRAME_DURATION, kFrameDuration);
+ wrapped_frame->metadata()->frame_duration = kFrameDuration;
frame = media::VideoFrame::WrapVideoFrame(
wrapped_frame, wrapped_frame->format(), visible_rect, natural_size);
wrapped_frame->AddDestructionObserver(
@@ -293,18 +383,12 @@ TEST(VideoFrame, WrapVideoFrame) {
EXPECT_EQ(natural_size, frame->natural_size());
// Verify metadata was copied to the wrapped frame.
- base::TimeDelta frame_duration;
- ASSERT_TRUE(frame->metadata()->GetTimeDelta(
- media::VideoFrameMetadata::FRAME_DURATION, &frame_duration));
-
- EXPECT_EQ(frame_duration, kFrameDuration);
+ EXPECT_EQ(*frame->metadata()->frame_duration, kFrameDuration);
// Verify the metadata copy was a deep copy.
- wrapped_frame->metadata()->Clear();
- EXPECT_NE(
- wrapped_frame->metadata()->HasKey(
- media::VideoFrameMetadata::FRAME_DURATION),
- frame->metadata()->HasKey(media::VideoFrameMetadata::FRAME_DURATION));
+ wrapped_frame->clear_metadata();
+ EXPECT_NE(wrapped_frame->metadata()->frame_duration.has_value(),
+ frame->metadata()->frame_duration.has_value());
}
// Verify that |wrapped_frame| outlives |frame|.
@@ -643,111 +727,44 @@ TEST(VideoFrame, AllocationSize_OddSize) {
}
}
-TEST(VideoFrameMetadata, SetAndThenGetAllKeysForAllTypes) {
- VideoFrameMetadata metadata;
-
- for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
- const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
-
- EXPECT_FALSE(metadata.HasKey(key));
- metadata.SetBoolean(key, true);
- EXPECT_TRUE(metadata.HasKey(key));
- bool bool_value = false;
- EXPECT_TRUE(metadata.GetBoolean(key, &bool_value));
- EXPECT_EQ(true, bool_value);
- metadata.Clear();
-
- EXPECT_FALSE(metadata.HasKey(key));
- metadata.SetInteger(key, i);
- EXPECT_TRUE(metadata.HasKey(key));
- int int_value = -999;
- EXPECT_TRUE(metadata.GetInteger(key, &int_value));
- EXPECT_EQ(i, int_value);
- metadata.Clear();
-
- EXPECT_FALSE(metadata.HasKey(key));
- metadata.SetDouble(key, 3.14 * i);
- EXPECT_TRUE(metadata.HasKey(key));
- double double_value = -999.99;
- EXPECT_TRUE(metadata.GetDouble(key, &double_value));
- EXPECT_EQ(3.14 * i, double_value);
- metadata.Clear();
-
- EXPECT_FALSE(metadata.HasKey(key));
- metadata.SetString(key, base::StringPrintf("\xfe%d\xff", i));
- EXPECT_TRUE(metadata.HasKey(key));
- std::string string_value;
- EXPECT_TRUE(metadata.GetString(key, &string_value));
- EXPECT_EQ(base::StringPrintf("\xfe%d\xff", i), string_value);
- metadata.Clear();
-
- EXPECT_FALSE(metadata.HasKey(key));
- base::TimeDelta reference_delta = base::TimeDelta::FromMilliseconds(42 + i);
- metadata.SetTimeDelta(key, reference_delta);
- EXPECT_TRUE(metadata.HasKey(key));
- base::TimeDelta delta_value;
- EXPECT_TRUE(metadata.GetTimeDelta(key, &delta_value));
- EXPECT_EQ(reference_delta, delta_value);
- metadata.Clear();
-
- EXPECT_FALSE(metadata.HasKey(key));
- base::TimeTicks reference_ticks =
- base::TimeTicks() + base::TimeDelta::FromMilliseconds(1234 + i);
- metadata.SetTimeTicks(key, reference_ticks);
- EXPECT_TRUE(metadata.HasKey(key));
- base::TimeTicks ticks_value;
- EXPECT_TRUE(metadata.GetTimeTicks(key, &ticks_value));
- EXPECT_EQ(reference_ticks, ticks_value);
- metadata.Clear();
-
- EXPECT_FALSE(metadata.HasKey(key));
- gfx::Rect reference_rect = gfx::Rect(3, 5, 240, 360);
- metadata.SetRect(key, reference_rect);
- EXPECT_TRUE(metadata.HasKey(key));
- gfx::Rect rect_value;
- EXPECT_TRUE(metadata.GetRect(key, &rect_value));
- EXPECT_EQ(reference_rect, rect_value);
- metadata.Clear();
- }
+TEST(VideoFrameMetadata, MergeMetadata) {
+ VideoFrameMetadata reference_metadata = GetFullVideoFrameMetadata();
+ VideoFrameMetadata full_metadata = reference_metadata;
+ VideoFrameMetadata empty_metadata;
- // The Get/SetRotation methods only accept ROTATION as a key.
- auto rot_key = VideoFrameMetadata::Key::ROTATION;
- EXPECT_FALSE(metadata.HasKey(rot_key));
- VideoRotation reference_rot = VideoRotation::VIDEO_ROTATION_270;
- metadata.SetRotation(rot_key, reference_rot);
- EXPECT_TRUE(metadata.HasKey(rot_key));
- VideoRotation rot_value;
- EXPECT_TRUE(metadata.GetRotation(rot_key, &rot_value));
- EXPECT_EQ(reference_rot, rot_value);
- metadata.Clear();
-}
+ // Merging empty metadata into full metadata should be a no-op.
+ full_metadata.MergeMetadataFrom(&empty_metadata);
+ VerifyVideoFrameMetadataEquality(full_metadata, reference_metadata);
-TEST(VideoFrameMetadata, PassMetadataViaIntermediary) {
- VideoFrameMetadata expected;
- for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
- const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
- expected.SetInteger(key, i);
- }
-
- VideoFrameMetadata result;
- result.MergeMetadataFrom(&expected);
-
- for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
- const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
- int value = -1;
- EXPECT_TRUE(result.GetInteger(key, &value));
- EXPECT_EQ(i, value);
- }
-
- result.Clear();
- result.MergeInternalValuesFrom(expected.GetInternalValues());
+ // Merging full metadata into empty metadata should fill it up.
+ empty_metadata.MergeMetadataFrom(&full_metadata);
+ VerifyVideoFrameMetadataEquality(empty_metadata, reference_metadata);
+}
- for (int i = 0; i < VideoFrameMetadata::NUM_KEYS; ++i) {
- const VideoFrameMetadata::Key key = static_cast<VideoFrameMetadata::Key>(i);
- int value = -1;
- EXPECT_TRUE(result.GetInteger(key, &value));
- EXPECT_EQ(i, value);
- }
+TEST(VideoFrameMetadata, PartialMergeMetadata) {
+ VideoFrameMetadata full_metadata = GetFullVideoFrameMetadata();
+
+ const gfx::Rect kTempRect{100, 200, 300, 400};
+ const base::TimeTicks kTempTicks =
+ base::TimeTicks::Now() + base::TimeDelta::FromSeconds(2);
+ const base::TimeDelta kTempDelta = base::TimeDelta::FromMilliseconds(31415);
+ const double kTempDouble = 123.45;
+
+ VideoFrameMetadata partial_metadata;
+ partial_metadata.capture_update_rect = kTempRect;
+ partial_metadata.reference_time = kTempTicks;
+ partial_metadata.processing_time = kTempDelta;
+ partial_metadata.resource_utilization = kTempDouble;
+ partial_metadata.allow_overlay = false;
+
+ // Merging partial metadata into full metadata partially override it.
+ full_metadata.MergeMetadataFrom(&partial_metadata);
+
+ EXPECT_EQ(partial_metadata.capture_update_rect, kTempRect);
+ EXPECT_EQ(partial_metadata.reference_time, kTempTicks);
+ EXPECT_EQ(partial_metadata.processing_time, kTempDelta);
+ EXPECT_EQ(partial_metadata.resource_utilization, kTempDouble);
+ EXPECT_EQ(partial_metadata.allow_overlay, false);
}
} // namespace media
diff --git a/chromium/media/base/video_renderer_sink.h b/chromium/media/base/video_renderer_sink.h
index 2598d0c3b0a..62442035a12 100644
--- a/chromium/media/base/video_renderer_sink.h
+++ b/chromium/media/base/video_renderer_sink.h
@@ -5,7 +5,6 @@
#ifndef MEDIA_BASE_VIDEO_RENDERER_SINK_H_
#define MEDIA_BASE_VIDEO_RENDERER_SINK_H_
-#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/time/time.h"
#include "media/base/media_export.h"
diff --git a/chromium/media/base/win/BUILD.gn b/chromium/media/base/win/BUILD.gn
index b59888ffc6c..d30fdc1ef4c 100644
--- a/chromium/media/base/win/BUILD.gn
+++ b/chromium/media/base/win/BUILD.gn
@@ -57,6 +57,11 @@ source_set("hresult_status_helper") {
deps = [ "//media" ]
}
+source_set("mf_cdm_proxy") {
+ sources = [ "mf_cdm_proxy.h" ]
+ deps = [ "//base" ]
+}
+
source_set("d3d11_test_support") {
testonly = true
sources = [
diff --git a/chromium/media/renderers/win/mf_cdm_proxy.h b/chromium/media/base/win/mf_cdm_proxy.h
index a04dbab3ef7..536cfa4cf58 100644
--- a/chromium/media/renderers/win/mf_cdm_proxy.h
+++ b/chromium/media/base/win/mf_cdm_proxy.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_RENDERERS_WIN_MF_CDM_PROXY_H_
-#define MEDIA_RENDERERS_WIN_MF_CDM_PROXY_H_
+#ifndef MEDIA_BASE_WIN_MF_CDM_PROXY_H_
+#define MEDIA_BASE_WIN_MF_CDM_PROXY_H_
#include <stdint.h>
#include <unknwn.h>
@@ -64,4 +64,4 @@ IMFCdmProxy : public IUnknown {
_In_ IUnknown * request, _In_ IMFAsyncResult * result) = 0;
};
-#endif // MEDIA_RENDERERS_WIN_MF_CDM_PROXY_H_
+#endif // MEDIA_BASE_WIN_MF_CDM_PROXY_H_
diff --git a/chromium/media/base/win/mf_helpers.cc b/chromium/media/base/win/mf_helpers.cc
index fce7ada08e4..29837bb407b 100644
--- a/chromium/media/base/win/mf_helpers.cc
+++ b/chromium/media/base/win/mf_helpers.cc
@@ -4,6 +4,8 @@
#include "media/base/win/mf_helpers.h"
+#include "base/check_op.h"
+
namespace media {
Microsoft::WRL::ComPtr<IMFSample> CreateEmptySampleWithBuffer(
diff --git a/chromium/media/base/win/mf_initializer.h b/chromium/media/base/win/mf_initializer.h
index 2f8431b65bc..714cf3c4501 100644
--- a/chromium/media/base/win/mf_initializer.h
+++ b/chromium/media/base/win/mf_initializer.h
@@ -9,7 +9,7 @@
#include <memory>
-#include "base/logging.h"
+#include "base/compiler_specific.h"
#include "media/base/win/mf_initializer_export.h"
namespace media {
diff --git a/chromium/media/blink/BUILD.gn b/chromium/media/blink/BUILD.gn
index a4e8c8f3a0c..fa0a3546689 100644
--- a/chromium/media/blink/BUILD.gn
+++ b/chromium/media/blink/BUILD.gn
@@ -99,6 +99,8 @@ component("blink") {
"webmediaplayer_impl.cc",
"webmediaplayer_impl.h",
]
+
+ deps += [ "//media/remoting:remoting_constants" ]
}
}
diff --git a/chromium/media/blink/cdm_session_adapter.cc b/chromium/media/blink/cdm_session_adapter.cc
index e1673156290..a2fa29db1f1 100644
--- a/chromium/media/blink/cdm_session_adapter.cc
+++ b/chromium/media/blink/cdm_session_adapter.cc
@@ -20,7 +20,6 @@
#include "media/base/key_systems.h"
#include "media/blink/webcontentdecryptionmodulesession_impl.h"
#include "media/cdm/cdm_context_ref_impl.h"
-#include "url/origin.h"
namespace media {
@@ -37,7 +36,6 @@ CdmSessionAdapter::~CdmSessionAdapter() = default;
void CdmSessionAdapter::CreateCdm(CdmFactory* cdm_factory,
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
WebCdmCreatedCB web_cdm_created_cb) {
TRACE_EVENT_ASYNC_BEGIN0("media", "CdmSessionAdapter::CreateCdm",
@@ -54,7 +52,7 @@ void CdmSessionAdapter::CreateCdm(CdmFactory* cdm_factory,
web_cdm_created_cb_ = std::move(web_cdm_created_cb);
cdm_factory->Create(
- key_system, security_origin, cdm_config,
+ key_system, cdm_config,
base::Bind(&CdmSessionAdapter::OnSessionMessage, weak_this),
base::Bind(&CdmSessionAdapter::OnSessionClosed, weak_this),
base::Bind(&CdmSessionAdapter::OnSessionKeysChange, weak_this),
@@ -76,8 +74,10 @@ void CdmSessionAdapter::GetStatusForPolicy(
}
std::unique_ptr<WebContentDecryptionModuleSessionImpl>
-CdmSessionAdapter::CreateSession() {
- return std::make_unique<WebContentDecryptionModuleSessionImpl>(this);
+CdmSessionAdapter::CreateSession(
+ blink::WebEncryptedMediaSessionType session_type) {
+ return std::make_unique<WebContentDecryptionModuleSessionImpl>(this,
+ session_type);
}
bool CdmSessionAdapter::RegisterSession(
diff --git a/chromium/media/blink/cdm_session_adapter.h b/chromium/media/blink/cdm_session_adapter.h
index d609af8319b..33255048f94 100644
--- a/chromium/media/blink/cdm_session_adapter.h
+++ b/chromium/media/blink/cdm_session_adapter.h
@@ -21,10 +21,6 @@
#include "media/blink/webcontentdecryptionmodule_impl.h"
#include "third_party/blink/public/platform/web_content_decryption_module_session.h"
-namespace url {
-class Origin;
-}
-
namespace media {
struct CdmConfig;
@@ -44,7 +40,6 @@ class CdmSessionAdapter : public base::RefCounted<CdmSessionAdapter> {
// via |result|.
void CreateCdm(CdmFactory* cdm_factory,
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
WebCdmCreatedCB web_cdm_created_cb);
@@ -60,7 +55,8 @@ class CdmSessionAdapter : public base::RefCounted<CdmSessionAdapter> {
// Creates a new session and adds it to the internal map. RemoveSession()
// must be called when destroying it, if RegisterSession() was called.
- std::unique_ptr<WebContentDecryptionModuleSessionImpl> CreateSession();
+ std::unique_ptr<WebContentDecryptionModuleSessionImpl> CreateSession(
+ blink::WebEncryptedMediaSessionType session_type);
// Adds a session to the internal map. Called once the session is successfully
// initialized. Returns true if the session was registered, false if there is
diff --git a/chromium/media/blink/interval_map.h b/chromium/media/blink/interval_map.h
index aa530034a00..1dd67a1ad1f 100644
--- a/chromium/media/blink/interval_map.h
+++ b/chromium/media/blink/interval_map.h
@@ -9,7 +9,7 @@
#include <limits>
#include <map>
-#include "base/logging.h"
+#include "base/check.h"
namespace media {
diff --git a/chromium/media/blink/multibuffer.cc b/chromium/media/blink/multibuffer.cc
index cb9f3eecd71..84bbc6b085d 100644
--- a/chromium/media/blink/multibuffer.cc
+++ b/chromium/media/blink/multibuffer.cc
@@ -8,6 +8,7 @@
#include "base/bind.h"
#include "base/location.h"
+#include "base/logging.h"
namespace media {
diff --git a/chromium/media/blink/multibuffer_unittest.cc b/chromium/media/blink/multibuffer_unittest.cc
index cc52013c46a..8e3c095d34d 100644
--- a/chromium/media/blink/multibuffer_unittest.cc
+++ b/chromium/media/blink/multibuffer_unittest.cc
@@ -12,6 +12,7 @@
#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/containers/circular_deque.h"
+#include "base/logging.h"
#include "base/test/simple_test_tick_clock.h"
#include "media/base/fake_single_thread_task_runner.h"
#include "media/base/test_random.h"
diff --git a/chromium/media/blink/url_index.cc b/chromium/media/blink/url_index.cc
index d68055f0638..9fc8b1f51e8 100644
--- a/chromium/media/blink/url_index.cc
+++ b/chromium/media/blink/url_index.cc
@@ -218,6 +218,7 @@ UrlIndex::UrlIndex(ResourceFetchContext* fetch_context, int block_shift)
lru_(new MultiBuffer::GlobalLRU(base::ThreadTaskRunnerHandle::Get())),
block_shift_(block_shift),
memory_pressure_listener_(
+ FROM_HERE,
base::Bind(&UrlIndex::OnMemoryPressure, base::Unretained(this))) {}
UrlIndex::~UrlIndex() {
diff --git a/chromium/media/blink/video_frame_compositor.h b/chromium/media/blink/video_frame_compositor.h
index fc8456053bd..e79ab4b53e1 100644
--- a/chromium/media/blink/video_frame_compositor.h
+++ b/chromium/media/blink/video_frame_compositor.h
@@ -131,8 +131,8 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
virtual void SetOnFramePresentedCallback(OnNewFramePresentedCB present_cb);
// Gets the metadata for the last frame that was presented to the compositor.
- // Used to populate the VideoFrameMetadata of video.requestAnimationFrame()
- // callbacks. See https://wicg.github.io/video-raf/.
+ // Used to populate the VideoFrameMetadata of video.requestVideoFrameCallback
+ // callbacks. See https://wicg.github.io/video-rvfc/.
// Can be called on any thread.
virtual std::unique_ptr<blink::WebMediaPlayer::VideoFramePresentationMetadata>
GetLastPresentedFrameMetadata();
@@ -255,8 +255,8 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
base::Lock current_frame_lock_;
scoped_refptr<VideoFrame> current_frame_;
- // Used to fulfill video.requestAnimationFrame() calls.
- // See https://wicg.github.io/video-raf/.
+ // Used to fulfill video.requestVideoFrameCallback() calls.
+ // See https://wicg.github.io/video-rvfc/.
base::TimeTicks last_presentation_time_ GUARDED_BY(current_frame_lock_);
base::TimeTicks last_expected_display_time_ GUARDED_BY(current_frame_lock_);
uint32_t presentation_counter_ GUARDED_BY(current_frame_lock_) = 0u;
diff --git a/chromium/media/blink/webcontentdecryptionmodule_impl.cc b/chromium/media/blink/webcontentdecryptionmodule_impl.cc
index 9e4ade56fd7..b267d9d7d0c 100644
--- a/chromium/media/blink/webcontentdecryptionmodule_impl.cc
+++ b/chromium/media/blink/webcontentdecryptionmodule_impl.cc
@@ -107,7 +107,7 @@ void WebContentDecryptionModuleImpl::Create(
// |web_cdm_created_cb|), it will keep a reference to |adapter|. Otherwise,
// |adapter| will be destructed.
scoped_refptr<CdmSessionAdapter> adapter(new CdmSessionAdapter());
- adapter->CreateCdm(cdm_factory, key_system_ascii, security_origin, cdm_config,
+ adapter->CreateCdm(cdm_factory, key_system_ascii, cdm_config,
std::move(web_cdm_created_cb));
}
@@ -119,8 +119,9 @@ WebContentDecryptionModuleImpl::WebContentDecryptionModuleImpl(
WebContentDecryptionModuleImpl::~WebContentDecryptionModuleImpl() = default;
std::unique_ptr<blink::WebContentDecryptionModuleSession>
-WebContentDecryptionModuleImpl::CreateSession() {
- return adapter_->CreateSession();
+WebContentDecryptionModuleImpl::CreateSession(
+ blink::WebEncryptedMediaSessionType session_type) {
+ return adapter_->CreateSession(session_type);
}
void WebContentDecryptionModuleImpl::SetServerCertificate(
diff --git a/chromium/media/blink/webcontentdecryptionmodule_impl.h b/chromium/media/blink/webcontentdecryptionmodule_impl.h
index 36c2a1dbd1f..fddf586b88e 100644
--- a/chromium/media/blink/webcontentdecryptionmodule_impl.h
+++ b/chromium/media/blink/webcontentdecryptionmodule_impl.h
@@ -46,8 +46,8 @@ class MEDIA_BLINK_EXPORT WebContentDecryptionModuleImpl
~WebContentDecryptionModuleImpl() override;
// blink::WebContentDecryptionModule implementation.
- std::unique_ptr<blink::WebContentDecryptionModuleSession> CreateSession()
- override;
+ std::unique_ptr<blink::WebContentDecryptionModuleSession> CreateSession(
+ blink::WebEncryptedMediaSessionType session_type) override;
void SetServerCertificate(
const uint8_t* server_certificate,
size_t server_certificate_length,
diff --git a/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc b/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
index 464b8fa30a6..e7b274b9827 100644
--- a/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
+++ b/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
@@ -226,11 +226,12 @@ class IgnoreResponsePromise : public SimpleCdmPromise {
} // namespace
WebContentDecryptionModuleSessionImpl::WebContentDecryptionModuleSessionImpl(
- const scoped_refptr<CdmSessionAdapter>& adapter)
+ const scoped_refptr<CdmSessionAdapter>& adapter,
+ blink::WebEncryptedMediaSessionType session_type)
: adapter_(adapter),
+ session_type_(convertSessionType(session_type)),
has_close_been_called_(false),
- is_closed_(false),
- is_persistent_session_(false) {}
+ is_closed_(false) {}
WebContentDecryptionModuleSessionImpl::
~WebContentDecryptionModuleSessionImpl() {
@@ -269,7 +270,6 @@ void WebContentDecryptionModuleSessionImpl::InitializeNewSession(
EmeInitDataType eme_init_data_type,
const unsigned char* init_data,
size_t init_data_length,
- blink::WebEncryptedMediaSessionType session_type,
blink::WebContentDecryptionModuleResult result) {
DCHECK(init_data);
DCHECK(session_id_.empty());
@@ -334,10 +334,8 @@ void WebContentDecryptionModuleSessionImpl::InitializeNewSession(
// 10.8 Let cdm be the CDM instance represented by this object's cdm
// instance value.
// 10.9 Use the cdm to execute the following steps:
- CdmSessionType cdm_session_type = convertSessionType(session_type);
- is_persistent_session_ = cdm_session_type != CdmSessionType::kTemporary;
adapter_->InitializeNewSession(
- eme_init_data_type, sanitized_init_data, cdm_session_type,
+ eme_init_data_type, sanitized_init_data, session_type_,
std::unique_ptr<NewSessionCdmPromise>(new NewSessionCdmResultPromise(
result, adapter_->GetKeySystemUMAPrefix(), kGenerateRequestUMAName,
base::BindOnce(
@@ -352,6 +350,8 @@ void WebContentDecryptionModuleSessionImpl::Load(
DCHECK(!session_id.IsEmpty());
DCHECK(session_id_.empty());
DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(session_type_ == CdmSessionType::kPersistentLicense ||
+ session_type_ == CdmSessionType::kPersistentUsageRecord);
// From https://w3c.github.io/encrypted-media/#load.
// 8.1 Let sanitized session ID be a validated and/or sanitized version of
@@ -368,12 +368,8 @@ void WebContentDecryptionModuleSessionImpl::Load(
return;
}
- // TODO(jrummell): Now that there are 2 types of persistent sessions, the
- // session type should be passed from blink. Type should also be passed in the
- // constructor (and removed from initializeNewSession()).
- is_persistent_session_ = true;
adapter_->LoadSession(
- CdmSessionType::kPersistentLicense, sanitized_session_id,
+ session_type_, sanitized_session_id,
std::unique_ptr<NewSessionCdmPromise>(new NewSessionCdmResultPromise(
result, adapter_->GetKeySystemUMAPrefix(), kLoadSessionUMAName,
base::BindOnce(
diff --git a/chromium/media/blink/webcontentdecryptionmodulesession_impl.h b/chromium/media/blink/webcontentdecryptionmodulesession_impl.h
index 652c3d69754..b6c4572e96f 100644
--- a/chromium/media/blink/webcontentdecryptionmodulesession_impl.h
+++ b/chromium/media/blink/webcontentdecryptionmodulesession_impl.h
@@ -29,7 +29,8 @@ class WebContentDecryptionModuleSessionImpl
: public blink::WebContentDecryptionModuleSession {
public:
WebContentDecryptionModuleSessionImpl(
- const scoped_refptr<CdmSessionAdapter>& adapter);
+ const scoped_refptr<CdmSessionAdapter>& adapter,
+ blink::WebEncryptedMediaSessionType session_type);
~WebContentDecryptionModuleSessionImpl() override;
// blink::WebContentDecryptionModuleSession implementation.
@@ -40,7 +41,6 @@ class WebContentDecryptionModuleSessionImpl
EmeInitDataType init_data_type,
const unsigned char* initData,
size_t initDataLength,
- blink::WebEncryptedMediaSessionType session_type,
blink::WebContentDecryptionModuleResult result) override;
void Load(const blink::WebString& session_id,
blink::WebContentDecryptionModuleResult result) override;
@@ -66,6 +66,10 @@ class WebContentDecryptionModuleSessionImpl
scoped_refptr<CdmSessionAdapter> adapter_;
+ // Keep track of the session type to be passed into InitializeNewSession() and
+ // LoadSession().
+ const CdmSessionType session_type_;
+
// Non-owned pointer.
Client* client_;
@@ -84,9 +88,6 @@ class WebContentDecryptionModuleSessionImpl
bool has_close_been_called_;
bool is_closed_;
- // Keep track of whether this is a persistent session or not.
- bool is_persistent_session_;
-
base::ThreadChecker thread_checker_;
// Since promises will live until they are fired, use a weak reference when
// creating a promise in case this class disappears before the promise
diff --git a/chromium/media/blink/webinbandtexttrack_impl.cc b/chromium/media/blink/webinbandtexttrack_impl.cc
index df182a7b906..18fa8de1bef 100644
--- a/chromium/media/blink/webinbandtexttrack_impl.cc
+++ b/chromium/media/blink/webinbandtexttrack_impl.cc
@@ -8,17 +8,15 @@
namespace media {
-WebInbandTextTrackImpl::WebInbandTextTrackImpl(
- Kind kind,
- const blink::WebString& label,
- const blink::WebString& language,
- const blink::WebString& id)
- : client_(NULL),
+WebInbandTextTrackImpl::WebInbandTextTrackImpl(Kind kind,
+ const blink::WebString& label,
+ const blink::WebString& language,
+ const blink::WebString& id)
+ : client_(nullptr),
kind_(kind),
label_(label),
language_(language),
- id_(id) {
-}
+ id_(id) {}
WebInbandTextTrackImpl::~WebInbandTextTrackImpl() {
DCHECK(!client_);
diff --git a/chromium/media/blink/webmediaplayer_impl.cc b/chromium/media/blink/webmediaplayer_impl.cc
index 254902da241..8c769e69efa 100644
--- a/chromium/media/blink/webmediaplayer_impl.cc
+++ b/chromium/media/blink/webmediaplayer_impl.cc
@@ -56,6 +56,7 @@
#include "media/filters/memory_data_source.h"
#include "media/learning/mojo/public/cpp/mojo_learning_task_controller.h"
#include "media/media_buildflags.h"
+#include "media/remoting/remoting_constants.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "net/base/data_url.h"
#include "third_party/blink/public/platform/web_encrypted_media_types.h"
@@ -280,6 +281,30 @@ void CreateAllocation(base::trace_event::ProcessMemoryDump* pmd,
pmd->AddSuballocation(dump->guid(), std_allocator);
}
+// Determine whether we should update MediaPosition in |delegate_|.
+bool MediaPositionNeedsUpdate(
+ const media_session::MediaPosition& old_position,
+ const media_session::MediaPosition& new_position) {
+ if (old_position.playback_rate() != new_position.playback_rate())
+ return true;
+
+ if (old_position.duration() != new_position.duration())
+ return true;
+
+ // MediaPosition is potentially changed upon each OnTimeUpdate() call. In
+ // practice most of these calls happen periodically during normal playback,
+ // with unchanged rate and duration. If we want to avoid updating
+ // MediaPosition unnecessarily, we need to compare the current time
+ // calculated from the old and new MediaPositions with some tolerance. That's
+ // because we don't know the exact time when GetMediaTime() calculated the
+ // media position. We choose an arbitrary tolerance that is high enough to
+ // eliminate a lot of MediaPosition updates and low enough not to make a
+ // perceptible difference.
+ const auto drift =
+ (old_position.GetPosition() - new_position.GetPosition()).magnitude();
+ return drift > base::TimeDelta::FromMilliseconds(100);
+}
+
} // namespace
class BufferedDataSourceHostImpl;
@@ -331,7 +356,6 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
params->IsBackgroundVideoPlaybackEnabled()),
is_background_video_track_optimization_supported_(
params->IsBackgroundVideoTrackOptimizationSupported()),
- is_remoting_renderer_enabled_(params->IsRemotingRendererEnabled()),
simple_watch_timer_(
base::BindRepeating(&WebMediaPlayerImpl::OnSimpleWatchTimerTick,
base::Unretained(this)),
@@ -737,6 +761,12 @@ void WebMediaPlayerImpl::DoLoad(LoadType load_type,
// used, the pipeline can start immediately.
StartPipeline();
} else {
+ // If |loaded_url_| is remoting media, starting the pipeline.
+ if (loaded_url_.SchemeIs(remoting::kRemotingScheme)) {
+ StartPipeline();
+ return;
+ }
+
// Short circuit the more complex loading path for data:// URLs. Sending
// them through the network based loading path just wastes memory and causes
// worse performance since reads become asynchronous.
@@ -819,9 +849,6 @@ void WebMediaPlayerImpl::Play() {
MaybeUpdateBufferSizesForPlayback();
UpdatePlayState();
- // Paused changed so we should update media position state.
- UpdateMediaPositionState();
-
// Notify the learning task, if needed.
will_play_helper_.CompleteObservationIfNeeded(learning::TargetValue(true));
}
@@ -865,9 +892,6 @@ void WebMediaPlayerImpl::Pause() {
simple_watch_timer_.Stop();
media_log_->AddEvent<MediaLogEvent::kPause>();
- // Paused changed so we should update media position state.
- UpdateMediaPositionState();
-
UpdatePlayState();
}
@@ -932,9 +956,6 @@ void WebMediaPlayerImpl::DoSeek(base::TimeDelta time, bool time_updated) {
// This needs to be called after Seek() so that if a resume is triggered, it
// is to the correct time.
UpdatePlayState();
-
- // The seek time has changed so we should update the media position state.
- UpdateMediaPositionState();
}
void WebMediaPlayerImpl::SetRate(double rate) {
@@ -953,10 +974,6 @@ void WebMediaPlayerImpl::SetRate(double rate) {
pipeline_controller_->SetPlaybackRate(rate);
MaybeUpdateBufferSizesForPlayback();
-
- // The playback rate has changed so we should rebuild the media position
- // state.
- UpdateMediaPositionState();
}
void WebMediaPlayerImpl::SetVolume(double volume) {
@@ -984,6 +1001,11 @@ void WebMediaPlayerImpl::SetLatencyHint(double seconds) {
pipeline_controller_->SetLatencyHint(latency_hint);
}
+void WebMediaPlayerImpl::SetPreservesPitch(bool preserves_pitch) {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ pipeline_controller_->SetPreservesPitch(preserves_pitch);
+}
+
void WebMediaPlayerImpl::OnRequestPictureInPicture() {
if (!surface_layer_for_video_enabled_)
ActivateSurfaceLayerForVideo();
@@ -1430,10 +1452,9 @@ void WebMediaPlayerImpl::ComputeFrameUploadMetadata(
out_metadata->frame_id = frame->unique_id();
out_metadata->visible_rect = frame->visible_rect();
out_metadata->timestamp = frame->timestamp();
- base::TimeDelta frame_duration;
- if (frame->metadata()->GetTimeDelta(media::VideoFrameMetadata::FRAME_DURATION,
- &frame_duration)) {
- out_metadata->expected_timestamp = frame->timestamp() + frame_duration;
+ if (frame->metadata()->frame_duration.has_value()) {
+ out_metadata->expected_timestamp =
+ frame->timestamp() + *frame->metadata()->frame_duration;
};
bool skip_possible = already_uploaded_id != -1;
bool same_frame_id = frame->unique_id() == already_uploaded_id;
@@ -1648,9 +1669,6 @@ void WebMediaPlayerImpl::OnPipelineSeeked(bool time_updated) {
}
attempting_suspended_start_ = false;
-
- // The current time has changed so we should update the media position state.
- UpdateMediaPositionState();
}
void WebMediaPlayerImpl::OnPipelineSuspended() {
@@ -2187,6 +2205,7 @@ void WebMediaPlayerImpl::OnBufferingStateChangeInternal(
!seeking_) {
underflow_timer_ = std::make_unique<base::ElapsedTimer>();
watch_time_reporter_->OnUnderflow();
+ delegate_->DidBufferUnderflow(delegate_id_);
if (playback_events_recorder_)
playback_events_recorder_->OnBuffering();
@@ -2227,9 +2246,6 @@ void WebMediaPlayerImpl::OnDurationChange() {
client_->DurationChanged();
if (watch_time_reporter_)
watch_time_reporter_->OnDurationChanged(GetPipelineMediaDuration());
-
- // The duration has changed so we should update the media position state.
- UpdateMediaPositionState();
}
void WebMediaPlayerImpl::OnAddTextTrack(const TextTrackConfig& config,
@@ -2820,8 +2836,9 @@ void WebMediaPlayerImpl::StartPipeline() {
if (base::FeatureList::IsEnabled(kMemoryPressureBasedSourceBufferGC)) {
// base::Unretained is safe because |this| owns memory_pressure_listener_.
memory_pressure_listener_ =
- std::make_unique<base::MemoryPressureListener>(base::Bind(
- &WebMediaPlayerImpl::OnMemoryPressure, base::Unretained(this)));
+ std::make_unique<base::MemoryPressureListener>(
+ FROM_HERE, base::Bind(&WebMediaPlayerImpl::OnMemoryPressure,
+ base::Unretained(this)));
}
}
@@ -2870,10 +2887,6 @@ void WebMediaPlayerImpl::SetReadyState(WebMediaPlayer::ReadyState state) {
// Always notify to ensure client has the latest value.
client_->ReadyStateChanged();
-
- // The ready state affects the effective playback rate so we should update
- // the media position state.
- UpdateMediaPositionState();
}
scoped_refptr<blink::WebAudioSourceProviderImpl>
@@ -2933,9 +2946,7 @@ void WebMediaPlayerImpl::UpdatePlayState() {
UpdateSmoothnessHelper();
}
-void WebMediaPlayerImpl::UpdateMediaPositionState() {
- DCHECK(delegate_);
-
+void WebMediaPlayerImpl::OnTimeUpdate() {
// When seeking the current time can go beyond the duration so we should
// cap the current time at the duration.
base::TimeDelta duration = GetPipelineMediaDuration();
@@ -2950,7 +2961,7 @@ void WebMediaPlayerImpl::UpdateMediaPositionState() {
media_session::MediaPosition new_position(effective_playback_rate, duration,
current_time);
- if (media_position_state_ == new_position)
+ if (!MediaPositionNeedsUpdate(media_position_state_, new_position))
return;
DVLOG(2) << __func__ << "(" << new_position.ToString() << ")";
@@ -3461,6 +3472,11 @@ bool WebMediaPlayerImpl::ShouldPausePlaybackWhenHidden() const {
if (!HasVideo())
return false;
+ if (using_media_player_renderer_ &&
+ pipeline_metadata_.natural_size.IsEmpty()) {
+ return false;
+ }
+
if (!is_background_video_playback_enabled_)
return true;
diff --git a/chromium/media/blink/webmediaplayer_impl.h b/chromium/media/blink/webmediaplayer_impl.h
index b62f8cf93b9..f4e48c46f62 100644
--- a/chromium/media/blink/webmediaplayer_impl.h
+++ b/chromium/media/blink/webmediaplayer_impl.h
@@ -130,7 +130,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void SetRate(double rate) override;
void SetVolume(double volume) override;
void SetLatencyHint(double seconds) override;
+ void SetPreservesPitch(bool preserves_pitch) override;
void OnRequestPictureInPicture() override;
+ void OnTimeUpdate() override;
void SetSinkId(
const blink::WebString& sink_id,
blink::WebSetSinkIdCompleteCallback completion_callback) override;
@@ -448,21 +450,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// - enter_pip_callback_,
void UpdatePlayState();
- // Calculates the current position state for the media element and notifies
- // |delegate_| if it has changed.
- //
- // Spec: https://wicg.github.io/mediasession/#position-state
- //
- // This method should be called any time its dependent values change. These
- // are:
- // - pipeline_controller_->GetMediaDuration()
- // - pipeline_media_duration_for_test_
- // - pipeline_controller_->GetMediaTime()
- // - playback_rate_
- // - Seeking() / seek_time_
- // - paused_, paused_time_
- void UpdateMediaPositionState();
-
// Methods internal to UpdatePlayState().
PlayState UpdatePlayState_ComputePlayState(bool is_flinging,
bool can_auto_suspend,
@@ -1036,15 +1023,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// Whether background video optimization is supported on current platform.
bool is_background_video_track_optimization_supported_ = true;
- // Whether the media in this frame is a remoting media.
- //
- // Remoting media is a special media that has the media streams are delivered
- // to the browser directly from somewhere without any URL request
- // (http, file, ...)
- // When setting to true, a remoting renderer will be created as the remoting
- // target in the client.
- bool is_remoting_renderer_enabled_ = false;
-
base::CancelableOnceClosure have_enough_after_lazy_load_cb_;
// State for simplified watch time reporting.
diff --git a/chromium/media/blink/webmediaplayer_impl_unittest.cc b/chromium/media/blink/webmediaplayer_impl_unittest.cc
index 77dc3344993..d054fef8b92 100644
--- a/chromium/media/blink/webmediaplayer_impl_unittest.cc
+++ b/chromium/media/blink/webmediaplayer_impl_unittest.cc
@@ -241,6 +241,10 @@ class MockWebMediaPlayerDelegate : public blink::WebMediaPlayerDelegate {
DCHECK_EQ(player_id_, player_id);
}
+ void DidBufferUnderflow(int player_id) override {
+ DCHECK_EQ(player_id_, player_id);
+ }
+
bool IsFrameHidden() override { return is_hidden_; }
bool IsFrameClosed() override { return is_closed_; }
@@ -436,7 +440,7 @@ class WebMediaPlayerImplTest
viz::TestContextProvider::Create(),
blink::WebMediaPlayer::SurfaceLayerMode::kAlways,
is_background_suspend_enabled_, is_background_video_playback_enabled_,
- true, false, std::move(demuxer_override), nullptr);
+ true, std::move(demuxer_override), nullptr);
auto compositor = std::make_unique<NiceMock<MockVideoFrameCompositor>>(
params->video_frame_compositor_task_runner());
@@ -1545,167 +1549,82 @@ TEST_F(WebMediaPlayerImplTest, AutoplayMuted_SetVolume) {
wmpi_->SetVolume(1.0);
}
-TEST_F(WebMediaPlayerImplTest, MediaPositionState_OnDurationChange) {
+TEST_F(WebMediaPlayerImplTest, MediaPositionState_Playing) {
InitializeWebMediaPlayerImpl();
+ LoadAndWaitForReadyState(kAudioOnlyTestFile,
+ blink::WebMediaPlayer::kReadyStateHaveFutureData);
+ wmpi_->SetRate(1.0);
+ Play();
- testing::Sequence s;
EXPECT_CALL(delegate_,
DidPlayerMediaPositionStateChange(
delegate_.player_id(),
- media_session::MediaPosition(0.0, kAudioOnlyTestFileDuration,
- base::TimeDelta())))
- .InSequence(s);
- EXPECT_CALL(delegate_, DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(
- 0.0, kInfiniteDuration, base::TimeDelta())))
- .InSequence(s);
-
- LoadAndWaitForReadyState(kAudioOnlyTestFile,
- blink::WebMediaPlayer::kReadyStateHaveCurrentData);
-
- SetDuration(kInfiniteDuration);
+ media_session::MediaPosition(1.0, kAudioOnlyTestFileDuration,
+ base::TimeDelta())));
+ wmpi_->OnTimeUpdate();
}
-TEST_F(WebMediaPlayerImplTest, MediaPositionState_PlayPauseSetRate) {
+TEST_F(WebMediaPlayerImplTest, MediaPositionState_Paused) {
InitializeWebMediaPlayerImpl();
+ LoadAndWaitForReadyState(kAudioOnlyTestFile,
+ blink::WebMediaPlayer::kReadyStateHaveFutureData);
+ wmpi_->SetRate(1.0);
- testing::Sequence s;
+ // The effective playback rate is 0.0 while paused.
EXPECT_CALL(delegate_,
DidPlayerMediaPositionStateChange(
delegate_.player_id(),
media_session::MediaPosition(0.0, kAudioOnlyTestFileDuration,
- base::TimeDelta())))
- .InSequence(s);
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(delegate_.player_id(), _))
- .InSequence(s)
- .WillOnce([](auto id, auto position) {
- EXPECT_EQ(1.0, position.playback_rate());
- EXPECT_EQ(kAudioOnlyTestFileDuration, position.duration());
- EXPECT_EQ(base::TimeDelta(),
- position.GetPositionAtTime(position.last_updated_time()));
- });
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(0.0, kAudioOnlyTestFileDuration,
- base::TimeDelta())))
- .InSequence(s);
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(delegate_.player_id(), _))
- .InSequence(s)
- .WillOnce([](auto id, auto position) {
- EXPECT_EQ(2.0, position.playback_rate());
- EXPECT_EQ(kAudioOnlyTestFileDuration, position.duration());
- EXPECT_EQ(base::TimeDelta(),
- position.GetPositionAtTime(position.last_updated_time()));
- });
-
- wmpi_->SetRate(1.0);
- LoadAndWaitForReadyState(kAudioOnlyTestFile,
- blink::WebMediaPlayer::kReadyStateHaveCurrentData);
-
- // Play will set the playback rate to 1.0.
- Play();
-
- // Pause will set the playback rate to 0.0.
- Pause();
-
- // SetRate will set the playback rate, but it will not affect the position
- // state until we have started playing again.
- wmpi_->SetRate(2.0);
- Play();
+ base::TimeDelta())));
+ wmpi_->OnTimeUpdate();
}
-TEST_F(WebMediaPlayerImplTest, MediaPositionState_Underflow) {
+TEST_F(WebMediaPlayerImplTest, MediaPositionState_PositionChange) {
InitializeWebMediaPlayerImpl();
-
- testing::Sequence sequence;
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(delegate_.player_id(), _))
- .InSequence(sequence)
- .WillOnce([](auto id, auto position) {
- EXPECT_EQ(0.0, position.playback_rate());
- });
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(delegate_.player_id(), _))
- .InSequence(sequence)
- .WillOnce([](auto id, auto position) {
- EXPECT_EQ(1.0, position.playback_rate());
- });
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(delegate_.player_id(), _))
- .InSequence(sequence)
- .WillOnce([](auto id, auto position) {
- EXPECT_EQ(0.0, position.playback_rate());
- });
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(delegate_.player_id(), _))
- .InSequence(sequence)
- .WillOnce([](auto id, auto position) {
- EXPECT_EQ(1.0, position.playback_rate());
- });
-
- wmpi_->SetRate(1.0);
LoadAndWaitForReadyState(kAudioOnlyTestFile,
- blink::WebMediaPlayer::kReadyStateHaveCurrentData);
- // Play will set the playback rate to 1.0.
+ blink::WebMediaPlayer::kReadyStateHaveFutureData);
+ wmpi_->SetRate(0.5);
Play();
- // Underflow will set the playback rate to 0.0.
- SetReadyState(blink::WebMediaPlayer::kReadyStateHaveCurrentData);
+ testing::Sequence sequence;
+ EXPECT_CALL(delegate_, DidPlayerMediaPositionStateChange(
+ delegate_.player_id(),
+ media_session::MediaPosition(
+ 0.0, kAudioOnlyTestFileDuration,
+ base::TimeDelta::FromSecondsD(0.1))))
+ .InSequence(sequence);
+ wmpi_->Seek(0.1);
+ wmpi_->OnTimeUpdate();
- // Leaving the underflow state will restore the playback rate of 1.0.
+ // If we load enough data to resume playback the position should be updated.
+ EXPECT_CALL(delegate_, DidPlayerMediaPositionStateChange(
+ delegate_.player_id(),
+ media_session::MediaPosition(
+ 0.5, kAudioOnlyTestFileDuration,
+ base::TimeDelta::FromSecondsD(0.1))))
+ .InSequence(sequence);
SetReadyState(blink::WebMediaPlayer::kReadyStateHaveFutureData);
+ wmpi_->OnTimeUpdate();
+
+ // No media time progress -> no MediaPositionState change.
+ wmpi_->OnTimeUpdate();
}
-TEST_F(WebMediaPlayerImplTest, MediaPositionState_Seeking) {
+TEST_F(WebMediaPlayerImplTest, MediaPositionState_Underflow) {
InitializeWebMediaPlayerImpl();
+ LoadAndWaitForReadyState(kAudioOnlyTestFile,
+ blink::WebMediaPlayer::kReadyStateHaveFutureData);
+ wmpi_->SetRate(1.0);
+ Play();
- testing::Sequence s;
+ // Underflow will set the effective playback rate to 0.0.
EXPECT_CALL(delegate_,
DidPlayerMediaPositionStateChange(
delegate_.player_id(),
media_session::MediaPosition(0.0, kAudioOnlyTestFileDuration,
- base::TimeDelta())))
- .InSequence(s);
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(delegate_.player_id(), _))
- .InSequence(s)
- .WillOnce([](auto id, auto position) {
- EXPECT_EQ(1.0, position.playback_rate());
- EXPECT_EQ(kAudioOnlyTestFileDuration, position.duration());
- EXPECT_EQ(base::TimeDelta(),
- position.GetPositionAtTime(position.last_updated_time()));
- });
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(delegate_.player_id(), _))
- .InSequence(s)
- .WillOnce([](auto id, auto position) {
- EXPECT_EQ(0.0, position.playback_rate());
- EXPECT_EQ(kAudioOnlyTestFileDuration, position.duration());
- EXPECT_EQ(base::TimeDelta(),
- position.GetPositionAtTime(position.last_updated_time()));
- });
- EXPECT_CALL(delegate_, DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(
- 0.0, kAudioOnlyTestFileDuration,
- base::TimeDelta::FromMilliseconds(100))))
- .InSequence(s);
-
- wmpi_->SetRate(1.0);
- LoadAndWaitForReadyState(kAudioOnlyTestFile,
- blink::WebMediaPlayer::kReadyStateHaveCurrentData);
- Play();
-
- // Seek forward 100ms will result in the position to be updated.
- wmpi_->Seek(0.1);
-
- // If we trigger another update to the position state the new position should
- // be used.
- Pause();
+ base::TimeDelta())));
+ SetReadyState(blink::WebMediaPlayer::kReadyStateHaveCurrentData);
+ wmpi_->OnTimeUpdate();
}
TEST_F(WebMediaPlayerImplTest, NoStreams) {
@@ -2194,7 +2113,8 @@ TEST_F(WebMediaPlayerImplTest, MemDumpReporting) {
}
// Verify that a demuxer override is used when specified.
-TEST_F(WebMediaPlayerImplTest, DemuxerOverride) {
+// TODO(https://crbug.com/1084476): This test is flaky.
+TEST_F(WebMediaPlayerImplTest, DISABLED_DemuxerOverride) {
std::unique_ptr<MockDemuxer> demuxer =
std::make_unique<NiceMock<MockDemuxer>>();
StrictMock<MockDemuxerStream> stream(DemuxerStream::AUDIO);
diff --git a/chromium/media/blink/webmediaplayer_params.cc b/chromium/media/blink/webmediaplayer_params.cc
index a9dede964a4..e1a715c18f3 100644
--- a/chromium/media/blink/webmediaplayer_params.cc
+++ b/chromium/media/blink/webmediaplayer_params.cc
@@ -33,7 +33,6 @@ WebMediaPlayerParams::WebMediaPlayerParams(
bool is_background_suspend_enabled,
bool is_background_video_playback_enabled,
bool is_background_video_track_optimization_supported,
- bool is_remoting_renderer_enabled,
std::unique_ptr<Demuxer> demuxer_override,
std::unique_ptr<PowerStatusHelper> power_status_helper)
: defer_load_cb_(defer_load_cb),
@@ -58,7 +57,6 @@ WebMediaPlayerParams::WebMediaPlayerParams(
is_background_video_playback_enabled),
is_background_video_track_optimization_supported_(
is_background_video_track_optimization_supported),
- is_remoting_renderer_enabled_(is_remoting_renderer_enabled),
demuxer_override_(std::move(demuxer_override)),
power_status_helper_(std::move(power_status_helper)) {}
diff --git a/chromium/media/blink/webmediaplayer_params.h b/chromium/media/blink/webmediaplayer_params.h
index 4cc8756d4e2..70db6159b12 100644
--- a/chromium/media/blink/webmediaplayer_params.h
+++ b/chromium/media/blink/webmediaplayer_params.h
@@ -85,7 +85,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
bool is_background_suspend_enabled,
bool is_background_video_play_enabled,
bool is_background_video_track_optimization_supported,
- bool is_remoting_renderer_enabled,
std::unique_ptr<Demuxer> demuxer_override,
std::unique_ptr<PowerStatusHelper> power_status_helper);
@@ -170,10 +169,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
return is_background_video_track_optimization_supported_;
}
- bool IsRemotingRendererEnabled() const {
- return is_remoting_renderer_enabled_;
- }
-
std::unique_ptr<Demuxer> TakeDemuxerOverride();
std::unique_ptr<PowerStatusHelper> TakePowerStatusHelper() {
@@ -208,8 +203,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerParams {
bool is_background_video_playback_enabled_ = true;
// Whether background video optimization is supported on current platform.
bool is_background_video_track_optimization_supported_ = true;
- // Whether the media in this frame is a remoting media.
- bool is_remoting_renderer_enabled_ = false;
// Optional custom demuxer to use instead of the standard demuxers.
std::unique_ptr<Demuxer> demuxer_override_;
diff --git a/chromium/media/blink/websourcebuffer_impl.cc b/chromium/media/blink/websourcebuffer_impl.cc
index 20d841bb4fb..257df4b49ef 100644
--- a/chromium/media/blink/websourcebuffer_impl.cc
+++ b/chromium/media/blink/websourcebuffer_impl.cc
@@ -66,7 +66,7 @@ WebSourceBufferImpl::WebSourceBufferImpl(const std::string& id,
ChunkDemuxer* demuxer)
: id_(id),
demuxer_(demuxer),
- client_(NULL),
+ client_(nullptr),
append_window_end_(kInfiniteDuration) {
DCHECK(demuxer_);
demuxer_->SetTracksWatcher(
@@ -136,7 +136,7 @@ bool WebSourceBufferImpl::Append(const unsigned char* data,
append_window_end_, &timestamp_offset_);
// Coded frame processing may update the timestamp offset. If the caller
- // provides a non-NULL |timestamp_offset| and frame processing changes the
+ // provides a non-nullptr |timestamp_offset| and frame processing changes the
// timestamp offset, report the new offset to the caller. Do not update the
// caller's offset otherwise, to preserve any pre-existing value that may have
// more than microsecond precision.
@@ -200,8 +200,8 @@ void WebSourceBufferImpl::SetAppendWindowEnd(double end) {
void WebSourceBufferImpl::RemovedFromMediaSource() {
demuxer_->RemoveId(id_);
- demuxer_ = NULL;
- client_ = NULL;
+ demuxer_ = nullptr;
+ client_ = nullptr;
}
blink::WebMediaPlayer::TrackType mediaTrackTypeToBlink(MediaTrack::Type type) {
diff --git a/chromium/media/capabilities/video_decode_stats_db.h b/chromium/media/capabilities/video_decode_stats_db.h
index 8ab35fbb851..fc42e7842af 100644
--- a/chromium/media/capabilities/video_decode_stats_db.h
+++ b/chromium/media/capabilities/video_decode_stats_db.h
@@ -9,7 +9,7 @@
#include <string>
#include "base/callback_forward.h"
-#include "base/logging.h"
+#include "base/check.h"
#include "base/macros.h"
#include "media/base/media_export.h"
#include "media/base/video_codecs.h"
diff --git a/chromium/media/capture/BUILD.gn b/chromium/media/capture/BUILD.gn
index 06e45ca2886..f3b9f26f051 100644
--- a/chromium/media/capture/BUILD.gn
+++ b/chromium/media/capture/BUILD.gn
@@ -269,6 +269,7 @@ jumbo_component("capture_lib") {
"video/chromeos/camera_hal_dispatcher_impl.h",
"video/chromeos/camera_metadata_utils.cc",
"video/chromeos/camera_metadata_utils.h",
+ "video/chromeos/capture_metadata_dispatcher.h",
"video/chromeos/display_rotation_observer.cc",
"video/chromeos/display_rotation_observer.h",
"video/chromeos/gpu_memory_buffer_tracker.cc",
diff --git a/chromium/media/capture/content/android/thread_safe_capture_oracle.cc b/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
index 27a1515b9e7..498f65f10d8 100644
--- a/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
+++ b/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
@@ -221,16 +221,11 @@ void ThreadSafeCaptureOracle::DidCaptureFrame(
if (!should_deliver_frame || !client_)
return;
- frame->metadata()->SetDouble(VideoFrameMetadata::FRAME_RATE,
- params_.requested_format.frame_rate);
- frame->metadata()->SetTimeTicks(VideoFrameMetadata::CAPTURE_BEGIN_TIME,
- capture->begin_time);
- frame->metadata()->SetTimeTicks(VideoFrameMetadata::CAPTURE_END_TIME,
- base::TimeTicks::Now());
- frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- capture->frame_duration);
- frame->metadata()->SetTimeTicks(VideoFrameMetadata::REFERENCE_TIME,
- reference_time);
+ frame->metadata()->frame_rate = params_.requested_format.frame_rate;
+ frame->metadata()->capture_begin_time = capture->begin_time;
+ frame->metadata()->capture_end_time = base::TimeTicks::Now();
+ frame->metadata()->frame_duration = capture->frame_duration;
+ frame->metadata()->reference_time = reference_time;
media::VideoCaptureFormat format(frame->coded_size(),
params_.requested_format.frame_rate,
diff --git a/chromium/media/capture/content/video_capture_oracle.cc b/chromium/media/capture/content/video_capture_oracle.cc
index 2a1e3c12801..e92d28ec940 100644
--- a/chromium/media/capture/content/video_capture_oracle.cc
+++ b/chromium/media/capture/content/video_capture_oracle.cc
@@ -9,6 +9,7 @@
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/format_macros.h"
+#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "base/strings/stringprintf.h"
diff --git a/chromium/media/capture/mojom/BUILD.gn b/chromium/media/capture/mojom/BUILD.gn
index a2aaecee8a9..2eddaec9ea8 100644
--- a/chromium/media/capture/mojom/BUILD.gn
+++ b/chromium/media/capture/mojom/BUILD.gn
@@ -13,6 +13,7 @@ mojom("video_capture") {
public_deps = [
"//gpu/ipc/common:interfaces",
+ "//media//mojo/mojom:mojom",
"//mojo/public/mojom/base",
"//ui/gfx/geometry/mojom",
"//ui/gfx/mojom",
diff --git a/chromium/media/capture/mojom/video_capture_types.mojom b/chromium/media/capture/mojom/video_capture_types.mojom
index eaeff60009f..ce0943d046c 100644
--- a/chromium/media/capture/mojom/video_capture_types.mojom
+++ b/chromium/media/capture/mojom/video_capture_types.mojom
@@ -5,9 +5,9 @@
module media.mojom;
import "gpu/ipc/common/mailbox_holder.mojom";
+import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/shared_memory.mojom";
import "mojo/public/mojom/base/time.mojom";
-import "mojo/public/mojom/base/values.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";
import "ui/gfx/mojom/buffer_types.mojom";
import "ui/gfx/mojom/color_space.mojom";
@@ -279,7 +279,7 @@ struct PlaneStrides {
struct VideoFrameInfo{
mojo_base.mojom.TimeDelta timestamp;
- mojo_base.mojom.DictionaryValue metadata;
+ VideoFrameMetadata metadata;
VideoCapturePixelFormat pixel_format;
gfx.mojom.Size coded_size;
gfx.mojom.Rect visible_rect;
@@ -293,6 +293,10 @@ struct VideoFrameInfo{
PlaneStrides? strides;
};
+// Represents information about a capture device.
+// |device_id| represents a unique id of a physical device. Since the same
+// physical device may be accessible through different APIs |capture_api|
+// disambiguates the API.
struct VideoCaptureDeviceDescriptor {
string display_name;
string device_id;
@@ -300,8 +304,12 @@ struct VideoCaptureDeviceDescriptor {
VideoFacingMode facing_mode;
VideoCaptureApi capture_api;
VideoCaptureTransportType transport_type;
+ bool has_pan_tilt_zoom_supported;
+ bool pan_tilt_zoom_supported;
};
+// Bundles a VideoCaptureDeviceDescriptor with corresponding supported
+// video formats.
struct VideoCaptureDeviceInfo {
VideoCaptureDeviceDescriptor descriptor;
array<VideoCaptureFormat> supported_formats;
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
index a910706f1ac..d2f604851f8 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
@@ -1687,6 +1687,8 @@ bool StructTraits<media::mojom::VideoCaptureDeviceDescriptorDataView,
return false;
if (!data.ReadTransportType(&(output->transport_type)))
return false;
+ if (data.has_pan_tilt_zoom_supported())
+ output->set_pan_tilt_zoom_supported(data.pan_tilt_zoom_supported());
return true;
}
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.h b/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
index ebe98f6280b..548149da828 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.h
@@ -189,6 +189,16 @@ struct COMPONENT_EXPORT(MEDIA_CAPTURE_MOJOM_TRAITS)
return input.transport_type;
}
+ static bool has_pan_tilt_zoom_supported(
+ const media::VideoCaptureDeviceDescriptor& input) {
+ return input.pan_tilt_zoom_supported().has_value();
+ }
+
+ static bool pan_tilt_zoom_supported(
+ const media::VideoCaptureDeviceDescriptor& input) {
+ return input.pan_tilt_zoom_supported().value_or(false);
+ }
+
static bool Read(media::mojom::VideoCaptureDeviceDescriptorDataView data,
media::VideoCaptureDeviceDescriptor* output);
};
diff --git a/chromium/media/capture/run_all_unittests.cc b/chromium/media/capture/run_all_unittests.cc
index 70c36d5c855..3302a184ac3 100644
--- a/chromium/media/capture/run_all_unittests.cc
+++ b/chromium/media/capture/run_all_unittests.cc
@@ -5,6 +5,7 @@
#include <stdio.h>
#include "base/bind.h"
+#include "base/logging.h"
#include "base/message_loop/message_pump_type.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/test_suite.h"
diff --git a/chromium/media/capture/video/android/video_capture_device_android.cc b/chromium/media/capture/video/android/video_capture_device_android.cc
index b8897bf7a6f..e9a555e7da0 100644
--- a/chromium/media/capture/video/android/video_capture_device_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_android.cc
@@ -14,6 +14,7 @@
#include "base/numerics/safe_conversions.h"
#include "base/strings/string_number_conversions.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "base/trace_event/trace_event.h"
#include "media/capture/mojom/image_capture_types.h"
#include "media/capture/video/android/capture_jni_headers/VideoCapture_jni.h"
#include "media/capture/video/android/photo_capabilities.h"
diff --git a/chromium/media/capture/video/android/video_capture_device_factory_android.cc b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
index be9228c9c20..8e5cba8978e 100644
--- a/chromium/media/capture/video/android/video_capture_device_factory_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
@@ -83,7 +83,8 @@ void VideoCaptureDeviceFactoryAndroid::GetDeviceDescriptors(
display_name, device_id, "" /*model_id*/,
static_cast<VideoCaptureApi>(capture_api_type),
VideoCaptureTransportType::OTHER_TRANSPORT,
- static_cast<VideoFacingMode>(facing_mode));
+ static_cast<VideoFacingMode>(facing_mode),
+ /*pan_tilt_zoom_supported=*/false);
// We put user-facing devices to the front of the list in order to make
// them by-default preferred over environment-facing ones when no other
diff --git a/chromium/media/capture/video/chromeos/camera_buffer_factory.cc b/chromium/media/capture/video/chromeos/camera_buffer_factory.cc
index 5e3eceb98a7..5e497c1cc19 100644
--- a/chromium/media/capture/video/chromeos/camera_buffer_factory.cc
+++ b/chromium/media/capture/video/chromeos/camera_buffer_factory.cc
@@ -4,6 +4,7 @@
#include "media/capture/video/chromeos/camera_buffer_factory.h"
+#include "base/stl_util.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
namespace media {
@@ -14,24 +15,27 @@ CameraBufferFactory::~CameraBufferFactory() = default;
std::unique_ptr<gfx::GpuMemoryBuffer>
CameraBufferFactory::CreateGpuMemoryBuffer(const gfx::Size& size,
- gfx::BufferFormat format) {
+ gfx::BufferFormat format,
+ gfx::BufferUsage usage) {
gpu::GpuMemoryBufferManager* buf_manager =
VideoCaptureDeviceFactoryChromeOS::GetBufferManager();
if (!buf_manager) {
LOG(ERROR) << "GpuMemoryBufferManager not set";
return std::unique_ptr<gfx::GpuMemoryBuffer>();
}
- return buf_manager->CreateGpuMemoryBuffer(
- size, format, GetBufferUsage(format), gpu::kNullSurfaceHandle);
+ return buf_manager->CreateGpuMemoryBuffer(size, format, usage,
+ gpu::kNullSurfaceHandle);
}
// There's no good way to resolve the HAL pixel format to the platform-specific
// DRM format, other than to actually allocate the buffer and see if the
// allocation succeeds.
ChromiumPixelFormat CameraBufferFactory::ResolveStreamBufferFormat(
- cros::mojom::HalPixelFormat hal_format) {
- if (resolved_hal_formats_.find(hal_format) != resolved_hal_formats_.end()) {
- return resolved_hal_formats_[hal_format];
+ cros::mojom::HalPixelFormat hal_format,
+ gfx::BufferUsage usage) {
+ const auto key = std::make_pair(hal_format, usage);
+ if (base::Contains(resolved_format_usages_, key)) {
+ return resolved_format_usages_[key];
}
ChromiumPixelFormat kUnsupportedFormat{PIXEL_FORMAT_UNKNOWN,
@@ -44,25 +48,13 @@ ChromiumPixelFormat CameraBufferFactory::ResolveStreamBufferFormat(
}
for (const auto& f : cr_formats) {
auto buffer = CreateGpuMemoryBuffer(
- gfx::Size(kDummyBufferWidth, kDummyBufferHeight), f.gfx_format);
+ gfx::Size(kDummyBufferWidth, kDummyBufferHeight), f.gfx_format, usage);
if (buffer) {
- resolved_hal_formats_[hal_format] = f;
+ resolved_format_usages_[key] = f;
return f;
}
}
return kUnsupportedFormat;
}
-// static
-gfx::BufferUsage CameraBufferFactory::GetBufferUsage(gfx::BufferFormat format) {
- switch (format) {
- case gfx::BufferFormat::R_8:
- // Usage for JPEG capture buffer backed by R8 pixel buffer.
- return gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE;
- default:
- // Default usage for YUV camera buffer.
- return gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE;
- }
-}
-
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_buffer_factory.h b/chromium/media/capture/video/chromeos/camera_buffer_factory.h
index ded3d310755..a2c2a2cb015 100644
--- a/chromium/media/capture/video/chromeos/camera_buffer_factory.h
+++ b/chromium/media/capture/video/chromeos/camera_buffer_factory.h
@@ -5,8 +5,8 @@
#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_BUFFER_FACTORY_H_
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_BUFFER_FACTORY_H_
+#include <map>
#include <memory>
-#include <unordered_map>
#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video/chromeos/pixel_format_utils.h"
@@ -24,16 +24,17 @@ class CAPTURE_EXPORT CameraBufferFactory {
virtual std::unique_ptr<gfx::GpuMemoryBuffer> CreateGpuMemoryBuffer(
const gfx::Size& size,
- gfx::BufferFormat format);
+ gfx::BufferFormat format,
+ gfx::BufferUsage usage);
virtual ChromiumPixelFormat ResolveStreamBufferFormat(
- cros::mojom::HalPixelFormat hal_format);
-
- static gfx::BufferUsage GetBufferUsage(gfx::BufferFormat format);
+ cros::mojom::HalPixelFormat hal_format,
+ gfx::BufferUsage usage);
private:
- std::unordered_map<cros::mojom::HalPixelFormat, ChromiumPixelFormat>
- resolved_hal_formats_;
+ std::map<std::pair<cros::mojom::HalPixelFormat, gfx::BufferUsage>,
+ ChromiumPixelFormat>
+ resolved_format_usages_;
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_device_context.cc b/chromium/media/capture/video/chromeos/camera_device_context.cc
index 03e98623ed5..d3312f60a47 100644
--- a/chromium/media/capture/video/chromeos/camera_device_context.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_context.cc
@@ -50,8 +50,8 @@ void CameraDeviceContext::SubmitCapturedVideoCaptureBuffer(
base::TimeDelta timestamp) {
VideoFrameMetadata metadata;
// All frames are pre-rotated to the display orientation.
- metadata.SetRotation(VideoFrameMetadata::Key::ROTATION,
- VideoRotation::VIDEO_ROTATION_0);
+ metadata.rotation = VideoRotation::VIDEO_ROTATION_0;
+
// TODO: Figure out the right color space for the camera frame. We may need
// to populate the camera metadata with the color space reported by the V4L2
// device.
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index c361ae56ad9..cafc48e6c2e 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -32,6 +32,21 @@ namespace media {
namespace {
+constexpr char kBrightness[] = "com.google.control.brightness";
+constexpr char kBrightnessRange[] = "com.google.control.brightnessRange";
+constexpr char kContrast[] = "com.google.control.contrast";
+constexpr char kContrastRange[] = "com.google.control.contrastRange";
+constexpr char kPan[] = "com.google.control.pan";
+constexpr char kPanRange[] = "com.google.control.panRange";
+constexpr char kSaturation[] = "com.google.control.saturation";
+constexpr char kSaturationRange[] = "com.google.control.saturationRange";
+constexpr char kSharpness[] = "com.google.control.sharpness";
+constexpr char kSharpnessRange[] = "com.google.control.sharpnessRange";
+constexpr char kTilt[] = "com.google.control.tilt";
+constexpr char kTiltRange[] = "com.google.control.tiltRange";
+constexpr char kZoom[] = "com.google.control.zoom";
+constexpr char kZoomRange[] = "com.google.control.zoomRange";
+
std::pair<int32_t, int32_t> GetTargetFrameRateRange(
const cros::mojom::CameraMetadataPtr& static_metadata,
int target_frame_rate,
@@ -216,6 +231,9 @@ class CameraDeviceDelegate::StreamCaptureInterfaceImpl final
const base::WeakPtr<CameraDeviceDelegate> camera_device_delegate_;
};
+ResultMetadata::ResultMetadata() = default;
+ResultMetadata::~ResultMetadata() = default;
+
CameraDeviceDelegate::CameraDeviceDelegate(
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
@@ -233,6 +251,7 @@ void CameraDeviceDelegate::AllocateAndStart(
CameraDeviceContext* device_context) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ got_result_metadata_ = false;
chrome_capture_params_ = params;
device_context_ = device_context;
device_context_->SetState(CameraDeviceContext::State::kStarting);
@@ -259,6 +278,10 @@ void CameraDeviceDelegate::AllocateAndStart(
FROM_HERE, "Camera is missing required sensor orientation info");
return;
}
+ auto rect = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ active_array_size_ = gfx::Rect(rect[0], rect[1], rect[2], rect[3]);
device_context_->SetSensorOrientation(sensor_orientation[0]);
// |device_ops_| is bound after the BindNewPipeAndPassReceiver call.
@@ -324,38 +347,13 @@ void CameraDeviceDelegate::GetPhotoState(
VideoCaptureDevice::GetPhotoStateCallback callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- auto photo_state = mojo::CreateEmptyPhotoState();
-
- if (!device_context_ ||
- (device_context_->GetState() !=
- CameraDeviceContext::State::kStreamConfigured &&
- device_context_->GetState() != CameraDeviceContext::State::kCapturing)) {
- std::move(callback).Run(std::move(photo_state));
- return;
- }
-
- std::vector<gfx::Size> blob_resolutions;
- GetStreamResolutions(
- static_metadata_, cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT,
- cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB, &blob_resolutions);
- if (blob_resolutions.empty()) {
- std::move(callback).Run(std::move(photo_state));
+ if (!got_result_metadata_) {
+ get_photo_state_queue_.push_back(
+ base::BindOnce(&CameraDeviceDelegate::DoGetPhotoState,
+ weak_ptr_factory_.GetWeakPtr(), std::move(callback)));
return;
}
-
- // Sets the correct range of min/max resolution in order to bypass checks that
- // the resolution caller request should fall within the range when taking
- // photos. And since we are not actually use the mechanism to get other
- // resolutions, we set the step to 0.0 here.
- photo_state->width->current = current_blob_resolution_.width();
- photo_state->width->min = blob_resolutions.front().width();
- photo_state->width->max = blob_resolutions.back().width();
- photo_state->width->step = 0.0;
- photo_state->height->current = current_blob_resolution_.height();
- photo_state->height->min = blob_resolutions.front().height();
- photo_state->height->max = blob_resolutions.back().height();
- photo_state->height->step = 0.0;
- std::move(callback).Run(std::move(photo_state));
+ DoGetPhotoState(std::move(callback));
}
// On success, invokes |callback| with value |true|. On failure, drops
@@ -375,6 +373,55 @@ void CameraDeviceDelegate::SetPhotoOptions(
return;
}
+ auto set_vendor_int = [&](const std::string& name, bool has_field,
+ double value) {
+ if (!has_field) {
+ return;
+ }
+ const VendorTagInfo* info =
+ camera_hal_delegate_->GetVendorTagInfoByName(name);
+ if (info == nullptr)
+ return;
+ std::vector<uint8_t> temp(sizeof(int32_t));
+ auto* temp_ptr = reinterpret_cast<int32_t*>(temp.data());
+ *temp_ptr = value;
+ request_manager_->SetRepeatingCaptureMetadata(info->tag, info->type, 1,
+ std::move(temp));
+ };
+ set_vendor_int(kBrightness, settings->has_brightness, settings->brightness);
+ set_vendor_int(kContrast, settings->has_contrast, settings->contrast);
+ set_vendor_int(kPan, settings->has_pan, settings->pan);
+ set_vendor_int(kSaturation, settings->has_saturation, settings->saturation);
+ set_vendor_int(kSharpness, settings->has_sharpness, settings->sharpness);
+ set_vendor_int(kTilt, settings->has_tilt, settings->tilt);
+ if (settings->has_zoom && use_digital_zoom_) {
+ if (settings->zoom == 1) {
+ request_manager_->UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_SCALER_CROP_REGION);
+ VLOG(1) << "zoom ratio 1";
+ } else {
+ double zoom_factor = sqrt(settings->zoom);
+ int32_t crop_width = std::round(active_array_size_.width() / zoom_factor);
+ int32_t crop_height =
+ std::round(active_array_size_.height() / zoom_factor);
+ // crop from center
+ int32_t region[4] = {(active_array_size_.width() - crop_width) / 2,
+ (active_array_size_.height() - crop_height) / 2,
+ crop_width, crop_height};
+
+ request_manager_->SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_SCALER_CROP_REGION,
+ cros::mojom::EntryType::TYPE_INT32, 4,
+ SerializeMetadataValueFromSpan(base::make_span(region, 4)));
+
+ VLOG(1) << "zoom ratio:" << settings->zoom << " scaler.crop.region("
+ << region[0] << "," << region[1] << "," << region[2] << ","
+ << region[3] << ")";
+ }
+ } else {
+ set_vendor_int(kZoom, settings->has_zoom, settings->zoom);
+ }
+
bool is_resolution_specified = settings->has_width && settings->has_height;
bool should_reconfigure_streams =
is_resolution_specified && (current_blob_resolution_.IsEmpty() ||
@@ -483,6 +530,9 @@ void CameraDeviceDelegate::OnClosed(int32_t result) {
device_context_->LogToClient(std::string("Failed to close device: ") +
base::safe_strerror(-result));
}
+ if (request_manager_) {
+ request_manager_->RemoveResultMetadataObserver(this);
+ }
ResetMojoInterface();
device_context_ = nullptr;
current_blob_resolution_.SetSize(0, 0);
@@ -548,6 +598,7 @@ void CameraDeviceDelegate::Initialize() {
device_ops_->Initialize(
std::move(callback_ops),
base::BindOnce(&CameraDeviceDelegate::OnInitialized, GetWeakPtr()));
+ request_manager_->AddResultMetadataObserver(this);
}
void CameraDeviceDelegate::OnInitialized(int32_t result) {
@@ -607,7 +658,8 @@ void CameraDeviceDelegate::ConfigureStreams(
chrome_capture_params_.requested_format.frame_size.height();
preview_stream->format =
cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
- preview_stream->usage = cros::mojom::GRALLOC_USAGE_HW_COMPOSER;
+ preview_stream->usage = cros::mojom::GRALLOC_USAGE_HW_COMPOSER |
+ cros::mojom::GRALLOC_USAGE_HW_VIDEO_ENCODER;
preview_stream->data_space = 0;
preview_stream->rotation =
cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
@@ -1030,20 +1082,168 @@ bool CameraDeviceDelegate::SetPointsOfInterest(
}();
// TODO(shik): Respect to SCALER_CROP_REGION, which is unused now.
-
- auto active_array_size = [&]() {
- auto rect = GetMetadataEntryAsSpan<int32_t>(
- static_metadata_,
- cros::mojom::CameraMetadataTag::ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
- // (xmin, ymin, width, height)
- return gfx::Rect(rect[0], rect[1], rect[2], rect[3]);
- }();
-
- x *= active_array_size.width() - 1;
- y *= active_array_size.height() - 1;
+ x *= active_array_size_.width() - 1;
+ y *= active_array_size_.height() - 1;
gfx::Point point = {static_cast<int>(x), static_cast<int>(y)};
camera_3a_controller_->SetPointOfInterest(point);
return true;
}
+mojom::RangePtr CameraDeviceDelegate::GetControlRangeByVendorTagName(
+ const std::string& range_name,
+ const base::Optional<int32_t>& current) {
+ const VendorTagInfo* info =
+ camera_hal_delegate_->GetVendorTagInfoByName(range_name);
+ if (info == nullptr) {
+ return mojom::Range::New();
+ }
+ auto static_val =
+ GetMetadataEntryAsSpan<int32_t>(static_metadata_, info->tag);
+ if (static_val.size() != 3) {
+ return mojom::Range::New();
+ }
+
+ if (!current) {
+ return mojom::Range::New();
+ }
+
+ mojom::RangePtr range = mojom::Range::New();
+
+ range->min = static_val[0];
+ range->max = static_val[1];
+ range->step = static_val[2];
+ range->current = current.value();
+
+ return range;
+}
+
+void CameraDeviceDelegate::OnResultMetadataAvailable(
+ const cros::mojom::CameraMetadataPtr& result_metadata) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ auto get_vendor_int =
+ [&](const std::string& name,
+ const cros::mojom::CameraMetadataPtr& result_metadata,
+ base::Optional<int32_t>* returned_value) {
+ returned_value->reset();
+ const VendorTagInfo* info =
+ camera_hal_delegate_->GetVendorTagInfoByName(name);
+ if (info == nullptr)
+ return;
+ auto val = GetMetadataEntryAsSpan<int32_t>(result_metadata, info->tag);
+ if (val.size() == 1)
+ *returned_value = val[0];
+ };
+
+ get_vendor_int(kBrightness, result_metadata, &result_metadata_.brightness);
+ get_vendor_int(kContrast, result_metadata, &result_metadata_.contrast);
+ get_vendor_int(kPan, result_metadata, &result_metadata_.pan);
+ get_vendor_int(kSaturation, result_metadata, &result_metadata_.saturation);
+ get_vendor_int(kSharpness, result_metadata, &result_metadata_.sharpness);
+ get_vendor_int(kTilt, result_metadata, &result_metadata_.tilt);
+ get_vendor_int(kZoom, result_metadata, &result_metadata_.zoom);
+
+ result_metadata_.scaler_crop_region.reset();
+ auto rect = GetMetadataEntryAsSpan<int32_t>(
+ result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_SCALER_CROP_REGION);
+ if (rect.size() == 4) {
+ result_metadata_.scaler_crop_region =
+ gfx::Rect(rect[0], rect[1], rect[2], rect[3]);
+ }
+
+ if (!got_result_metadata_) {
+ for (auto& request : get_photo_state_queue_)
+ ipc_task_runner_->PostTask(FROM_HERE, std::move(request));
+ get_photo_state_queue_.clear();
+ got_result_metadata_ = true;
+ }
+}
+
+void CameraDeviceDelegate::DoGetPhotoState(
+ VideoCaptureDevice::GetPhotoStateCallback callback) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ auto photo_state = mojo::CreateEmptyPhotoState();
+
+ if (!device_context_ ||
+ (device_context_->GetState() !=
+ CameraDeviceContext::State::kStreamConfigured &&
+ device_context_->GetState() != CameraDeviceContext::State::kCapturing)) {
+ std::move(callback).Run(std::move(photo_state));
+ return;
+ }
+
+ std::vector<gfx::Size> blob_resolutions;
+ GetStreamResolutions(
+ static_metadata_, cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT,
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_BLOB, &blob_resolutions);
+ if (blob_resolutions.empty()) {
+ std::move(callback).Run(std::move(photo_state));
+ return;
+ }
+
+ // Sets the correct range of min/max resolution in order to bypass checks that
+ // the resolution caller request should fall within the range when taking
+ // photos. And since we are not actually use the mechanism to get other
+ // resolutions, we set the step to 0.0 here.
+ photo_state->width->current = current_blob_resolution_.width();
+ photo_state->width->min = blob_resolutions.front().width();
+ photo_state->width->max = blob_resolutions.back().width();
+ photo_state->width->step = 0.0;
+ photo_state->height->current = current_blob_resolution_.height();
+ photo_state->height->min = blob_resolutions.front().height();
+ photo_state->height->max = blob_resolutions.back().height();
+ photo_state->height->step = 0.0;
+
+ photo_state->brightness = GetControlRangeByVendorTagName(
+ kBrightnessRange, result_metadata_.brightness);
+ photo_state->contrast =
+ GetControlRangeByVendorTagName(kContrastRange, result_metadata_.contrast);
+ photo_state->pan =
+ GetControlRangeByVendorTagName(kPanRange, result_metadata_.pan);
+ photo_state->saturation = GetControlRangeByVendorTagName(
+ kSaturationRange, result_metadata_.saturation);
+ photo_state->sharpness = GetControlRangeByVendorTagName(
+ kSharpnessRange, result_metadata_.sharpness);
+ photo_state->tilt =
+ GetControlRangeByVendorTagName(kTiltRange, result_metadata_.tilt);
+
+ // For zoom part, we check the scaler.availableMaxDigitalZoom first, if there
+ // is no metadata or the value is one we use zoom vendor tag.
+ //
+ // https://w3c.github.io/mediacapture-image/#zoom
+ //
+ // scaler.availableMaxDigitalZoom:
+ // We use area ratio for this type zoom.
+ //
+ // Vendor tag zoom:
+ // It is used by UVC camera usually.
+ // The zoom unit is driver-specific for V4L2_CID_ZOOM_ABSOLUTE.
+ // https://www.kernel.org/doc/html/latest/media/uapi/v4l/ext-ctrls-camera.html
+ auto max_digital_zoom = GetMetadataEntryAsSpan<float>(
+ static_metadata_, cros::mojom::CameraMetadataTag::
+ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+ if (max_digital_zoom.size() == 1 && max_digital_zoom[0] > 1 &&
+ result_metadata_.scaler_crop_region) {
+ photo_state->zoom->min = 1;
+ photo_state->zoom->max = max_digital_zoom[0] * max_digital_zoom[0];
+ photo_state->zoom->step = 0.1;
+ photo_state->zoom->current =
+ (active_array_size_.width() /
+ (float)result_metadata_.scaler_crop_region->width()) *
+ (active_array_size_.height() /
+ (float)result_metadata_.scaler_crop_region->height());
+ // get 0.1 precision
+ photo_state->zoom->current = round(photo_state->zoom->current * 10) / 10;
+ use_digital_zoom_ = true;
+ } else {
+ photo_state->zoom =
+ GetControlRangeByVendorTagName(kZoomRange, result_metadata_.zoom);
+ use_digital_zoom_ = false;
+ }
+
+ std::move(callback).Run(std::move(photo_state));
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.h b/chromium/media/capture/video/chromeos/camera_device_delegate.h
index a8aceeea4c0..d261b2b96cc 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.h
@@ -11,6 +11,7 @@
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "base/single_thread_task_runner.h"
+#include "media/capture/video/chromeos/capture_metadata_dispatcher.h"
#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "media/capture/video/video_capture_device.h"
@@ -35,6 +36,22 @@ enum class StreamType : uint64_t {
kUnknown,
};
+// The metadata might be large so clone a whole metadata might be relatively
+// expensive. We only keep the needed data by this structure.
+struct ResultMetadata {
+ ResultMetadata();
+ ~ResultMetadata();
+
+ base::Optional<int32_t> brightness;
+ base::Optional<int32_t> contrast;
+ base::Optional<int32_t> pan;
+ base::Optional<int32_t> saturation;
+ base::Optional<int32_t> sharpness;
+ base::Optional<int32_t> tilt;
+ base::Optional<int32_t> zoom;
+ base::Optional<gfx::Rect> scaler_crop_region;
+};
+
// Returns true if the given stream type is an input stream.
bool IsInputStream(StreamType stream_type);
@@ -71,7 +88,8 @@ class CAPTURE_EXPORT StreamCaptureInterface {
// AllocateAndStart of VideoCaptureDeviceArcChromeOS runs on. All the methods
// in CameraDeviceDelegate run on |ipc_task_runner_| and hence all the
// access to member variables is sequenced.
-class CAPTURE_EXPORT CameraDeviceDelegate final {
+class CAPTURE_EXPORT CameraDeviceDelegate final
+ : public CaptureMetadataDispatcher::ResultMetadataObserver {
public:
CameraDeviceDelegate(
VideoCaptureDeviceDescriptor device_descriptor,
@@ -79,7 +97,7 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
CameraAppDeviceImpl* camera_app_device);
- ~CameraDeviceDelegate();
+ ~CameraDeviceDelegate() final;
// Delegation methods for the VideoCaptureDevice interface.
void AllocateAndStart(const VideoCaptureParams& params,
@@ -175,6 +193,18 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
bool SetPointsOfInterest(
const std::vector<mojom::Point2DPtr>& points_of_interest);
+ // This function gets the TYPE_INT32[3] array of [max, min, step] from static
+ // metadata by |range_name| and current value of |current|.
+ mojom::RangePtr GetControlRangeByVendorTagName(
+ const std::string& range_name,
+ const base::Optional<int32_t>& current);
+
+ // CaptureMetadataDispatcher::ResultMetadataObserver implementation.
+ void OnResultMetadataAvailable(
+ const cros::mojom::CameraMetadataPtr& result_metadata) final;
+
+ void DoGetPhotoState(VideoCaptureDevice::GetPhotoStateCallback callback);
+
const VideoCaptureDeviceDescriptor device_descriptor_;
// Current configured resolution of BLOB stream.
@@ -208,6 +238,13 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
CameraAppDeviceImpl* camera_app_device_; // Weak.
+ // GetPhotoState requests waiting for |got_result_metadata_| to be served.
+ std::vector<base::OnceClosure> get_photo_state_queue_;
+ bool got_result_metadata_;
+ bool use_digital_zoom_;
+ ResultMetadata result_metadata_;
+ gfx::Rect active_array_size_;
+
base::WeakPtrFactory<CameraDeviceDelegate> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(CameraDeviceDelegate);
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index c0777cb92fa..3371f40ff01 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -249,6 +249,17 @@ class CameraDeviceDelegateTest : public ::testing::Test {
entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
static_metadata->entries->push_back(std::move(entry));
+ entry = cros::mojom::CameraMetadataEntry::New();
+ entry->index = 5;
+ entry->tag =
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE;
+ entry->type = cros::mojom::EntryType::TYPE_INT32;
+ entry->count = 4;
+ std::vector<int32_t> active_array_size = {0, 0, 1920, 1080};
+ as_int8 = reinterpret_cast<uint8_t*>(active_array_size.data());
+ entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
+ static_metadata->entries->push_back(std::move(entry));
+
switch (camera_id) {
case 0:
camera_info->facing = cros::mojom::CameraFacing::CAMERA_FACING_FRONT;
@@ -363,9 +374,10 @@ class CameraDeviceDelegateTest : public ::testing::Test {
Invoke(this, &CameraDeviceDelegateTest::ConfigureFakeStreams));
EXPECT_CALL(
mock_gpu_memory_buffer_manager_,
- CreateGpuMemoryBuffer(_, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
- gpu::kNullSurfaceHandle))
+ CreateGpuMemoryBuffer(
+ _, gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
@@ -379,10 +391,11 @@ class CameraDeviceDelegateTest : public ::testing::Test {
CreateFakeGpuMemoryBuffer));
EXPECT_CALL(
mock_gpu_memory_buffer_manager_,
- CreateGpuMemoryBuffer(gfx::Size(kDefaultWidth, kDefaultHeight),
- gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
- gpu::kNullSurfaceHandle))
+ CreateGpuMemoryBuffer(
+ gfx::Size(kDefaultWidth, kDefaultHeight),
+ gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index adf4955a9d0..90b72382367 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -250,8 +250,13 @@ void CameraHalDelegate::GetSupportedFormats(
}
float max_fps = 1.0 * 1000000000LL / duration;
+ // There's no consumer information here to determine the buffer usage, so
+ // hard-code the usage that all the clients should be using.
+ constexpr gfx::BufferUsage kClientBufferUsage =
+ gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
const ChromiumPixelFormat cr_format =
- camera_buffer_factory_->ResolveStreamBufferFormat(hal_format);
+ camera_buffer_factory_->ResolveStreamBufferFormat(hal_format,
+ kClientBufferUsage);
if (cr_format.video_format == PIXEL_FORMAT_UNKNOWN) {
continue;
}
@@ -340,6 +345,7 @@ void CameraHalDelegate::GetDeviceDescriptors(
// about malformed values.
}
}
+ desc.set_pan_tilt_zoom_supported(IsPanTiltZoomSupported(camera_info));
device_id_to_camera_id_[desc.device_id] = camera_id;
device_descriptors->push_back(desc);
}
@@ -350,6 +356,41 @@ void CameraHalDelegate::GetDeviceDescriptors(
DVLOG(1) << "Number of device descriptors: " << device_descriptors->size();
}
+bool CameraHalDelegate::IsPanTiltZoomSupported(
+ const cros::mojom::CameraInfoPtr& camera_info) {
+ auto is_vendor_range_valid = [&](const std::string& key) -> bool {
+ const VendorTagInfo* info = vendor_tag_ops_delegate_.GetInfoByName(key);
+ if (info == nullptr)
+ return false;
+ auto range = GetMetadataEntryAsSpan<int32_t>(
+ camera_info->static_camera_characteristics, info->tag);
+ return range.size() == 3 && range[0] < range[1];
+ };
+
+ if (is_vendor_range_valid("com.google.control.panRange"))
+ return true;
+
+ if (is_vendor_range_valid("com.google.control.tiltRange"))
+ return true;
+
+ if (is_vendor_range_valid("com.google.control.zoomRange"))
+ return true;
+
+ auto scaler_crop_region = GetMetadataEntryAsSpan<int32_t>(
+ camera_info->static_camera_characteristics,
+ cros::mojom::CameraMetadataTag::ANDROID_SCALER_CROP_REGION);
+ auto max_digital_zoom = GetMetadataEntryAsSpan<float>(
+ camera_info->static_camera_characteristics,
+ cros::mojom::CameraMetadataTag::
+ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+ if (max_digital_zoom.size() == 1 && max_digital_zoom[0] > 1 &&
+ scaler_crop_region.size() == 4) {
+ return true;
+ }
+
+ return false;
+}
+
cros::mojom::CameraInfoPtr CameraHalDelegate::GetCameraInfoFromDeviceId(
const std::string& device_id) {
base::AutoLock lock(camera_info_lock_);
@@ -364,6 +405,11 @@ cros::mojom::CameraInfoPtr CameraHalDelegate::GetCameraInfoFromDeviceId(
return it->second.Clone();
}
+const VendorTagInfo* CameraHalDelegate::GetVendorTagInfoByName(
+ const std::string& full_name) {
+ return vendor_tag_ops_delegate_.GetInfoByName(full_name);
+}
+
void CameraHalDelegate::OpenDevice(
int32_t camera_id,
mojo::PendingReceiver<cros::mojom::Camera3DeviceOps> device_ops_receiver,
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.h b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
index bcd28a53fae..014b830f67d 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
@@ -81,10 +81,15 @@ class CAPTURE_EXPORT CameraHalDelegate final
// Gets camera id from device id. Returns -1 on error.
int GetCameraIdFromDeviceId(const std::string& device_id);
+ // Returns true if either pan, tilt, or zoom camera capability is supported.
+ bool IsPanTiltZoomSupported(const cros::mojom::CameraInfoPtr& camera_info);
+
// Gets the camera info of |device_id|. Returns null CameraInfoPtr on error.
cros::mojom::CameraInfoPtr GetCameraInfoFromDeviceId(
const std::string& device_id);
+ const VendorTagInfo* GetVendorTagInfoByName(const std::string& full_name);
+
private:
friend class base::RefCountedThreadSafe<CameraHalDelegate>;
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
index ab3eccfd296..07b70efb893 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
@@ -241,9 +241,10 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
// |model_id| are set properly according to the vendor tags.
EXPECT_CALL(mock_gpu_memory_buffer_manager_,
- CreateGpuMemoryBuffer(_, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
- gpu::kNullSurfaceHandle))
+ CreateGpuMemoryBuffer(
+ _, gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
diff --git a/chromium/media/capture/video/chromeos/capture_metadata_dispatcher.h b/chromium/media/capture/video/chromeos/capture_metadata_dispatcher.h
new file mode 100644
index 00000000000..dd17930740d
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/capture_metadata_dispatcher.h
@@ -0,0 +1,43 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAPTURE_METADATA_DISPATCHER_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAPTURE_METADATA_DISPATCHER_H_
+
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
+
+namespace media {
+
+// Interface that provides API to let Camera3AController and
+// CameraDeviceDelegate to update the metadata that will be sent with capture
+// request.
+class CAPTURE_EXPORT CaptureMetadataDispatcher {
+ public:
+ class ResultMetadataObserver {
+ public:
+ virtual ~ResultMetadataObserver() {}
+ virtual void OnResultMetadataAvailable(
+ const cros::mojom::CameraMetadataPtr&) = 0;
+ };
+
+ virtual ~CaptureMetadataDispatcher() {}
+ virtual void AddResultMetadataObserver(ResultMetadataObserver* observer) = 0;
+ virtual void RemoveResultMetadataObserver(
+ ResultMetadataObserver* observer) = 0;
+ virtual void SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
+ cros::mojom::EntryType type,
+ size_t count,
+ std::vector<uint8_t> value) = 0;
+ virtual void SetRepeatingCaptureMetadata(cros::mojom::CameraMetadataTag tag,
+ cros::mojom::EntryType type,
+ size_t count,
+ std::vector<uint8_t> value) = 0;
+ virtual void UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag tag) = 0;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAPTURE_METADATA_DISPATCHER_H_
diff --git a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
index a1be18153d4..cbf8f6ef13c 100644
--- a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
+++ b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
@@ -25,7 +25,15 @@ bool GpuMemoryBufferTracker::Init(const gfx::Size& dimensions,
<< VideoPixelFormatToString(format);
return false;
}
- buffer_ = buffer_factory_.CreateGpuMemoryBuffer(dimensions, *gfx_format);
+ // There's no consumer information here to determine the precise buffer usage,
+ // so we try the usage flag that covers all use cases.
+ // JPEG capture buffer is backed by R8 pixel buffer.
+ const gfx::BufferUsage usage =
+ *gfx_format == gfx::BufferFormat::R_8
+ ? gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE
+ : gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ buffer_ =
+ buffer_factory_.CreateGpuMemoryBuffer(dimensions, *gfx_format, usage);
if (!buffer_) {
NOTREACHED() << "Failed to create GPU memory buffer";
return false;
diff --git a/chromium/media/capture/video/chromeos/mojom/BUILD.gn b/chromium/media/capture/video/chromeos/mojom/BUILD.gn
index f98d89e6e0d..cb72cea2f1b 100644
--- a/chromium/media/capture/video/chromeos/mojom/BUILD.gn
+++ b/chromium/media/capture/video/chromeos/mojom/BUILD.gn
@@ -17,7 +17,6 @@ mojom("cros_camera") {
deps = [
"//components/chromeos_camera/common",
"//media/capture/mojom:image_capture",
- "//media/mojo/mojom",
"//ui/gfx/geometry/mojom",
"//ui/gfx/range/mojom",
]
diff --git a/chromium/media/capture/video/chromeos/mojom/camera3.mojom b/chromium/media/capture/video/chromeos/mojom/camera3.mojom
index 2f3fdfb4bcb..5e7e6ded75d 100644
--- a/chromium/media/capture/video/chromeos/mojom/camera3.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera3.mojom
@@ -16,6 +16,7 @@ const uint32 GRALLOC_USAGE_SW_READ_OFTEN = 0x00000003;
const uint32 GRALLOC_USAGE_SW_WRITE_NEVER = 0x00000000;
const uint32 GRALLOC_USAGE_SW_WRITE_OFTEN = 0x00000030;
const uint32 GRALLOC_USAGE_HW_COMPOSER = 0x00000800;
+const uint32 GRALLOC_USAGE_HW_VIDEO_ENCODER = 0x00010000;
const uint32 GRALLOC_USAGE_HW_CAMERA_WRITE = 0x00020000;
const uint32 GRALLOC_USAGE_HW_CAMERA_READ = 0x00040000;
// A private gralloc usage flag to force allocation of YUV420 buffer. This
diff --git a/chromium/media/capture/video/chromeos/request_manager.h b/chromium/media/capture/video/chromeos/request_manager.h
index 0f2c89e1601..f71699d7a0b 100644
--- a/chromium/media/capture/video/chromeos/request_manager.h
+++ b/chromium/media/capture/video/chromeos/request_manager.h
@@ -17,6 +17,7 @@
#include "media/capture/mojom/image_capture.mojom.h"
#include "media/capture/video/chromeos/camera_app_device_impl.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
+#include "media/capture/video/chromeos/capture_metadata_dispatcher.h"
#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
#include "media/capture/video/chromeos/request_builder.h"
@@ -44,32 +45,6 @@ constexpr int32_t kMinConfiguredStreams = 1;
// Maximum configured streams could contain two optional YUV streams.
constexpr int32_t kMaxConfiguredStreams = 4;
-// Interface that provides API to let Camera3AController to update the metadata
-// that will be sent with capture request.
-class CAPTURE_EXPORT CaptureMetadataDispatcher {
- public:
- class ResultMetadataObserver {
- public:
- virtual ~ResultMetadataObserver() {}
- virtual void OnResultMetadataAvailable(
- const cros::mojom::CameraMetadataPtr&) = 0;
- };
-
- virtual ~CaptureMetadataDispatcher() {}
- virtual void AddResultMetadataObserver(ResultMetadataObserver* observer) = 0;
- virtual void RemoveResultMetadataObserver(
- ResultMetadataObserver* observer) = 0;
- virtual void SetCaptureMetadata(cros::mojom::CameraMetadataTag tag,
- cros::mojom::EntryType type,
- size_t count,
- std::vector<uint8_t> value) = 0;
- virtual void SetRepeatingCaptureMetadata(cros::mojom::CameraMetadataTag tag,
- cros::mojom::EntryType type,
- size_t count,
- std::vector<uint8_t> value) = 0;
- virtual void UnsetRepeatingCaptureMetadata(
- cros::mojom::CameraMetadataTag tag) = 0;
-};
// RequestManager is responsible for managing the flow for sending capture
// requests and receiving capture results. Having RequestBuilder to build
diff --git a/chromium/media/capture/video/chromeos/request_manager_unittest.cc b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
index b62c2cd9ed1..74fad9dca4d 100644
--- a/chromium/media/capture/video/chromeos/request_manager_unittest.cc
+++ b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
@@ -62,15 +62,15 @@ class FakeCameraBufferFactory : public CameraBufferFactory {
}
std::unique_ptr<gfx::GpuMemoryBuffer> CreateGpuMemoryBuffer(
const gfx::Size& size,
- gfx::BufferFormat format) override {
+ gfx::BufferFormat format,
+ gfx::BufferUsage usage) override {
return unittest_internal::MockGpuMemoryBufferManager::
- CreateFakeGpuMemoryBuffer(size, format,
- gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE,
- gpu::kNullSurfaceHandle);
+ CreateFakeGpuMemoryBuffer(size, format, usage, gpu::kNullSurfaceHandle);
}
ChromiumPixelFormat ResolveStreamBufferFormat(
- cros::mojom::HalPixelFormat hal_format) override {
+ cros::mojom::HalPixelFormat hal_format,
+ gfx::BufferUsage usage) override {
return ChromiumPixelFormat{PIXEL_FORMAT_NV12,
gfx::BufferFormat::YUV_420_BIPLANAR};
}
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
index c1afda42dab..184d9f57f6c 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
@@ -41,7 +41,9 @@ StreamBufferManager::~StreamBufferManager() {
}
void StreamBufferManager::ReserveBuffer(StreamType stream_type) {
- if (video_capture_use_gmb_) {
+ // The YUV output buffer for reprocessing is not passed to client, so can be
+ // allocated by the local buffer factory without zero-copy concerns.
+ if (video_capture_use_gmb_ && stream_type != StreamType::kYUVOutput) {
ReserveBufferFromPool(stream_type);
} else {
ReserveBufferFromFactory(stream_type);
@@ -154,8 +156,8 @@ StreamBufferManager::AcquireBufferForClientById(StreamType stream_type,
DCHECK(gfx_format);
auto rotated_gmb = gmb_support_->CreateGpuMemoryBufferImplFromHandle(
rotated_buffer.handle_provider->GetGpuMemoryBufferHandle(),
- format->frame_size, *gfx_format,
- CameraBufferFactory::GetBufferUsage(*gfx_format), base::NullCallback());
+ format->frame_size, *gfx_format, stream_context->buffer_usage,
+ base::NullCallback());
if (!rotated_gmb || !rotated_gmb->Map()) {
DLOG(WARNING) << "Failed to map rotated buffer";
@@ -238,19 +240,19 @@ void StreamBufferManager::SetUpStreamsAndBuffers(
stream_context->capture_format = capture_format;
stream_context->stream = std::move(stream);
- const ChromiumPixelFormat stream_format =
- camera_buffer_factory_->ResolveStreamBufferFormat(
- stream_context->stream->format);
- // Internally we keep track of the VideoPixelFormat that's actually
- // supported by the camera instead of the one requested by the client.
- stream_context->capture_format.pixel_format = stream_format.video_format;
-
switch (stream_type) {
case StreamType::kPreviewOutput:
+ stream_context->buffer_dimension = gfx::Size(
+ stream_context->stream->width, stream_context->stream->height);
+ stream_context->buffer_usage =
+ gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ break;
case StreamType::kYUVInput:
case StreamType::kYUVOutput:
stream_context->buffer_dimension = gfx::Size(
stream_context->stream->width, stream_context->stream->height);
+ stream_context->buffer_usage =
+ gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE;
break;
case StreamType::kJpegOutput: {
auto jpeg_size = GetMetadataEntryAsSpan<int32_t>(
@@ -258,12 +260,21 @@ void StreamBufferManager::SetUpStreamsAndBuffers(
cros::mojom::CameraMetadataTag::ANDROID_JPEG_MAX_SIZE);
CHECK_EQ(jpeg_size.size(), 1u);
stream_context->buffer_dimension = gfx::Size(jpeg_size[0], 1);
+ stream_context->buffer_usage =
+ gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE;
break;
}
default: {
NOTREACHED();
}
}
+ const ChromiumPixelFormat stream_format =
+ camera_buffer_factory_->ResolveStreamBufferFormat(
+ stream_context->stream->format, stream_context->buffer_usage);
+ // Internally we keep track of the VideoPixelFormat that's actually
+ // supported by the camera instead of the one requested by the client.
+ stream_context->capture_format.pixel_format = stream_format.video_format;
+
stream_context_[stream_type] = std::move(stream_context);
// For input stream, there is no need to allocate buffers.
@@ -381,7 +392,8 @@ void StreamBufferManager::ReserveBufferFromFactory(StreamType stream_type) {
return;
}
auto gmb = camera_buffer_factory_->CreateGpuMemoryBuffer(
- stream_context->buffer_dimension, *gfx_format);
+ stream_context->buffer_dimension, *gfx_format,
+ stream_context->buffer_usage);
if (!gmb) {
device_context_->SetErrorState(
media::VideoCaptureError::
@@ -426,7 +438,7 @@ void StreamBufferManager::ReserveBufferFromPool(StreamType stream_type) {
auto gmb = gmb_support_->CreateGpuMemoryBufferImplFromHandle(
vcd_buffer.handle_provider->GetGpuMemoryBufferHandle(),
stream_context->buffer_dimension, *gfx_format,
- CameraBufferFactory::GetBufferUsage(*gfx_format), base::NullCallback());
+ stream_context->buffer_usage, base::NullCallback());
stream_context->free_buffers.push(vcd_buffer.id);
stream_context->buffers.insert(std::make_pair(
vcd_buffer.id, BufferPair(std::move(gmb), std::move(vcd_buffer))));
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.h b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
index bd89f891ab5..cc468903a7a 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.h
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
@@ -131,6 +131,8 @@ class CAPTURE_EXPORT StreamBufferManager final {
cros::mojom::Camera3StreamPtr stream;
// The dimension of the buffer layout.
gfx::Size buffer_dimension;
+ // The usage of the buffer.
+ gfx::BufferUsage buffer_usage;
// The allocated buffer pairs.
std::map<int, BufferPair> buffers;
// The free buffers of this stream. The queue stores keys into the
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
index 665ac4eb39e..a8056fb047f 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
@@ -130,11 +130,8 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
out_frame->BackWithOwnedSharedMemory(std::move(out_region),
std::move(out_mapping));
- out_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
- frame_format.frame_rate);
-
- out_frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
- reference_time);
+ out_frame->metadata()->frame_rate = frame_format.frame_rate;
+ out_frame->metadata()->reference_time = reference_time;
media::mojom::VideoFrameInfoPtr out_frame_info =
media::mojom::VideoFrameInfo::New();
@@ -142,7 +139,7 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
out_frame_info->pixel_format = media::PIXEL_FORMAT_I420;
out_frame_info->coded_size = dimensions;
out_frame_info->visible_rect = gfx::Rect(dimensions);
- out_frame_info->metadata = out_frame->metadata()->GetInternalValues().Clone();
+ out_frame_info->metadata = *(out_frame->metadata());
out_frame_info->color_space = out_frame->ColorSpace();
{
diff --git a/chromium/media/capture/video/fake_video_capture_device.cc b/chromium/media/capture/video/fake_video_capture_device.cc
index 96beb87ae21..a6a22008f17 100644
--- a/chromium/media/capture/video/fake_video_capture_device.cc
+++ b/chromium/media/capture/video/fake_video_capture_device.cc
@@ -610,22 +610,28 @@ void FakePhotoDevice::GetPhotoState(
photo_state->focus_distance->step = kFocusDistanceStep;
photo_state->pan = mojom::Range::New();
- photo_state->pan->current = fake_device_state_->pan;
- photo_state->pan->max = kMaxPan;
- photo_state->pan->min = kMinPan;
- photo_state->pan->step = kPanStep;
+ if (config_.pan_tilt_zoom_supported) {
+ photo_state->pan->current = fake_device_state_->pan;
+ photo_state->pan->max = kMaxPan;
+ photo_state->pan->min = kMinPan;
+ photo_state->pan->step = kPanStep;
+ }
photo_state->tilt = mojom::Range::New();
- photo_state->tilt->current = fake_device_state_->tilt;
- photo_state->tilt->max = kMaxTilt;
- photo_state->tilt->min = kMinTilt;
- photo_state->tilt->step = kTiltStep;
+ if (config_.pan_tilt_zoom_supported) {
+ photo_state->tilt->current = fake_device_state_->tilt;
+ photo_state->tilt->max = kMaxTilt;
+ photo_state->tilt->min = kMinTilt;
+ photo_state->tilt->step = kTiltStep;
+ }
photo_state->zoom = mojom::Range::New();
- photo_state->zoom->current = fake_device_state_->zoom;
- photo_state->zoom->max = kMaxZoom;
- photo_state->zoom->min = kMinZoom;
- photo_state->zoom->step = kZoomStep;
+ if (config_.pan_tilt_zoom_supported) {
+ photo_state->zoom->current = fake_device_state_->zoom;
+ photo_state->zoom->max = kMaxZoom;
+ photo_state->zoom->min = kMinZoom;
+ photo_state->zoom->step = kZoomStep;
+ }
photo_state->supports_torch = false;
photo_state->torch = false;
diff --git a/chromium/media/capture/video/fake_video_capture_device.h b/chromium/media/capture/video/fake_video_capture_device.h
index 1d419292da6..057fc2c7475 100644
--- a/chromium/media/capture/video/fake_video_capture_device.h
+++ b/chromium/media/capture/video/fake_video_capture_device.h
@@ -156,14 +156,10 @@ class FrameDelivererFactory {
};
struct FakePhotoDeviceConfig {
- FakePhotoDeviceConfig()
- : should_fail_get_photo_capabilities(false),
- should_fail_set_photo_options(false),
- should_fail_take_photo(false) {}
-
- bool should_fail_get_photo_capabilities;
- bool should_fail_set_photo_options;
- bool should_fail_take_photo;
+ bool pan_tilt_zoom_supported = true;
+ bool should_fail_get_photo_capabilities = false;
+ bool should_fail_set_photo_options = false;
+ bool should_fail_take_photo = false;
};
// Implements the photo functionality of a FakeVideoCaptureDevice
diff --git a/chromium/media/capture/video/fake_video_capture_device_factory.cc b/chromium/media/capture/video/fake_video_capture_device_factory.cc
index 0b9c4763041..32c6f2a4ae8 100644
--- a/chromium/media/capture/video/fake_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/fake_video_capture_device_factory.cc
@@ -209,17 +209,18 @@ void FakeVideoCaptureDeviceFactory::GetDeviceDescriptors(
device_descriptors->emplace_back(
base::StringPrintf("fake_device_%d", entry_index), entry.device_id,
#if defined(OS_LINUX)
- VideoCaptureApi::LINUX_V4L2_SINGLE_PLANE
+ VideoCaptureApi::LINUX_V4L2_SINGLE_PLANE,
#elif defined(OS_MACOSX)
- VideoCaptureApi::MACOSX_AVFOUNDATION
+ VideoCaptureApi::MACOSX_AVFOUNDATION,
#elif defined(OS_WIN)
- VideoCaptureApi::WIN_DIRECT_SHOW
+ VideoCaptureApi::WIN_DIRECT_SHOW,
#elif defined(OS_ANDROID)
- VideoCaptureApi::ANDROID_API2_LEGACY
+ VideoCaptureApi::ANDROID_API2_LEGACY,
#elif defined(OS_FUCHSIA)
- VideoCaptureApi::UNKNOWN
+ VideoCaptureApi::UNKNOWN,
#endif
- );
+ VideoCaptureTransportType::OTHER_TRANSPORT,
+ entry.photo_device_config.pan_tilt_zoom_supported);
entry_index++;
}
}
@@ -255,6 +256,7 @@ void FakeVideoCaptureDeviceFactory::ParseFakeDevicesConfigFromOptionsString(
std::vector<gfx::Size> resolutions = ArrayToVector(kDefaultResolutions);
std::vector<float> frame_rates = ArrayToVector(kDefaultFrameRates);
int device_count = kDefaultDeviceCount;
+ FakePhotoDeviceConfig photo_device_config;
FakeVideoCaptureDevice::DisplayMediaType display_media_type =
FakeVideoCaptureDevice::DisplayMediaType::ANY;
@@ -331,6 +333,13 @@ void FakeVideoCaptureDeviceFactory::ParseFakeDevicesConfigFromOptionsString(
} else if (base::EqualsCaseInsensitiveASCII(param.back(), "browser")) {
display_media_type = FakeVideoCaptureDevice::DisplayMediaType::BROWSER;
}
+ } else if (base::EqualsCaseInsensitiveASCII(param.front(),
+ "hardware-support")) {
+ photo_device_config.pan_tilt_zoom_supported = false;
+ if (base::EqualsCaseInsensitiveASCII(param.back(), "pan-tilt-zoom"))
+ photo_device_config.pan_tilt_zoom_supported = true;
+ else if (!base::EqualsCaseInsensitiveASCII(param.back(), "none"))
+ LOG(WARNING) << "Unknown hardware support " << param.back();
}
}
@@ -342,6 +351,7 @@ void FakeVideoCaptureDeviceFactory::ParseFakeDevicesConfigFromOptionsString(
settings.device_id = base::StringPrintf(kDefaultDeviceIdMask, device_index);
AppendAllCombinationsToFormatsContainer(
pixel_formats, resolutions, frame_rates, &settings.supported_formats);
+ settings.photo_device_config = photo_device_config;
settings.display_media_type = display_media_type;
config->push_back(settings);
}
diff --git a/chromium/media/capture/video/fake_video_capture_device_unittest.cc b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
index 4508a593f6c..cad438c5693 100644
--- a/chromium/media/capture/video/fake_video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
@@ -33,6 +33,11 @@ using ::testing::Values;
namespace media {
+bool operator==(const FakePhotoDeviceConfig& lhs,
+ const FakePhotoDeviceConfig& rhs) {
+ return std::memcmp(&lhs, &rhs, sizeof(lhs)) == 0;
+}
+
namespace {
class ImageCaptureClient : public base::RefCounted<ImageCaptureClient> {
@@ -405,6 +410,7 @@ struct CommandLineTestData {
size_t expected_device_count;
FakeVideoCaptureDevice::DisplayMediaType expected_display_media_type;
std::vector<VideoPixelFormat> expected_pixel_formats;
+ FakePhotoDeviceConfig expected_photo_device_config;
};
class FakeVideoCaptureDeviceFactoryTest
@@ -442,6 +448,8 @@ TEST_P(FakeVideoCaptureDeviceFactoryTest,
FakeVideoCaptureDeviceFactory::ParseFakeDevicesConfigFromOptionsString(
GetParam().switch_value_string, &config);
for (const auto& settings : config) {
+ EXPECT_EQ(GetParam().expected_photo_device_config,
+ settings.photo_device_config);
EXPECT_EQ(GetParam().expected_display_media_type,
settings.display_media_type);
}
@@ -491,7 +499,8 @@ INSTANTIATE_TEST_SUITE_P(
5,
1u,
FakeVideoCaptureDevice::DisplayMediaType::ANY,
- {PIXEL_FORMAT_I420}},
+ {PIXEL_FORMAT_I420},
+ {true, false, false, false}},
CommandLineTestData{"fps=29.97,device-count=1",
29.97f,
1u,
@@ -524,6 +533,18 @@ INSTANTIATE_TEST_SUITE_P(
0u,
FakeVideoCaptureDevice::DisplayMediaType::ANY,
{PIXEL_FORMAT_I420}},
+ CommandLineTestData{"hardware-support=none",
+ 20,
+ 1u,
+ FakeVideoCaptureDevice::DisplayMediaType::ANY,
+ {PIXEL_FORMAT_I420},
+ {false}},
+ CommandLineTestData{"hardware-support=pan-tilt-zoom,fps=60",
+ 60,
+ 1u,
+ FakeVideoCaptureDevice::DisplayMediaType::ANY,
+ {PIXEL_FORMAT_I420},
+ {true}},
CommandLineTestData{"display-media-type=window",
20,
1u,
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc b/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
index 32cf755b561..ecc999c707a 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
@@ -7,8 +7,8 @@
#include <lib/sys/cpp/component_context.h>
#include "base/check_op.h"
-#include "base/fuchsia/default_context.h"
#include "base/fuchsia/fuchsia_logging.h"
+#include "base/fuchsia/process_context.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/strings/string_number_conversions.h"
@@ -186,7 +186,7 @@ void VideoCaptureDeviceFactoryFuchsia::Initialize() {
DCHECK(!device_watcher_);
DCHECK(devices_.empty());
- base::fuchsia::ComponentContextForCurrentProcess()->svc()->Connect(
+ base::ComponentContextForProcess()->svc()->Connect(
device_watcher_.NewRequest());
device_watcher_.set_error_handler(fit::bind_member(
diff --git a/chromium/media/capture/video/gpu_memory_buffer_utils.cc b/chromium/media/capture/video/gpu_memory_buffer_utils.cc
index 62c52b725b4..6001654d2b0 100644
--- a/chromium/media/capture/video/gpu_memory_buffer_utils.cc
+++ b/chromium/media/capture/video/gpu_memory_buffer_utils.cc
@@ -62,7 +62,8 @@ VideoCaptureDevice::Client::ReserveResult AllocateNV12GpuMemoryBuffer(
*out_gpu_memory_buffer = gmb_support->CreateGpuMemoryBufferImplFromHandle(
out_capture_buffer->handle_provider->GetGpuMemoryBufferHandle(),
buffer_size, kOpaqueGfxFormat,
- gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE, base::NullCallback());
+ gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ base::NullCallback());
return reserve_result;
}
diff --git a/chromium/media/capture/video/linux/fake_v4l2_impl.cc b/chromium/media/capture/video/linux/fake_v4l2_impl.cc
index ef2c38c6e27..b46b8ae41f3 100644
--- a/chromium/media/capture/video/linux/fake_v4l2_impl.cc
+++ b/chromium/media/capture/video/linux/fake_v4l2_impl.cc
@@ -132,7 +132,24 @@ class FakeV4L2Impl::OpenedDevice {
int s_ext_ctrls(v4l2_ext_controls* control) { return kSuccessReturnValue; }
- int queryctrl(v4l2_queryctrl* control) { return EINVAL; }
+ int queryctrl(v4l2_queryctrl* control) {
+ switch (control->id) {
+ case V4L2_CID_PAN_ABSOLUTE:
+ case V4L2_CID_TILT_ABSOLUTE:
+ case V4L2_CID_ZOOM_ABSOLUTE:
+ if (!config_.descriptor.pan_tilt_zoom_supported().has_value() ||
+ !config_.descriptor.pan_tilt_zoom_supported().value()) {
+ return EINVAL;
+ }
+ control->flags = 0;
+ control->minimum = 100;
+ control->maximum = 400;
+ control->step = 1;
+ return 0;
+ default:
+ return EINVAL;
+ }
+ }
int s_fmt(v4l2_format* format) {
if (format->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
index aab890307c1..60c8597901d 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
@@ -215,17 +215,16 @@ void VideoCaptureDeviceFactoryLinux::GetDeviceDescriptors(
device_provider_->GetDeviceDisplayName(unique_id);
if (display_name.empty())
display_name = reinterpret_cast<char*>(cap.card);
-#if defined(OS_CHROMEOS)
device_descriptors->emplace_back(
display_name, unique_id, model_id,
VideoCaptureApi::LINUX_V4L2_SINGLE_PLANE,
VideoCaptureTransportType::OTHER_TRANSPORT,
- device_provider_->GetCameraFacing(unique_id, model_id));
+#if defined(OS_CHROMEOS)
+ device_provider_->GetCameraFacing(unique_id, model_id),
#else
- device_descriptors->emplace_back(
- display_name, unique_id, model_id,
- VideoCaptureApi::LINUX_V4L2_SINGLE_PLANE);
+ VideoFacingMode::MEDIA_VIDEO_FACING_NONE,
#endif
+ IsPanTiltZoomSupported(fd.get()));
}
}
// Since JS doesn't have API to get camera facing, we sort the list to make
@@ -254,6 +253,22 @@ int VideoCaptureDeviceFactoryLinux::DoIoctl(int fd, int request, void* argp) {
return HANDLE_EINTR(v4l2_->ioctl(fd, request, argp));
}
+// Check if the video capture device supports at least one of pan, tilt and zoom
+// controls.
+bool VideoCaptureDeviceFactoryLinux::IsPanTiltZoomSupported(int fd) {
+ for (int control_id : {V4L2_CID_PAN_ABSOLUTE, V4L2_CID_TILT_ABSOLUTE,
+ V4L2_CID_ZOOM_ABSOLUTE}) {
+ v4l2_queryctrl range = {};
+ range.id = control_id;
+ range.type = V4L2_CTRL_TYPE_INTEGER;
+ if (DoIoctl(fd, VIDIOC_QUERYCTRL, &range) == 0 &&
+ range.minimum < range.maximum) {
+ return true;
+ }
+ }
+ return false;
+}
+
bool VideoCaptureDeviceFactoryLinux::HasUsableFormats(int fd,
uint32_t capabilities) {
if (!(capabilities & V4L2_CAP_VIDEO_CAPTURE))
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.h b/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
index 08c7a831edb..0383924848a 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.h
@@ -67,6 +67,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryLinux
// Simple wrapper to do HANDLE_EINTR(v4l2_->ioctl(fd, ...)).
int DoIoctl(int fd, int request, void* argp);
+ bool IsPanTiltZoomSupported(int fd);
bool HasUsableFormats(int fd, uint32_t capabilities);
std::vector<float> GetFrameRateList(int fd,
uint32_t fourcc,
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
index 88334bb44d5..fd53b895377 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
@@ -67,7 +67,8 @@ class DescriptorDeviceProvider
std::vector<VideoCaptureDeviceDescriptor> descriptors_;
};
-class VideoCaptureDeviceFactoryLinuxTest : public ::testing::Test {
+class VideoCaptureDeviceFactoryLinuxTest
+ : public ::testing::TestWithParam<VideoCaptureDeviceDescriptor> {
public:
VideoCaptureDeviceFactoryLinuxTest() {}
~VideoCaptureDeviceFactoryLinuxTest() override = default;
@@ -89,13 +90,11 @@ class VideoCaptureDeviceFactoryLinuxTest : public ::testing::Test {
std::unique_ptr<VideoCaptureDeviceFactoryLinux> factory_;
};
-TEST_F(VideoCaptureDeviceFactoryLinuxTest, EnumerateSingleFakeV4L2Device) {
+TEST_P(VideoCaptureDeviceFactoryLinuxTest, EnumerateSingleFakeV4L2DeviceUsing) {
// Setup
- const std::string stub_display_name = "Fake Device 0";
- const std::string stub_device_id = "/dev/video0";
- VideoCaptureDeviceDescriptor descriptor(stub_display_name, stub_device_id);
+ const VideoCaptureDeviceDescriptor& descriptor = GetParam();
fake_device_provider_->AddDevice(descriptor);
- fake_v4l2_->AddDevice(stub_device_id, FakeV4L2DeviceConfig(descriptor));
+ fake_v4l2_->AddDevice(descriptor.device_id, FakeV4L2DeviceConfig(descriptor));
// Exercise
VideoCaptureDeviceDescriptors descriptors;
@@ -103,10 +102,27 @@ TEST_F(VideoCaptureDeviceFactoryLinuxTest, EnumerateSingleFakeV4L2Device) {
// Verification
ASSERT_EQ(1u, descriptors.size());
- ASSERT_EQ(stub_device_id, descriptors[0].device_id);
- ASSERT_EQ(stub_display_name, descriptors[0].display_name());
+ EXPECT_EQ(descriptor.device_id, descriptors[0].device_id);
+ EXPECT_EQ(descriptor.display_name(), descriptors[0].display_name());
+ EXPECT_EQ(descriptor.pan_tilt_zoom_supported().value(),
+ descriptors[0].pan_tilt_zoom_supported());
}
+INSTANTIATE_TEST_SUITE_P(
+ All,
+ VideoCaptureDeviceFactoryLinuxTest,
+ ::testing::Values(
+ VideoCaptureDeviceDescriptor("Fake Device 0",
+ "/dev/video0",
+ VideoCaptureApi::UNKNOWN,
+ VideoCaptureTransportType::OTHER_TRANSPORT,
+ /*pan_tilt_zoom_supported=*/false),
+ VideoCaptureDeviceDescriptor("Fake Device 0",
+ "/dev/video0",
+ VideoCaptureApi::UNKNOWN,
+ VideoCaptureTransportType::OTHER_TRANSPORT,
+ /*pan_tilt_zoom_supported=*/true)));
+
TEST_F(VideoCaptureDeviceFactoryLinuxTest,
ReceiveFramesFromSinglePlaneFakeDevice) {
// Setup
diff --git a/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm b/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
index a2378198222..23babdc6821 100644
--- a/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_decklink_mac.mm
@@ -412,6 +412,7 @@ void VideoCaptureDeviceDeckLinkMac::EnumerateDevices(
JoinDeviceNameAndFormat(device_model_name, format_name);
descriptor.capture_api = VideoCaptureApi::MACOSX_DECKLINK;
descriptor.transport_type = VideoCaptureTransportType::OTHER_TRANSPORT;
+ descriptor.set_pan_tilt_zoom_supported(false);
device_descriptors->push_back(descriptor);
DVLOG(1) << "Blackmagic camera enumerated: "
<< descriptor.display_name();
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm b/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
index 551ee4ef116..6482d1f1c8a 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac.mm
@@ -123,7 +123,9 @@ void VideoCaptureDeviceFactoryMac::GetDeviceDescriptors(
device_id, capture_api, device_transport_type);
VideoCaptureDeviceDescriptor descriptor(
[[[capture_devices valueForKey:key] deviceName] UTF8String], device_id,
- model_id, capture_api, device_transport_type);
+ model_id, capture_api, device_transport_type,
+ VideoFacingMode::MEDIA_VIDEO_FACING_NONE,
+ /*pan_tilt_zoom_supported=*/false);
if (IsDeviceBlacklisted(descriptor))
continue;
device_descriptors->push_back(descriptor);
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm b/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
index 8329266f5e7..8133c4ac671 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
@@ -46,4 +46,19 @@ TEST(VideoCaptureDeviceFactoryMacTest, ListDevicesAVFoundation) {
}));
}
+TEST(VideoCaptureDeviceFactoryMacTest, ListDevicesWithNoPanTiltZoomSupport) {
+ RunTestCase(base::BindOnce([]() {
+ VideoCaptureDeviceFactoryMac video_capture_device_factory;
+
+ VideoCaptureDeviceDescriptors descriptors;
+ video_capture_device_factory.GetDeviceDescriptors(&descriptors);
+ if (descriptors.empty()) {
+ DVLOG(1) << "No camera available. Exiting test.";
+ return;
+ }
+ for (const auto& descriptor : descriptors)
+ EXPECT_FALSE(descriptor.pan_tilt_zoom_supported().value());
+ }));
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device.h b/chromium/media/capture/video/video_capture_device.h
index 2f0540bbd73..b7143dab16c 100644
--- a/chromium/media/capture/video/video_capture_device.h
+++ b/chromium/media/capture/video/video_capture_device.h
@@ -21,7 +21,6 @@
#include "base/callback.h"
#include "base/files/file.h"
-#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index 618be0e51ca..43a88b72155 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -527,10 +527,9 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
const VideoFrameMetadata& additional_metadata) {
DFAKE_SCOPED_RECURSIVE_LOCK(call_from_producer_);
- VideoFrameMetadata metadata;
- metadata.MergeMetadataFrom(&additional_metadata);
- metadata.SetDouble(VideoFrameMetadata::FRAME_RATE, format.frame_rate);
- metadata.SetTimeTicks(VideoFrameMetadata::REFERENCE_TIME, reference_time);
+ VideoFrameMetadata metadata = additional_metadata;
+ metadata.frame_rate = format.frame_rate;
+ metadata.reference_time = reference_time;
mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
info->timestamp = timestamp;
@@ -538,7 +537,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
info->color_space = color_space;
info->coded_size = format.frame_size;
info->visible_rect = visible_rect;
- info->metadata = metadata.GetInternalValues().Clone();
+ info->metadata = metadata;
buffer_pool_->HoldForConsumers(buffer.id, 1);
receiver_->OnFrameReadyInBuffer(
diff --git a/chromium/media/capture/video/video_capture_device_client_unittest.cc b/chromium/media/capture/video/video_capture_device_client_unittest.cc
index 3175ba31e69..9bd1d329af9 100644
--- a/chromium/media/capture/video/video_capture_device_client_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_client_unittest.cc
@@ -110,7 +110,8 @@ TEST_F(VideoCaptureDeviceClientTest, Minimal) {
std::unique_ptr<gfx::GpuMemoryBuffer> buffer =
gpu_memory_buffer_manager_->CreateFakeGpuMemoryBuffer(
kBufferDimensions, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE, gpu::kNullSurfaceHandle);
+ gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle);
{
InSequence s;
const int expected_buffer_id = 0;
diff --git a/chromium/media/capture/video/video_capture_device_descriptor.cc b/chromium/media/capture/video/video_capture_device_descriptor.cc
index 81ee65a45b8..8ece28fab36 100644
--- a/chromium/media/capture/video/video_capture_device_descriptor.cc
+++ b/chromium/media/capture/video/video_capture_device_descriptor.cc
@@ -25,12 +25,14 @@ VideoCaptureDeviceDescriptor::VideoCaptureDeviceDescriptor(
const std::string& display_name,
const std::string& device_id,
VideoCaptureApi capture_api,
- VideoCaptureTransportType transport_type)
+ VideoCaptureTransportType transport_type,
+ const base::Optional<bool>& pan_tilt_zoom_supported)
: device_id(device_id),
facing(VideoFacingMode::MEDIA_VIDEO_FACING_NONE),
capture_api(capture_api),
transport_type(transport_type),
- display_name_(TrimDisplayName(display_name)) {}
+ display_name_(TrimDisplayName(display_name)),
+ pan_tilt_zoom_supported_(pan_tilt_zoom_supported) {}
VideoCaptureDeviceDescriptor::VideoCaptureDeviceDescriptor(
const std::string& display_name,
@@ -38,13 +40,15 @@ VideoCaptureDeviceDescriptor::VideoCaptureDeviceDescriptor(
const std::string& model_id,
VideoCaptureApi capture_api,
VideoCaptureTransportType transport_type,
- VideoFacingMode facing)
+ VideoFacingMode facing,
+ const base::Optional<bool>& pan_tilt_zoom_supported)
: device_id(device_id),
model_id(model_id),
facing(facing),
capture_api(capture_api),
transport_type(transport_type),
- display_name_(TrimDisplayName(display_name)) {}
+ display_name_(TrimDisplayName(display_name)),
+ pan_tilt_zoom_supported_(pan_tilt_zoom_supported) {}
VideoCaptureDeviceDescriptor::~VideoCaptureDeviceDescriptor() = default;
diff --git a/chromium/media/capture/video/video_capture_device_descriptor.h b/chromium/media/capture/video/video_capture_device_descriptor.h
index 1c3da3f99dc..b2c3eb1caf4 100644
--- a/chromium/media/capture/video/video_capture_device_descriptor.h
+++ b/chromium/media/capture/video/video_capture_device_descriptor.h
@@ -8,6 +8,7 @@
#include <string>
#include <vector>
+#include "base/optional.h"
#include "media/base/video_facing.h"
#include "media/capture/capture_export.h"
@@ -55,7 +56,8 @@ struct CAPTURE_EXPORT VideoCaptureDeviceDescriptor {
const std::string& device_id,
VideoCaptureApi capture_api = VideoCaptureApi::UNKNOWN,
VideoCaptureTransportType transport_type =
- VideoCaptureTransportType::OTHER_TRANSPORT);
+ VideoCaptureTransportType::OTHER_TRANSPORT,
+ const base::Optional<bool>& pan_tilt_zoom_supported = base::nullopt);
VideoCaptureDeviceDescriptor(
const std::string& display_name,
const std::string& device_id,
@@ -63,7 +65,8 @@ struct CAPTURE_EXPORT VideoCaptureDeviceDescriptor {
VideoCaptureApi capture_api,
VideoCaptureTransportType transport_type =
VideoCaptureTransportType::OTHER_TRANSPORT,
- VideoFacingMode facing = VideoFacingMode::MEDIA_VIDEO_FACING_NONE);
+ VideoFacingMode facing = VideoFacingMode::MEDIA_VIDEO_FACING_NONE,
+ const base::Optional<bool>& pan_tilt_zoom_supported = base::nullopt);
VideoCaptureDeviceDescriptor(const VideoCaptureDeviceDescriptor& other);
~VideoCaptureDeviceDescriptor();
@@ -83,6 +86,13 @@ struct CAPTURE_EXPORT VideoCaptureDeviceDescriptor {
const std::string& display_name() const { return display_name_; }
void set_display_name(const std::string& name);
+ const base::Optional<bool>& pan_tilt_zoom_supported() const {
+ return pan_tilt_zoom_supported_;
+ }
+ void set_pan_tilt_zoom_supported(bool supported) {
+ pan_tilt_zoom_supported_ = supported;
+ }
+
std::string device_id;
// A unique hardware identifier of the capture device.
// It is of the form "[vid]:[pid]" when a USB device is detected, and empty
@@ -96,6 +106,7 @@ struct CAPTURE_EXPORT VideoCaptureDeviceDescriptor {
private:
std::string display_name_; // Name that is intended for display in the UI
+ base::Optional<bool> pan_tilt_zoom_supported_;
};
using VideoCaptureDeviceDescriptors = std::vector<VideoCaptureDeviceDescriptor>;
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index 64474252226..a79f4d7761e 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -22,6 +22,7 @@
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
+#include "base/threading/scoped_thread_priority.h"
#include "base/win/core_winrt_util.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_variant.h"
@@ -53,13 +54,13 @@ const size_t kVidPidSize = 4;
// Avoid enumerating and/or using certain devices due to they provoking crashes
// or any other reason (http://crbug.com/378494). This enum is defined for the
// purposes of UMA collection. Existing entries cannot be removed.
-enum BlacklistedCameraNames {
- BLACKLISTED_CAMERA_GOOGLE_CAMERA_ADAPTER = 0,
- BLACKLISTED_CAMERA_IP_CAMERA = 1,
- BLACKLISTED_CAMERA_CYBERLINK_WEBCAM_SPLITTER = 2,
- BLACKLISTED_CAMERA_EPOCCAM = 3,
+enum BlockedCameraNames {
+ BLOCKED_CAMERA_GOOGLE_CAMERA_ADAPTER = 0,
+ BLOCKED_CAMERA_IP_CAMERA = 1,
+ BLOCKED_CAMERA_CYBERLINK_WEBCAM_SPLITTER = 2,
+ BLOCKED_CAMERA_EPOCCAM = 3,
// This one must be last, and equal to the previous enumerated value.
- BLACKLISTED_CAMERA_MAX = BLACKLISTED_CAMERA_EPOCCAM,
+ BLOCKED_CAMERA_MAX = BLOCKED_CAMERA_EPOCCAM,
};
#define UWP_ENUM_ERROR_HANDLER(hr, err_log) \
@@ -67,21 +68,23 @@ enum BlacklistedCameraNames {
origin_task_runner_->PostTask(FROM_HERE, \
base::BindOnce(device_info_callback, nullptr))
-// Blacklisted devices are identified by a characteristic prefix of the name.
+// Blocked devices are identified by a characteristic prefix of the name.
// This prefix is used case-insensitively. This list must be kept in sync with
-// |BlacklistedCameraNames|.
-const char* const kBlacklistedCameraNames[] = {
+// |BlockedCameraNames|.
+const char* const kBlockedCameraNames[] = {
// Name of a fake DirectShow filter on computers with GTalk installed.
"Google Camera Adapter",
// The following software WebCams cause crashes.
- "IP Camera [JPEG/MJPEG]", "CyberLink Webcam Splitter", "EpocCam",
+ "IP Camera [JPEG/MJPEG]",
+ "CyberLink Webcam Splitter",
+ "EpocCam",
};
-static_assert(base::size(kBlacklistedCameraNames) == BLACKLISTED_CAMERA_MAX + 1,
- "kBlacklistedCameraNames should be same size as "
- "BlacklistedCameraNames enum");
+static_assert(base::size(kBlockedCameraNames) == BLOCKED_CAMERA_MAX + 1,
+ "kBlockedCameraNames should be same size as "
+ "BlockedCameraNames enum");
// Use this list only for USB webcams.
-const char* const kModelIdsBlacklistedForMediaFoundation[] = {
+const char* const kModelIdsBlockedForMediaFoundation[] = {
// Devices using Empia 2860 or 2820 chips, see https://crbug.com/849636.
"eb1a:2860", "eb1a:2820", "1ce6:2820",
// Elgato HD60 Pro
@@ -105,7 +108,7 @@ const char* const kModelIdsBlacklistedForMediaFoundation[] = {
"0bda:57f2"};
// Use this list only for non-USB webcams.
-const char* const kDisplayNamesBlacklistedForMediaFoundation[] = {
+const char* const kDisplayNamesBlockedForMediaFoundation[] = {
// VMware Virtual Webcams cause hangs when there is no physical Webcam.
// See https://crbug.com/1044974.
"VMware Virtual Webcam"};
@@ -128,20 +131,18 @@ GetMFAttributes() {
return *mf_attributes;
}
-bool IsDeviceBlacklistedForQueryingDetailedFrameRates(
+bool IsDeviceBlockedForQueryingDetailedFrameRates(
const std::string& display_name) {
return display_name.find("WebcamMax") != std::string::npos;
}
-bool IsDeviceBlacklistedForMediaFoundationByModelId(
- const std::string& model_id) {
- return base::Contains(kModelIdsBlacklistedForMediaFoundation, model_id);
+bool IsDeviceBlockedForMediaFoundationByModelId(const std::string& model_id) {
+ return base::Contains(kModelIdsBlockedForMediaFoundation, model_id);
}
-bool IsDeviceBlacklistedForMediaFoundationByDisplayName(
+bool IsDeviceBlockedForMediaFoundationByDisplayName(
const std::string& display_name) {
- return base::Contains(kDisplayNamesBlacklistedForMediaFoundation,
- display_name);
+ return base::Contains(kDisplayNamesBlockedForMediaFoundation, display_name);
}
bool LoadMediaFoundationDlls() {
@@ -150,6 +151,10 @@ bool LoadMediaFoundationDlls() {
L"%WINDIR%\\system32\\mfreadwrite.dll",
L"%WINDIR%\\system32\\MFCaptureEngine.dll"};
+ // Mitigate the issues caused by loading DLLs on a background thread
+ // (http://crbug/973868).
+ SCOPED_MAY_LOAD_LIBRARY_AT_BACKGROUND_PRIORITY_REPEATEDLY();
+
for (const wchar_t* kMfDLL : kMfDLLs) {
wchar_t path[MAX_PATH] = {0};
ExpandEnvironmentStringsW(kMfDLL, path, base::size(path));
@@ -199,15 +204,15 @@ bool CreateVideoCaptureDeviceMediaFoundation(const Descriptor& descriptor,
return SUCCEEDED(MFCreateDeviceSource(attributes.Get(), source));
}
-bool IsDeviceBlackListed(const std::string& name) {
- DCHECK_EQ(BLACKLISTED_CAMERA_MAX + 1,
- static_cast<int>(base::size(kBlacklistedCameraNames)));
- for (size_t i = 0; i < base::size(kBlacklistedCameraNames); ++i) {
- if (base::StartsWith(name, kBlacklistedCameraNames[i],
+bool IsDeviceBlocked(const std::string& name) {
+ DCHECK_EQ(BLOCKED_CAMERA_MAX + 1,
+ static_cast<int>(base::size(kBlockedCameraNames)));
+ for (size_t i = 0; i < base::size(kBlockedCameraNames); ++i) {
+ if (base::StartsWith(name, kBlockedCameraNames[i],
base::CompareCase::INSENSITIVE_ASCII)) {
- DVLOG(1) << "Enumerated blacklisted device: " << name;
+ DVLOG(1) << "Enumerated blocked device: " << name;
UMA_HISTOGRAM_ENUMERATION("Media.VideoCapture.BlacklistedDevice", i,
- BLACKLISTED_CAMERA_MAX + 1);
+ BLOCKED_CAMERA_MAX + 1);
return true;
}
}
@@ -235,6 +240,10 @@ std::string GetDeviceModelId(const std::string& device_id) {
}
HRESULT EnumerateDirectShowDevices(IEnumMoniker** enum_moniker) {
+ // Mitigate the issues caused by loading DLLs on a background thread
+ // (http://crbug/973868).
+ SCOPED_MAY_LOAD_LIBRARY_AT_BACKGROUND_PRIORITY();
+
ComPtr<ICreateDevEnum> dev_enum;
HRESULT hr = ::CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
IID_PPV_ARGS(&dev_enum));
@@ -272,8 +281,7 @@ void GetDeviceSupportedFormatsDirectShow(const Descriptor& descriptor,
DVLOG(1) << "GetDeviceSupportedFormatsDirectShow for "
<< descriptor.display_name();
bool query_detailed_frame_rates =
- !IsDeviceBlacklistedForQueryingDetailedFrameRates(
- descriptor.display_name());
+ !IsDeviceBlockedForQueryingDetailedFrameRates(descriptor.display_name());
CapabilityList capability_list;
VideoCaptureDeviceWin::GetDeviceCapabilityList(
descriptor.device_id, query_detailed_frame_rates, &capability_list);
@@ -662,9 +670,8 @@ void VideoCaptureDeviceFactoryWin::GetDeviceDescriptorsMediaFoundation(
const std::string model_id = GetDeviceModelId(device_id);
const std::string display_name =
base::SysWideToUTF8(std::wstring(name, name_size));
- if (IsDeviceBlacklistedForMediaFoundationByModelId(model_id) ||
- IsDeviceBlacklistedForMediaFoundationByDisplayName(
- display_name)) {
+ if (IsDeviceBlockedForMediaFoundationByModelId(model_id) ||
+ IsDeviceBlockedForMediaFoundationByDisplayName(display_name)) {
continue;
}
if (list_was_empty ||
@@ -763,7 +770,7 @@ void VideoCaptureDeviceFactoryWin::GetDeviceDescriptorsDirectShow(
continue;
const std::string device_name(base::SysWideToUTF8(V_BSTR(name.ptr())));
- if (IsDeviceBlackListed(device_name))
+ if (IsDeviceBlocked(device_name))
continue;
name.Reset();
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.cc b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
index c432b210efd..bcadd51789d 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
@@ -125,6 +125,76 @@ class MFPhotoCallback final
DISALLOW_COPY_AND_ASSIGN(MFPhotoCallback);
};
+// Locks the given buffer using the fastest supported method when constructed,
+// and automatically unlocks the buffer when destroyed.
+class ScopedBufferLock {
+ public:
+ ScopedBufferLock(ComPtr<IMFMediaBuffer> buffer) : buffer_(std::move(buffer)) {
+ if (FAILED(buffer_.As(&buffer_2d_))) {
+ LockSlow();
+ return;
+ }
+ // Try lock methods from fastest to slowest: Lock2DSize(), then Lock2D(),
+ // then finally LockSlow().
+ if ((Lock2DSize() || Lock2D()) && !UnlockedNoncontiguousBuffer())
+ return;
+ // Fall back to LockSlow() if 2D buffer was unsupported or noncontiguous.
+ buffer_2d_ = nullptr;
+ LockSlow();
+ }
+
+ // Unlocks |buffer_2d_| and returns true if |buffer_2d_| is non-contiguous or
+ // has negative pitch. If |buffer_2d_| is contiguous with positive pitch,
+ // i.e., the buffer format that the surrounding code expects, returns false.
+ bool UnlockedNoncontiguousBuffer() {
+ BOOL is_contiguous;
+ if (pitch_ > 0 &&
+ SUCCEEDED(buffer_2d_->IsContiguousFormat(&is_contiguous)) &&
+ is_contiguous &&
+ (length_ || SUCCEEDED(buffer_2d_->GetContiguousLength(&length_)))) {
+ return false;
+ }
+ buffer_2d_->Unlock2D();
+ return true;
+ }
+
+ bool Lock2DSize() {
+ ComPtr<IMF2DBuffer2> buffer_2d_2;
+ if (FAILED(buffer_.As(&buffer_2d_2)))
+ return false;
+ BYTE* data_start;
+ return SUCCEEDED(buffer_2d_2->Lock2DSize(MF2DBuffer_LockFlags_Read, &data_,
+ &pitch_, &data_start, &length_));
+ }
+
+ bool Lock2D() { return SUCCEEDED(buffer_2d_->Lock2D(&data_, &pitch_)); }
+
+ void LockSlow() {
+ DWORD max_length = 0;
+ buffer_->Lock(&data_, &max_length, &length_);
+ }
+
+ ~ScopedBufferLock() {
+ if (buffer_2d_)
+ buffer_2d_->Unlock2D();
+ else
+ buffer_->Unlock();
+ }
+
+ ScopedBufferLock(const ScopedBufferLock&) = delete;
+ ScopedBufferLock& operator=(const ScopedBufferLock&) = delete;
+
+ BYTE* data() const { return data_; }
+ DWORD length() const { return length_; }
+
+ private:
+ ComPtr<IMFMediaBuffer> buffer_;
+ ComPtr<IMF2DBuffer> buffer_2d_;
+ BYTE* data_ = nullptr;
+ DWORD length_ = 0;
+ LONG pitch_ = 0;
+};
+
scoped_refptr<IMFCaptureEngineOnSampleCallback> CreateMFPhotoCallback(
VideoCaptureDevice::TakePhotoCallback callback,
VideoCaptureFormat format) {
@@ -414,11 +484,19 @@ class MFVideoCallback final
}
IFACEMETHODIMP OnEvent(IMFMediaEvent* media_event) override {
+ base::AutoLock lock(lock_);
+ if (!observer_) {
+ return S_OK;
+ }
observer_->OnEvent(media_event);
return S_OK;
}
IFACEMETHODIMP OnSample(IMFSample* sample) override {
+ base::AutoLock lock(lock_);
+ if (!observer_) {
+ return S_OK;
+ }
if (!sample) {
observer_->OnFrameDropped(
VideoCaptureFrameDropReason::kWinMediaFoundationReceivedSampleIsNull);
@@ -438,60 +516,16 @@ class MFVideoCallback final
ComPtr<IMFMediaBuffer> buffer;
sample->GetBufferByIndex(i, &buffer);
if (buffer) {
- // Lock the buffer using the fastest method that it supports. The
- // Lock2DSize() method is faster than Lock2D(), which is faster than
- // Lock().
- DWORD length = 0;
- BYTE* data = nullptr;
- ComPtr<IMF2DBuffer> buffer_2d;
- if (SUCCEEDED(buffer.As(&buffer_2d))) {
- HRESULT lock_result;
- BYTE* scanline_0 = nullptr;
- LONG pitch = 0;
- ComPtr<IMF2DBuffer2> buffer_2d_2;
- if (SUCCEEDED(buffer.As(&buffer_2d_2))) {
- BYTE* data_start;
- lock_result =
- buffer_2d_2->Lock2DSize(MF2DBuffer_LockFlags_Read, &scanline_0,
- &pitch, &data_start, &length);
- } else {
- lock_result = buffer_2d->Lock2D(&scanline_0, &pitch);
- }
- if (SUCCEEDED(lock_result)) {
- // Use |buffer_2d| only if it is contiguous and has positive pitch.
- BOOL is_contiguous;
- if (pitch > 0 &&
- SUCCEEDED(buffer_2d->IsContiguousFormat(&is_contiguous)) &&
- is_contiguous &&
- (length ||
- SUCCEEDED(buffer_2d->GetContiguousLength(&length)))) {
- data = scanline_0;
- } else {
- buffer_2d->Unlock2D();
- }
- }
- }
- if (!data) {
- // If the faster methods fail, fall back to Lock to lock the buffer.
- buffer_2d = nullptr;
- DWORD max_length = 0;
- buffer->Lock(&data, &max_length, &length);
- }
-
- if (data) {
- observer_->OnIncomingCapturedData(data, length, reference_time,
- timestamp);
+ ScopedBufferLock locked_buffer(buffer);
+ if (locked_buffer.data()) {
+ observer_->OnIncomingCapturedData(locked_buffer.data(),
+ locked_buffer.length(),
+ reference_time, timestamp);
} else {
observer_->OnFrameDropped(
VideoCaptureFrameDropReason::
kWinMediaFoundationLockingBufferDelieveredNullptr);
}
-
- if (buffer_2d)
- buffer_2d->Unlock2D();
- else
- buffer->Unlock();
-
} else {
observer_->OnFrameDropped(
VideoCaptureFrameDropReason::
@@ -501,10 +535,18 @@ class MFVideoCallback final
return S_OK;
}
+ void Shutdown() {
+ base::AutoLock lock(lock_);
+ observer_ = nullptr;
+ }
+
private:
friend class base::RefCountedThreadSafe<MFVideoCallback>;
~MFVideoCallback() {}
- VideoCaptureDeviceMFWin* observer_;
+
+ // Protects access to |observer_|.
+ base::Lock lock_;
+ VideoCaptureDeviceMFWin* observer_ GUARDED_BY(lock_);
};
// static
@@ -657,7 +699,12 @@ VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
has_sent_on_started_to_client_(false),
exposure_mode_manual_(false),
focus_mode_manual_(false),
- white_balance_mode_manual_(false) {
+ white_balance_mode_manual_(false),
+ capture_initialize_(base::WaitableEvent::ResetPolicy::AUTOMATIC,
+ base::WaitableEvent::InitialState::NOT_SIGNALED),
+ // We never want to reset |capture_error_|.
+ capture_error_(base::WaitableEvent::ResetPolicy::MANUAL,
+ base::WaitableEvent::InitialState::NOT_SIGNALED) {
DETACH_FROM_SEQUENCE(sequence_checker_);
}
@@ -673,6 +720,9 @@ VideoCaptureDeviceMFWin::~VideoCaptureDeviceMFWin() {
: false);
}
}
+ if (video_callback_) {
+ video_callback_->Shutdown();
+ }
}
bool VideoCaptureDeviceMFWin::Init() {
@@ -705,6 +755,13 @@ bool VideoCaptureDeviceMFWin::Init() {
LogError(FROM_HERE, hr);
return false;
}
+
+ hr = WaitOnCaptureEvent(MF_CAPTURE_ENGINE_INITIALIZED);
+ if (FAILED(hr)) {
+ LogError(FROM_HERE, hr);
+ return false;
+ }
+
is_initialized_ = true;
return true;
}
@@ -1294,7 +1351,21 @@ void VideoCaptureDeviceMFWin::OnEvent(IMFMediaEvent* media_event) {
base::AutoLock lock(lock_);
HRESULT hr;
+ GUID capture_event_guid = GUID_NULL;
+
media_event->GetStatus(&hr);
+ media_event->GetExtendedType(&capture_event_guid);
+ // TODO(http://crbug.com/1093521): Add cases for Start
+ // MF_CAPTURE_ENGINE_PREVIEW_STARTED and MF_CAPTURE_ENGINE_PREVIEW_STOPPED
+ // When MF_CAPTURE_ENGINE_ERROR is returned the captureengine object is no
+ // longer valid.
+ if (capture_event_guid == MF_CAPTURE_ENGINE_ERROR || FAILED(hr)) {
+ capture_error_.Signal();
+ // There should always be a valid error
+ hr = SUCCEEDED(hr) ? E_UNEXPECTED : hr;
+ } else if (capture_event_guid == MF_CAPTURE_ENGINE_INITIALIZED) {
+ capture_initialize_.Signal();
+ }
if (FAILED(hr))
OnError(VideoCaptureError::kWinMediaFoundationGetMediaEventStatusFailed,
@@ -1324,4 +1395,35 @@ void VideoCaptureDeviceMFWin::SendOnStartedIfNotYetSent() {
client_->OnStarted();
}
+HRESULT VideoCaptureDeviceMFWin::WaitOnCaptureEvent(GUID capture_event_guid) {
+ HRESULT hr = S_OK;
+ HANDLE events[] = {nullptr, capture_error_.handle()};
+
+ // TODO(http://crbug.com/1093521): Add cases for Start
+ // MF_CAPTURE_ENGINE_PREVIEW_STARTED and MF_CAPTURE_ENGINE_PREVIEW_STOPPED
+ if (capture_event_guid == MF_CAPTURE_ENGINE_INITIALIZED) {
+ events[0] = capture_initialize_.handle();
+ } else {
+ // no registered event handle for the event requested
+ hr = E_NOTIMPL;
+ LogError(FROM_HERE, hr);
+ return hr;
+ }
+
+ DWORD wait_result =
+ ::WaitForMultipleObjects(base::size(events), events, FALSE, INFINITE);
+ switch (wait_result) {
+ case WAIT_OBJECT_0:
+ break;
+ case WAIT_FAILED:
+ hr = HRESULT_FROM_WIN32(::GetLastError());
+ LogError(FROM_HERE, hr);
+ break;
+ default:
+ hr = E_UNEXPECTED;
+ LogError(FROM_HERE, hr);
+ break;
+ }
+ return hr;
+}
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.h b/chromium/media/capture/video/win/video_capture_device_mf_win.h
index e5ecd1250ba..a3d0e7db763 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.h
@@ -117,6 +117,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
const base::Location& from_here,
const char* message);
void SendOnStartedIfNotYetSent();
+ HRESULT WaitOnCaptureEvent(GUID capture_event_guid);
VideoFacingMode facing_mode_;
CreateMFPhotoCallbackCB create_mf_photo_callback_;
@@ -145,6 +146,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
bool focus_mode_manual_;
bool white_balance_mode_manual_;
base::queue<TakePhotoCallback> video_stream_take_photo_callbacks_;
+ base::WaitableEvent capture_initialize_;
+ base::WaitableEvent capture_error_;
SEQUENCE_CHECKER(sequence_checker_);
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
index e8575b891ef..5307405382d 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
@@ -8,6 +8,8 @@
#include <wincodec.h>
#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/test/task_environment.h"
#include "media/capture/video/win/sink_filter_win.h"
#include "media/capture/video/win/video_capture_device_factory_win.h"
#include "media/capture/video/win/video_capture_device_mf_win.h"
@@ -413,11 +415,34 @@ class MockMFCaptureEngine
EXPECT_TRUE(pAttributes);
EXPECT_TRUE(pVideoSource);
event_callback = pEventCallback;
- OnCorrectInitialize();
+ OnCorrectInitializeQueued();
+
+ ON_CALL(*this, OnInitStatus).WillByDefault(Return(S_OK));
+ ON_CALL(*this, OnInitEventGuid)
+ .WillByDefault(Return(MF_CAPTURE_ENGINE_INITIALIZED));
+ // HW Cameras usually add about 500ms latency on init
+ ON_CALL(*this, InitEventDelay)
+ .WillByDefault(Return(base::TimeDelta::FromMilliseconds(500)));
+
+ base::TimeDelta event_delay = InitEventDelay();
+
+ base::ThreadPool::PostDelayedTask(
+ FROM_HERE,
+ base::BindOnce(&MockMFCaptureEngine::FireCaptureEvent, this,
+ OnInitEventGuid(), OnInitStatus()),
+ event_delay);
+ // if zero is passed ensure event fires before wait starts
+ if (event_delay == base::TimeDelta::FromMilliseconds(0)) {
+ base::PlatformThread::Sleep(base::TimeDelta::FromMilliseconds(200));
+ }
+
return S_OK;
}
- MOCK_METHOD0(OnCorrectInitialize, void(void));
+ MOCK_METHOD0(OnCorrectInitializeQueued, void(void));
+ MOCK_METHOD0(OnInitEventGuid, GUID(void));
+ MOCK_METHOD0(OnInitStatus, HRESULT(void));
+ MOCK_METHOD0(InitEventDelay, base::TimeDelta(void));
IFACEMETHODIMP StartPreview(void) override {
OnStartPreview();
@@ -456,8 +481,14 @@ class MockMFCaptureEngine
}
MOCK_METHOD0(DoGetSource, IMFCaptureSource*());
+ void FireCaptureEvent(GUID event, HRESULT hrStatus) {
+ ComPtr<IMFMediaEvent> captureEvent;
+ MFCreateMediaEvent(MEExtendedType, event, hrStatus, nullptr, &captureEvent);
+ if (event_callback) {
+ event_callback->OnEvent(captureEvent.Get());
+ }
+ }
scoped_refptr<IMFCaptureEngineOnEventCallback> event_callback;
-
private:
friend class base::RefCountedThreadSafe<MockMFCaptureEngine>;
virtual ~MockMFCaptureEngine() = default;
@@ -872,7 +903,7 @@ class VideoCaptureDeviceMFWinTest : public ::testing::Test {
device_->set_max_retry_count_for_testing(3);
device_->set_retry_delay_in_ms_for_testing(1);
- EXPECT_CALL(*(engine_.Get()), OnCorrectInitialize());
+ EXPECT_CALL(*(engine_.Get()), OnCorrectInitializeQueued());
EXPECT_TRUE(device_->Init());
EXPECT_CALL(*(engine_.Get()), DoGetSource())
.WillRepeatedly(Invoke([this]() {
@@ -1079,6 +1110,7 @@ class VideoCaptureDeviceMFWinTest : public ::testing::Test {
scoped_refptr<MockMFCaptureSource> capture_source_;
scoped_refptr<MockCapturePreviewSink> capture_preview_sink_;
+ base::test::TaskEnvironment task_environment_;
private:
const bool media_foundation_supported_;
@@ -1118,6 +1150,91 @@ TEST_F(VideoCaptureDeviceMFWinTest, CallClientOnErrorMediaEvent) {
engine_->event_callback->OnEvent(media_event_error.get());
}
+// Expects Init to fail due to OnError() event
+TEST_F(VideoCaptureDeviceMFWinTest, CallClientOnErrorDurringInit) {
+ if (ShouldSkipTest())
+ return;
+
+ VideoCaptureDeviceDescriptor descriptor = VideoCaptureDeviceDescriptor();
+ Microsoft::WRL::ComPtr<MockMFMediaSource> media_source =
+ new MockMFMediaSource();
+ Microsoft::WRL::ComPtr<MockMFCaptureEngine> engine =
+ new MockMFCaptureEngine();
+ std::unique_ptr<VideoCaptureDeviceMFWin> device =
+ std::make_unique<VideoCaptureDeviceMFWin>(descriptor, media_source,
+ engine);
+
+ EXPECT_CALL(*(engine.Get()), OnInitEventGuid).WillOnce([]() {
+ return MF_CAPTURE_ENGINE_INITIALIZED;
+ });
+ // E_ACCESSDENIED is thrown if application is denied access in settings UI
+ EXPECT_CALL(*(engine.Get()), OnInitStatus).WillOnce([]() {
+ return E_ACCESSDENIED;
+ });
+
+ EXPECT_CALL(*(engine.Get()), OnCorrectInitializeQueued());
+
+ EXPECT_FALSE(device->Init());
+}
+
+// Expects Init to succeed but MF_CAPTURE_ENGINE_INITIALIZED fired before
+// WaitOnCaptureEvent is called.
+TEST_F(VideoCaptureDeviceMFWinTest, CallClientOnFireCaptureEngineInitEarly) {
+ if (ShouldSkipTest())
+ return;
+
+ VideoCaptureDeviceDescriptor descriptor = VideoCaptureDeviceDescriptor();
+ Microsoft::WRL::ComPtr<MockMFMediaSource> media_source =
+ new MockMFMediaSource();
+ Microsoft::WRL::ComPtr<MockMFCaptureEngine> engine =
+ new MockMFCaptureEngine();
+ std::unique_ptr<VideoCaptureDeviceMFWin> device =
+ std::make_unique<VideoCaptureDeviceMFWin>(descriptor, media_source,
+ engine);
+
+ EXPECT_CALL(*(engine.Get()), OnInitEventGuid).WillOnce([]() {
+ return MF_CAPTURE_ENGINE_INITIALIZED;
+ });
+ EXPECT_CALL(*(engine.Get()), InitEventDelay).WillOnce([]() {
+ return base::TimeDelta::FromMilliseconds(0);
+ });
+
+ EXPECT_CALL(*(engine.Get()), OnCorrectInitializeQueued());
+
+ EXPECT_TRUE(device->Init());
+}
+
+// Send MFVideoCallback::OnEvent when VideoCaptureDeviceMFWin has been destroyed
+TEST_F(VideoCaptureDeviceMFWinTest,
+ SendMFVideoCallbackAfterVideoCaptureDeviceMFWinDestructor) {
+ if (ShouldSkipTest())
+ return;
+
+ VideoCaptureDeviceDescriptor descriptor = VideoCaptureDeviceDescriptor();
+ Microsoft::WRL::ComPtr<MockMFMediaSource> media_source =
+ new MockMFMediaSource();
+ Microsoft::WRL::ComPtr<MockMFCaptureEngine> engine =
+ new MockMFCaptureEngine();
+ std::unique_ptr<VideoCaptureDeviceMFWin> device =
+ std::make_unique<VideoCaptureDeviceMFWin>(descriptor, media_source,
+ engine);
+
+ EXPECT_CALL(*(engine.Get()), OnInitEventGuid).WillOnce([]() {
+ return MF_CAPTURE_ENGINE_INITIALIZED;
+ });
+
+ EXPECT_CALL(*(engine.Get()), OnCorrectInitializeQueued());
+
+ EXPECT_TRUE(device->Init());
+
+ // Force ~VideoCaptureDeviceMFWin() which will invalidate
+ // MFVideoCallback::observer_
+ device.reset();
+ // Send event to MFVideoCallback::OnEvent
+ engine->FireCaptureEvent(MF_CAPTURE_ENGINE_ERROR,
+ MF_E_VIDEO_RECORDING_DEVICE_INVALIDATED);
+}
+
// Allocates device with flaky methods failing with MF_E_INVALIDREQUEST and
// expects the device to retry and start correctly
TEST_F(VideoCaptureDeviceMFWinTest, AllocateAndStartWithFlakyInvalidRequest) {
diff --git a/chromium/media/cast/common/expanded_value_base.h b/chromium/media/cast/common/expanded_value_base.h
index f890bc350a5..8021a4fb365 100644
--- a/chromium/media/cast/common/expanded_value_base.h
+++ b/chromium/media/cast/common/expanded_value_base.h
@@ -9,7 +9,7 @@
#include <limits>
-#include "base/logging.h"
+#include "base/check_op.h"
namespace media {
namespace cast {
diff --git a/chromium/media/cast/common/mod_util.h b/chromium/media/cast/common/mod_util.h
index 8ca42af0e6e..fc68c63f26f 100644
--- a/chromium/media/cast/common/mod_util.h
+++ b/chromium/media/cast/common/mod_util.h
@@ -6,7 +6,7 @@
#define MEDIA_CAST_COMMON_MOD_UTIL_H_
#include <map>
-#include "base/logging.h"
+#include "base/check.h"
namespace media {
namespace cast {
diff --git a/chromium/media/cast/common/transport_encryption_handler.cc b/chromium/media/cast/common/transport_encryption_handler.cc
index 3ee8cab1aba..d3b754e85f2 100644
--- a/chromium/media/cast/common/transport_encryption_handler.cc
+++ b/chromium/media/cast/common/transport_encryption_handler.cc
@@ -7,6 +7,7 @@
#include <stddef.h>
#include "base/logging.h"
+#include "base/notreached.h"
#include "crypto/encryptor.h"
#include "crypto/symmetric_key.h"
diff --git a/chromium/media/cast/net/pacing/paced_sender.cc b/chromium/media/cast/net/pacing/paced_sender.cc
index 3e1e8232b42..62317fc0267 100644
--- a/chromium/media/cast/net/pacing/paced_sender.cc
+++ b/chromium/media/cast/net/pacing/paced_sender.cc
@@ -6,6 +6,7 @@
#include "base/big_endian.h"
#include "base/bind.h"
+#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
namespace media {
diff --git a/chromium/media/cast/net/rtcp/receiver_rtcp_session.cc b/chromium/media/cast/net/rtcp/receiver_rtcp_session.cc
index aec48b7841c..3544cbd8023 100644
--- a/chromium/media/cast/net/rtcp/receiver_rtcp_session.cc
+++ b/chromium/media/cast/net/rtcp/receiver_rtcp_session.cc
@@ -2,9 +2,11 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include "media/cast/net/rtcp/receiver_rtcp_session.h"
+
#include "base/big_endian.h"
+#include "base/logging.h"
#include "base/time/tick_clock.h"
-#include "media/cast/net/rtcp/receiver_rtcp_session.h"
#include "media/cast/net/rtcp/rtcp_builder.h"
#include "media/cast/net/rtcp/rtcp_defines.h"
#include "media/cast/net/rtcp/rtcp_utility.h"
diff --git a/chromium/media/cast/net/rtcp/rtcp_builder.cc b/chromium/media/cast/net/rtcp/rtcp_builder.cc
index aa296ba72ce..a7fb6509c23 100644
--- a/chromium/media/cast/net/rtcp/rtcp_builder.cc
+++ b/chromium/media/cast/net/rtcp/rtcp_builder.cc
@@ -114,7 +114,7 @@ class NackStringBuilder {
} // namespace
RtcpBuilder::RtcpBuilder(uint32_t sending_ssrc)
- : writer_(NULL, 0), local_ssrc_(sending_ssrc), ptr_of_length_(NULL) {}
+ : writer_(nullptr, 0), local_ssrc_(sending_ssrc), ptr_of_length_(nullptr) {}
RtcpBuilder::~RtcpBuilder() = default;
@@ -127,7 +127,7 @@ void RtcpBuilder::PatchLengthField() {
<< "Packets must be a multiple of 32 bits long";
*ptr_of_length_ = this_packet_length >> 10;
*(ptr_of_length_ + 1) = (this_packet_length >> 2) & 0xFF;
- ptr_of_length_ = NULL;
+ ptr_of_length_ = nullptr;
}
}
@@ -154,7 +154,7 @@ void RtcpBuilder::Start() {
PacketRef RtcpBuilder::Finish() {
PatchLengthField();
packet_->data.resize(kMaxIpPacketSize - writer_.remaining());
- writer_ = base::BigEndianWriter(NULL, 0);
+ writer_ = base::BigEndianWriter(nullptr, 0);
PacketRef ret = packet_;
packet_.reset();
return ret;
diff --git a/chromium/media/cast/net/rtcp/sender_rtcp_session.cc b/chromium/media/cast/net/rtcp/sender_rtcp_session.cc
index 2b2908142f6..dba167915ed 100644
--- a/chromium/media/cast/net/rtcp/sender_rtcp_session.cc
+++ b/chromium/media/cast/net/rtcp/sender_rtcp_session.cc
@@ -7,6 +7,7 @@
#include <utility>
#include "base/big_endian.h"
+#include "base/logging.h"
#include "base/time/time.h"
#include "media/cast/constants.h"
#include "media/cast/net/pacing/paced_sender.h"
diff --git a/chromium/media/cast/net/rtcp/test_rtcp_packet_builder.cc b/chromium/media/cast/net/rtcp/test_rtcp_packet_builder.cc
index 8c00d8e9b15..626ed742888 100644
--- a/chromium/media/cast/net/rtcp/test_rtcp_packet_builder.cc
+++ b/chromium/media/cast/net/rtcp/test_rtcp_packet_builder.cc
@@ -11,9 +11,9 @@ namespace media {
namespace cast {
TestRtcpPacketBuilder::TestRtcpPacketBuilder()
- : ptr_of_length_(NULL),
+ : ptr_of_length_(nullptr),
big_endian_writer_(reinterpret_cast<char*>(buffer_), kMaxIpPacketSize),
- big_endian_reader_(NULL, 0) {}
+ big_endian_reader_(nullptr, 0) {}
void TestRtcpPacketBuilder::AddSr(uint32_t remote_ssrc,
int number_of_report_blocks) {
@@ -255,7 +255,7 @@ void TestRtcpPacketBuilder::PatchLengthField() {
<< "Packets must be a multiple of 32 bits long";
*ptr_of_length_ = this_packet_length >> 10;
*(ptr_of_length_ + 1) = (this_packet_length >> 2) & 0xFF;
- ptr_of_length_ = NULL;
+ ptr_of_length_ = nullptr;
}
}
diff --git a/chromium/media/cast/net/rtp/cast_message_builder.cc b/chromium/media/cast/net/rtp/cast_message_builder.cc
index 1376061153c..61a152b53a1 100644
--- a/chromium/media/cast/net/rtp/cast_message_builder.cc
+++ b/chromium/media/cast/net/rtp/cast_message_builder.cc
@@ -4,6 +4,7 @@
#include "media/cast/net/rtp/cast_message_builder.h"
+#include "base/logging.h"
#include "media/cast/constants.h"
#include "media/cast/net/rtp/framer.h"
diff --git a/chromium/media/cast/net/udp_packet_pipe_unittest.cc b/chromium/media/cast/net/udp_packet_pipe_unittest.cc
index b5a209a4030..d02b4655ba7 100644
--- a/chromium/media/cast/net/udp_packet_pipe_unittest.cc
+++ b/chromium/media/cast/net/udp_packet_pipe_unittest.cc
@@ -4,6 +4,7 @@
#include "media/cast/net/udp_packet_pipe.h"
+#include <cstring>
#include <memory>
#include <string>
diff --git a/chromium/media/cast/sender/audio_encoder.cc b/chromium/media/cast/sender/audio_encoder.cc
index 43797c6bf59..23110128b33 100644
--- a/chromium/media/cast/sender/audio_encoder.cc
+++ b/chromium/media/cast/sender/audio_encoder.cc
@@ -14,6 +14,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/location.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/stl_util.h"
#include "base/sys_byteorder.h"
diff --git a/chromium/media/cast/sender/audio_encoder_unittest.cc b/chromium/media/cast/sender/audio_encoder_unittest.cc
index 557642b7456..9531c2c1590 100644
--- a/chromium/media/cast/sender/audio_encoder_unittest.cc
+++ b/chromium/media/cast/sender/audio_encoder_unittest.cc
@@ -13,6 +13,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "base/logging.h"
#include "base/stl_util.h"
#include "build/build_config.h"
#include "media/base/audio_bus.h"
diff --git a/chromium/media/cast/sender/audio_sender_unittest.cc b/chromium/media/cast/sender/audio_sender_unittest.cc
index 10551754218..54ca3921278 100644
--- a/chromium/media/cast/sender/audio_sender_unittest.cc
+++ b/chromium/media/cast/sender/audio_sender_unittest.cc
@@ -11,6 +11,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/test/simple_test_tick_clock.h"
diff --git a/chromium/media/cast/sender/external_video_encoder.cc b/chromium/media/cast/sender/external_video_encoder.cc
index fd8fe6443a1..23e02f0e302 100644
--- a/chromium/media/cast/sender/external_video_encoder.cc
+++ b/chromium/media/cast/sender/external_video_encoder.cc
@@ -325,10 +325,10 @@ class ExternalVideoEncoder::VEAClientImpl
// If FRAME_DURATION metadata was provided in the source VideoFrame,
// compute the utilization metrics.
- base::TimeDelta frame_duration;
- if (request.video_frame->metadata()->GetTimeDelta(
- media::VideoFrameMetadata::FRAME_DURATION, &frame_duration) &&
- frame_duration > base::TimeDelta()) {
+ base::TimeDelta frame_duration =
+ request.video_frame->metadata()->frame_duration.value_or(
+ base::TimeDelta());
+ if (frame_duration > base::TimeDelta()) {
// Compute encoder utilization in terms of the number of frames in
// backlog, including the current frame encode that is finishing
// here. This "backlog" model works as follows: First, assume that all
diff --git a/chromium/media/cast/sender/frame_sender.cc b/chromium/media/cast/sender/frame_sender.cc
index b8328e80995..775688a85de 100644
--- a/chromium/media/cast/sender/frame_sender.cc
+++ b/chromium/media/cast/sender/frame_sender.cc
@@ -11,6 +11,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/numerics/safe_conversions.h"
#include "base/trace_event/trace_event.h"
diff --git a/chromium/media/cast/sender/h264_vt_encoder.cc b/chromium/media/cast/sender/h264_vt_encoder.cc
index 9e7a3698fd3..ff4852a20d0 100644
--- a/chromium/media/cast/sender/h264_vt_encoder.cc
+++ b/chromium/media/cast/sender/h264_vt_encoder.cc
@@ -181,22 +181,17 @@ H264VideoToolboxEncoder::H264VideoToolboxEncoder(
weak_factory_.GetWeakPtr(), cast_environment_));
// Register for power state changes.
- if (base::PowerMonitor::AddObserver(this)) {
- VLOG(1) << "Registered for power state changes.";
- } else {
- DLOG(WARNING) << "No power monitor. Process suspension will invalidate "
- "the encoder.";
- }
+ base::PowerMonitor::AddObserver(this);
+ VLOG(1) << "Registered for power state changes.";
}
}
H264VideoToolboxEncoder::~H264VideoToolboxEncoder() {
DestroyCompressionSession();
- // If video_frame_factory_ is not null, the encoder registered for power state
- // changes in the ctor and it must now unregister.
- if (video_frame_factory_)
- base::PowerMonitor::RemoveObserver(this);
+ // Unregister the power observer. It is valid to remove an observer that was
+ // not added.
+ base::PowerMonitor::RemoveObserver(this);
}
void H264VideoToolboxEncoder::ResetCompressionSession() {
diff --git a/chromium/media/cast/sender/performance_metrics_overlay.cc b/chromium/media/cast/sender/performance_metrics_overlay.cc
index d2ebaa1467c..49328234f9c 100644
--- a/chromium/media/cast/sender/performance_metrics_overlay.cc
+++ b/chromium/media/cast/sender/performance_metrics_overlay.cc
@@ -275,19 +275,16 @@ scoped_refptr<VideoFrame> MaybeRenderPerformanceMetricsOverlay(
frame->AddDestructionObserver(base::BindOnce(
[](const VideoFrameMetadata* sent_frame_metadata,
scoped_refptr<VideoFrame> source_frame) {
- source_frame->metadata()->Clear();
- source_frame->metadata()->MergeMetadataFrom(sent_frame_metadata);
+ source_frame->set_metadata(*sent_frame_metadata);
},
frame->metadata(), std::move(source)));
// Line 3: Frame duration, resolution, and timestamp.
int frame_duration_ms = 0;
int frame_duration_ms_frac = 0;
- base::TimeDelta frame_duration;
- if (frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- &frame_duration)) {
+ if (frame->metadata()->frame_duration.has_value()) {
const int decimilliseconds = base::saturated_cast<int>(
- frame_duration.InMicroseconds() / 100.0 + 0.5);
+ frame->metadata()->frame_duration->InMicroseconds() / 100.0 + 0.5);
frame_duration_ms = decimilliseconds / 10;
frame_duration_ms_frac = decimilliseconds % 10;
}
@@ -312,13 +309,13 @@ scoped_refptr<VideoFrame> MaybeRenderPerformanceMetricsOverlay(
// Line 2: Capture duration, target playout delay, low-latency mode, and
// target bitrate.
int capture_duration_ms = 0;
- base::TimeTicks capture_begin_time, capture_end_time;
- if (frame->metadata()->GetTimeTicks(VideoFrameMetadata::CAPTURE_BEGIN_TIME,
- &capture_begin_time) &&
- frame->metadata()->GetTimeTicks(VideoFrameMetadata::CAPTURE_END_TIME,
- &capture_end_time)) {
- capture_duration_ms = base::saturated_cast<int>(
- (capture_end_time - capture_begin_time).InMillisecondsF() + 0.5);
+ if (frame->metadata()->capture_begin_time &&
+ frame->metadata()->capture_end_time) {
+ capture_duration_ms =
+ base::saturated_cast<int>((*frame->metadata()->capture_end_time -
+ *frame->metadata()->capture_begin_time)
+ .InMillisecondsF() +
+ 0.5);
}
const int target_playout_delay_ms =
static_cast<int>(target_playout_delay.InMillisecondsF() + 0.5);
diff --git a/chromium/media/cast/sender/size_adaptable_video_encoder_base.cc b/chromium/media/cast/sender/size_adaptable_video_encoder_base.cc
index 9a5fa1e0cb5..03b76b76ecd 100644
--- a/chromium/media/cast/sender/size_adaptable_video_encoder_base.cc
+++ b/chromium/media/cast/sender/size_adaptable_video_encoder_base.cc
@@ -8,6 +8,7 @@
#include "base/bind.h"
#include "base/location.h"
+#include "base/logging.h"
#include "media/base/video_frame.h"
namespace media {
diff --git a/chromium/media/cast/sender/video_encoder_unittest.cc b/chromium/media/cast/sender/video_encoder_unittest.cc
index a0ba87508f0..b729a8f9b92 100644
--- a/chromium/media/cast/sender/video_encoder_unittest.cc
+++ b/chromium/media/cast/sender/video_encoder_unittest.cc
@@ -12,6 +12,7 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
diff --git a/chromium/media/cast/sender/video_sender.cc b/chromium/media/cast/sender/video_sender.cc
index dc3798be28a..18d28106c2c 100644
--- a/chromium/media/cast/sender/video_sender.cc
+++ b/chromium/media/cast/sender/video_sender.cc
@@ -63,12 +63,12 @@ void LogVideoCaptureTimestamps(CastEnvironment* cast_environment,
capture_end_event->width = video_frame.visible_rect().width();
capture_end_event->height = video_frame.visible_rect().height();
- if (!video_frame.metadata()->GetTimeTicks(
- media::VideoFrameMetadata::CAPTURE_BEGIN_TIME,
- &capture_begin_event->timestamp) ||
- !video_frame.metadata()->GetTimeTicks(
- media::VideoFrameMetadata::CAPTURE_END_TIME,
- &capture_end_event->timestamp)) {
+ if (video_frame.metadata()->capture_begin_time.has_value() &&
+ video_frame.metadata()->capture_end_time.has_value()) {
+ capture_begin_event->timestamp =
+ *video_frame.metadata()->capture_begin_time;
+ capture_end_event->timestamp = *video_frame.metadata()->capture_end_time;
+ } else {
// The frame capture timestamps were not provided by the video capture
// source. Simply log the events as happening right now.
capture_begin_event->timestamp = capture_end_event->timestamp =
@@ -147,14 +147,13 @@ void VideoSender::InsertRawVideoFrame(
(reference_time - base::TimeTicks()).InMicroseconds(),
"rtp_timestamp", rtp_timestamp.lower_32_bits());
- bool low_latency_mode;
- if (video_frame->metadata()->GetBoolean(
- VideoFrameMetadata::INTERACTIVE_CONTENT, &low_latency_mode)) {
- if (low_latency_mode && !low_latency_mode_) {
+ {
+ bool new_low_latency_mode = video_frame->metadata()->interactive_content;
+ if (new_low_latency_mode && !low_latency_mode_) {
VLOG(1) << "Interactive mode playout time " << min_playout_delay_;
playout_delay_change_cb_.Run(min_playout_delay_);
}
- low_latency_mode_ = low_latency_mode;
+ low_latency_mode_ = new_low_latency_mode;
}
// Drop the frame if either its RTP or reference timestamp is not an increase
@@ -327,10 +326,10 @@ void VideoSender::OnEncodedVideoFrame(
// Key frames are artificially capped to 1.0 because their actual
// utilization is atypical compared to the other frames in the stream, and
// this can misguide the producer of the input video frames.
- video_frame->metadata()->SetDouble(
- media::VideoFrameMetadata::RESOURCE_UTILIZATION,
- encoded_frame->dependency == EncodedFrame::KEY ?
- std::min(1.0, attenuated_utilization) : attenuated_utilization);
+ video_frame->metadata()->resource_utilization =
+ encoded_frame->dependency == EncodedFrame::KEY
+ ? std::min(1.0, attenuated_utilization)
+ : attenuated_utilization;
}
SendEncodedFrame(encoder_bitrate, std::move(encoded_frame));
diff --git a/chromium/media/cast/sender/video_sender_unittest.cc b/chromium/media/cast/sender/video_sender_unittest.cc
index e97396f68d0..b783fb03f33 100644
--- a/chromium/media/cast/sender/video_sender_unittest.cc
+++ b/chromium/media/cast/sender/video_sender_unittest.cc
@@ -11,6 +11,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/test/simple_test_tick_clock.h"
@@ -572,23 +573,20 @@ TEST_F(VideoSenderTest, PopulatesResourceUtilizationInFrameMetadata) {
for (int i = 0; i < 3; ++i) {
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
- ASSERT_FALSE(video_frame->metadata()->HasKey(
- media::VideoFrameMetadata::RESOURCE_UTILIZATION));
+ ASSERT_FALSE(video_frame->metadata()->resource_utilization.has_value());
const base::TimeTicks reference_time = testing_clock_.NowTicks();
video_sender_->InsertRawVideoFrame(video_frame, reference_time);
// Run encode tasks. VideoSender::OnEncodedVideoFrame() will be called once
// encoding of the frame is complete, and this is when the
- // RESOURCE_UTILIZATION metadata is populated.
+ // resource_utilization metadata is populated.
RunTasks(33);
- // Check that the RESOURCE_UTILIZATION value is set and non-negative. Don't
+ // Check that the resource_utilization value is set and non-negative. Don't
// check for specific values because they are dependent on real-world CPU
// encode time, which can vary across test runs.
- double utilization = -1.0;
- EXPECT_TRUE(video_frame->metadata()->GetDouble(
- media::VideoFrameMetadata::RESOURCE_UTILIZATION, &utilization));
+ double utilization = *video_frame->metadata()->resource_utilization;
EXPECT_LE(0.0, utilization);
if (i == 0)
EXPECT_GE(1.0, utilization); // Key frames never exceed 1.0.
diff --git a/chromium/media/cast/sender/vp8_encoder.cc b/chromium/media/cast/sender/vp8_encoder.cc
index 8d7794ffa56..2ef3a9d9247 100644
--- a/chromium/media/cast/sender/vp8_encoder.cc
+++ b/chromium/media/cast/sender/vp8_encoder.cc
@@ -231,11 +231,9 @@ void Vp8Encoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
const base::TimeDelta maximum_frame_duration =
base::TimeDelta::FromSecondsD(static_cast<double>(kRestartFramePeriods) /
cast_config_.max_frame_rate);
- base::TimeDelta predicted_frame_duration;
- if (!video_frame->metadata()->GetTimeDelta(
- media::VideoFrameMetadata::FRAME_DURATION,
- &predicted_frame_duration) ||
- predicted_frame_duration <= base::TimeDelta()) {
+ base::TimeDelta predicted_frame_duration =
+ video_frame->metadata()->frame_duration.value_or(base::TimeDelta());
+ if (predicted_frame_duration <= base::TimeDelta()) {
// The source of the video frame did not provide the frame duration. Use
// the actual amount of time between the current and previous frame as a
// prediction for the next frame's duration.
diff --git a/chromium/media/cdm/aes_decryptor.cc b/chromium/media/cdm/aes_decryptor.cc
index 9b49a33dc2b..c8247812ef7 100644
--- a/chromium/media/cdm/aes_decryptor.cc
+++ b/chromium/media/cdm/aes_decryptor.cc
@@ -488,7 +488,8 @@ void AesDecryptor::RegisterNewKeyCB(StreamType stream_type,
void AesDecryptor::Decrypt(StreamType stream_type,
scoped_refptr<DecoderBuffer> encrypted,
DecryptCB decrypt_cb) {
- DVLOG(3) << __func__ << ": " << encrypted->AsHumanReadableString();
+ DVLOG(3) << __func__ << ": "
+ << encrypted->AsHumanReadableString(/*verbose=*/true);
if (!encrypted->decrypt_config()) {
// If there is no DecryptConfig, then the data is unencrypted so return it
diff --git a/chromium/media/cdm/aes_decryptor_unittest.cc b/chromium/media/cdm/aes_decryptor_unittest.cc
index 0a812f3c310..df16bd852cd 100644
--- a/chromium/media/cdm/aes_decryptor_unittest.cc
+++ b/chromium/media/cdm/aes_decryptor_unittest.cc
@@ -284,7 +284,6 @@ class AesDecryptorTest : public testing::TestWithParam<TestType> {
std::unique_ptr<CdmAuxiliaryHelper> cdm_helper(
new MockCdmAuxiliaryHelper(std::move(allocator)));
CdmAdapter::Create(helper_->KeySystemName(),
- url::Origin::Create(GURL("http://foo.com")),
cdm_config, create_cdm_func, std::move(cdm_helper),
base::Bind(&MockCdmClient::OnSessionMessage,
base::Unretained(&cdm_client_)),
diff --git a/chromium/media/cdm/cbcs_decryptor_fuzzer.cc b/chromium/media/cdm/cbcs_decryptor_fuzzer.cc
index 8b3bdad5ebe..c0a328594dc 100644
--- a/chromium/media/cdm/cbcs_decryptor_fuzzer.cc
+++ b/chromium/media/cdm/cbcs_decryptor_fuzzer.cc
@@ -9,6 +9,7 @@
#include <string>
#include <vector>
+#include "base/logging.h"
#include "crypto/symmetric_key.h"
#include "media/base/decoder_buffer.h"
#include "media/base/encryption_pattern.h"
diff --git a/chromium/media/cdm/cdm_adapter.cc b/chromium/media/cdm/cdm_adapter.cc
index caf9974ec57..bd619e71dc1 100644
--- a/chromium/media/cdm/cdm_adapter.cc
+++ b/chromium/media/cdm/cdm_adapter.cc
@@ -171,7 +171,6 @@ using crash_reporter::ScopedCrashKeyString;
// static
void CdmAdapter::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
CreateCdmFunc create_cdm_func,
std::unique_ptr<CdmAuxiliaryHelper> helper,
@@ -187,8 +186,8 @@ void CdmAdapter::Create(
DCHECK(session_expiration_update_cb);
scoped_refptr<CdmAdapter> cdm =
- new CdmAdapter(key_system, security_origin, cdm_config, create_cdm_func,
- std::move(helper), session_message_cb, session_closed_cb,
+ new CdmAdapter(key_system, cdm_config, create_cdm_func, std::move(helper),
+ session_message_cb, session_closed_cb,
session_keys_change_cb, session_expiration_update_cb);
// |cdm| ownership passed to the promise.
@@ -198,7 +197,6 @@ void CdmAdapter::Create(
CdmAdapter::CdmAdapter(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
CreateCdmFunc create_cdm_func,
std::unique_ptr<CdmAuxiliaryHelper> helper,
@@ -207,7 +205,6 @@ CdmAdapter::CdmAdapter(
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb)
: key_system_(key_system),
- origin_string_(security_origin.Serialize()),
cdm_config_(cdm_config),
create_cdm_func_(create_cdm_func),
helper_(std::move(helper)),
@@ -215,6 +212,7 @@ CdmAdapter::CdmAdapter(
session_closed_cb_(session_closed_cb),
session_keys_change_cb_(session_keys_change_cb),
session_expiration_update_cb_(session_expiration_update_cb),
+ cdm_origin_(helper_->GetCdmOrigin().Serialize()),
task_runner_(base::ThreadTaskRunnerHandle::Get()),
pool_(new AudioBufferMemoryPool()) {
DVLOG(1) << __func__;
@@ -440,10 +438,11 @@ void CdmAdapter::RegisterNewKeyCB(StreamType stream_type,
void CdmAdapter::Decrypt(StreamType stream_type,
scoped_refptr<DecoderBuffer> encrypted,
DecryptCB decrypt_cb) {
- DVLOG(3) << __func__ << ": " << encrypted->AsHumanReadableString();
+ DVLOG(3) << __func__ << ": "
+ << encrypted->AsHumanReadableString(/*verbose=*/true);
DCHECK(task_runner_->BelongsToCurrentThread());
- ScopedCrashKeyString scoped_crash_key(&g_origin_crash_key, origin_string_);
+ ScopedCrashKeyString scoped_crash_key(&g_origin_crash_key, cdm_origin_);
cdm::InputBuffer_2 input_buffer = {};
std::vector<cdm::SubsampleEntry> subsamples;
@@ -559,10 +558,11 @@ void CdmAdapter::InitializeVideoDecoder(const VideoDecoderConfig& config,
void CdmAdapter::DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
const AudioDecodeCB& audio_decode_cb) {
- DVLOG(3) << __func__ << ": " << encrypted->AsHumanReadableString();
+ DVLOG(3) << __func__ << ": "
+ << encrypted->AsHumanReadableString(/*verbose=*/true);
DCHECK(task_runner_->BelongsToCurrentThread());
- ScopedCrashKeyString scoped_crash_key(&g_origin_crash_key, origin_string_);
+ ScopedCrashKeyString scoped_crash_key(&g_origin_crash_key, cdm_origin_);
cdm::InputBuffer_2 input_buffer = {};
std::vector<cdm::SubsampleEntry> subsamples;
@@ -597,10 +597,11 @@ void CdmAdapter::DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
void CdmAdapter::DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
const VideoDecodeCB& video_decode_cb) {
- DVLOG(3) << __func__ << ": " << encrypted->AsHumanReadableString();
+ DVLOG(3) << __func__ << ": "
+ << encrypted->AsHumanReadableString(/*verbose=*/true);
DCHECK(task_runner_->BelongsToCurrentThread());
- ScopedCrashKeyString scoped_crash_key(&g_origin_crash_key, origin_string_);
+ ScopedCrashKeyString scoped_crash_key(&g_origin_crash_key, cdm_origin_);
cdm::InputBuffer_2 input_buffer = {};
std::vector<cdm::SubsampleEntry> subsamples;
@@ -633,10 +634,7 @@ void CdmAdapter::DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
return;
}
- if (is_video_encrypted_) {
- decoded_frame->metadata()->SetBoolean(VideoFrameMetadata::PROTECTED_VIDEO,
- true);
- }
+ decoded_frame->metadata()->protected_video = is_video_encrypted_;
video_decode_cb.Run(Decryptor::kSuccess, decoded_frame);
}
diff --git a/chromium/media/cdm/cdm_adapter.h b/chromium/media/cdm/cdm_adapter.h
index 2b87592bb62..199c1b51d67 100644
--- a/chromium/media/cdm/cdm_adapter.h
+++ b/chromium/media/cdm/cdm_adapter.h
@@ -55,7 +55,6 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
// |cdm_created_cb| will be called when the CDM is initialized.
static void Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
CreateCdmFunc create_cdm_func,
std::unique_ptr<CdmAuxiliaryHelper> helper,
@@ -157,7 +156,6 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
private:
CdmAdapter(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
CreateCdmFunc create_cdm_func,
std::unique_ptr<CdmAuxiliaryHelper> helper,
@@ -209,7 +207,6 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
void OnFileRead(int file_size_bytes);
const std::string key_system_;
- const std::string origin_string_;
const CdmConfig cdm_config_;
CreateCdmFunc create_cdm_func_;
@@ -223,6 +220,9 @@ class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
SessionKeysChangeCB session_keys_change_cb_;
SessionExpirationUpdateCB session_expiration_update_cb_;
+ // CDM origin used in crash reporting.
+ const std::string cdm_origin_;
+
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
scoped_refptr<AudioBufferMemoryPool> pool_;
diff --git a/chromium/media/cdm/cdm_adapter_factory.cc b/chromium/media/cdm/cdm_adapter_factory.cc
index 358988cc632..7af6fbd35bd 100644
--- a/chromium/media/cdm/cdm_adapter_factory.cc
+++ b/chromium/media/cdm/cdm_adapter_factory.cc
@@ -22,7 +22,6 @@ CdmAdapterFactory::~CdmAdapterFactory() = default;
void CdmAdapterFactory::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -31,14 +30,6 @@ void CdmAdapterFactory::Create(
CdmCreatedCB cdm_created_cb) {
DVLOG(1) << __func__ << ": key_system=" << key_system;
- if (security_origin.opaque()) {
- LOG(ERROR) << "Invalid Origin: " << security_origin;
- base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE,
- base::BindOnce(std::move(cdm_created_cb), nullptr, "Invalid origin."));
- return;
- }
-
CdmAdapter::CreateCdmFunc create_cdm_func =
CdmModule::GetInstance()->GetCreateCdmFunc();
if (!create_cdm_func) {
@@ -56,7 +47,7 @@ void CdmAdapterFactory::Create(
return;
}
- CdmAdapter::Create(key_system, security_origin, cdm_config, create_cdm_func,
+ CdmAdapter::Create(key_system, cdm_config, create_cdm_func,
std::move(cdm_helper), session_message_cb,
session_closed_cb, session_keys_change_cb,
session_expiration_update_cb, std::move(cdm_created_cb));
diff --git a/chromium/media/cdm/cdm_adapter_factory.h b/chromium/media/cdm/cdm_adapter_factory.h
index 2bffc6ec3da..a7ed60de4a7 100644
--- a/chromium/media/cdm/cdm_adapter_factory.h
+++ b/chromium/media/cdm/cdm_adapter_factory.h
@@ -26,7 +26,6 @@ class MEDIA_EXPORT CdmAdapterFactory final : public CdmFactory {
// CdmFactory implementation.
void Create(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
diff --git a/chromium/media/cdm/cdm_adapter_unittest.cc b/chromium/media/cdm/cdm_adapter_unittest.cc
index c8d9ad159b1..3b8cc935370 100644
--- a/chromium/media/cdm/cdm_adapter_unittest.cc
+++ b/chromium/media/cdm/cdm_adapter_unittest.cc
@@ -140,9 +140,8 @@ class CdmAdapterTestBase : public testing::Test,
std::unique_ptr<StrictMock<MockCdmAuxiliaryHelper>> cdm_helper(
new StrictMock<MockCdmAuxiliaryHelper>(std::move(allocator)));
cdm_helper_ = cdm_helper.get();
- CdmAdapter::Create(GetKeySystemName(),
- url::Origin::Create(GURL("http://foo.com")), cdm_config,
- GetCreateCdmFunc(), std::move(cdm_helper),
+ CdmAdapter::Create(GetKeySystemName(), cdm_config, GetCreateCdmFunc(),
+ std::move(cdm_helper),
base::Bind(&MockCdmClient::OnSessionMessage,
base::Unretained(&cdm_client_)),
base::Bind(&MockCdmClient::OnSessionClosed,
diff --git a/chromium/media/cdm/cdm_auxiliary_helper.cc b/chromium/media/cdm/cdm_auxiliary_helper.cc
index 4d163e112aa..6183e10da98 100644
--- a/chromium/media/cdm/cdm_auxiliary_helper.cc
+++ b/chromium/media/cdm/cdm_auxiliary_helper.cc
@@ -18,6 +18,10 @@ cdm::FileIO* CdmAuxiliaryHelper::CreateCdmFileIO(cdm::FileIOClient* client) {
return nullptr;
}
+url::Origin CdmAuxiliaryHelper::GetCdmOrigin() {
+ return url::Origin();
+}
+
cdm::Buffer* CdmAuxiliaryHelper::CreateCdmBuffer(size_t capacity) {
return nullptr;
}
diff --git a/chromium/media/cdm/cdm_auxiliary_helper.h b/chromium/media/cdm/cdm_auxiliary_helper.h
index b6c829351a1..9bbf1301d12 100644
--- a/chromium/media/cdm/cdm_auxiliary_helper.h
+++ b/chromium/media/cdm/cdm_auxiliary_helper.h
@@ -17,6 +17,7 @@
#include "media/cdm/output_protection.h"
#include "media/cdm/platform_verification.h"
#include "media/media_buildflags.h"
+#include "url/origin.h"
namespace cdm {
class FileIO;
@@ -46,6 +47,10 @@ class MEDIA_EXPORT CdmAuxiliaryHelper : public CdmAllocator,
// needed anymore.
virtual cdm::FileIO* CreateCdmFileIO(cdm::FileIOClient* client);
+ // Gets the origin of the frame associated with the CDM, which could be empty
+ // if the origin is unavailable or if error happened.
+ virtual url::Origin GetCdmOrigin();
+
// CdmAllocator implementation.
cdm::Buffer* CreateCdmBuffer(size_t capacity) override;
std::unique_ptr<VideoFrameImpl> CreateCdmVideoFrame() override;
diff --git a/chromium/media/cdm/cdm_host_file.cc b/chromium/media/cdm/cdm_host_file.cc
index 839dc5784f5..e4ad487d1f4 100644
--- a/chromium/media/cdm/cdm_host_file.cc
+++ b/chromium/media/cdm/cdm_host_file.cc
@@ -8,7 +8,9 @@
#include "base/command_line.h"
#include "base/feature_list.h"
+#include "base/files/file_util.h"
#include "base/logging.h"
+#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "media/cdm/api/content_decryption_module_ext.h"
@@ -36,6 +38,9 @@ std::unique_ptr<CdmHostFile> CdmHostFile::Create(
DVLOG(1) << " " << sig_file.IsValid() << ": "
<< sig_file_path.MaybeAsASCII();
+ // Preread file at |file_path| for better performance.
+ ignore_result(PreReadFile(file_path, /*is_executable=*/false));
+
return std::unique_ptr<CdmHostFile>(
new CdmHostFile(file_path, std::move(file), std::move(sig_file)));
}
diff --git a/chromium/media/cdm/cdm_host_files.h b/chromium/media/cdm/cdm_host_files.h
index a6782f7239e..7ed87f68f9a 100644
--- a/chromium/media/cdm/cdm_host_files.h
+++ b/chromium/media/cdm/cdm_host_files.h
@@ -11,7 +11,6 @@
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/lazy_instance.h"
-#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/native_library.h"
diff --git a/chromium/media/cdm/cdm_module.cc b/chromium/media/cdm/cdm_module.cc
index 6f451fcfad7..28cba10d248 100644
--- a/chromium/media/cdm/cdm_module.cc
+++ b/chromium/media/cdm/cdm_module.cc
@@ -9,6 +9,7 @@
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
+#include "base/notreached.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "components/crash/core/common/crash_key.h"
diff --git a/chromium/media/cdm/cdm_wrapper.h b/chromium/media/cdm/cdm_wrapper.h
index ed49aa29bf6..ef0eab69bf2 100644
--- a/chromium/media/cdm/cdm_wrapper.h
+++ b/chromium/media/cdm/cdm_wrapper.h
@@ -9,9 +9,9 @@
#include <string>
+#include "base/check.h"
#include "base/compiler_specific.h"
#include "base/feature_list.h"
-#include "base/logging.h"
#include "base/macros.h"
#include "media/base/media_switches.h"
#include "media/cdm/api/content_decryption_module.h"
diff --git a/chromium/media/cdm/cenc_decryptor_fuzzer.cc b/chromium/media/cdm/cenc_decryptor_fuzzer.cc
index 3574f130822..1f485394db8 100644
--- a/chromium/media/cdm/cenc_decryptor_fuzzer.cc
+++ b/chromium/media/cdm/cenc_decryptor_fuzzer.cc
@@ -9,6 +9,7 @@
#include <string>
#include <vector>
+#include "base/logging.h"
#include "crypto/symmetric_key.h"
#include "media/base/decoder_buffer.h"
#include "media/base/subsample_entry.h"
diff --git a/chromium/media/cdm/default_cdm_factory.cc b/chromium/media/cdm/default_cdm_factory.cc
index 2d1ea7eb27a..49b7ae0f8e9 100644
--- a/chromium/media/cdm/default_cdm_factory.cc
+++ b/chromium/media/cdm/default_cdm_factory.cc
@@ -34,20 +34,12 @@ static bool ShouldCreateAesDecryptor(const std::string& key_system) {
void DefaultCdmFactory::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
const SessionKeysChangeCB& session_keys_change_cb,
const SessionExpirationUpdateCB& session_expiration_update_cb,
CdmCreatedCB cdm_created_cb) {
- if (security_origin.opaque()) {
- base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE,
- base::BindOnce(std::move(cdm_created_cb), nullptr, "Invalid origin."));
- return;
- }
-
if (!ShouldCreateAesDecryptor(key_system)) {
base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE, base::BindOnce(std::move(cdm_created_cb), nullptr,
diff --git a/chromium/media/cdm/default_cdm_factory.h b/chromium/media/cdm/default_cdm_factory.h
index 5c1941eced4..62c50f22617 100644
--- a/chromium/media/cdm/default_cdm_factory.h
+++ b/chromium/media/cdm/default_cdm_factory.h
@@ -20,7 +20,6 @@ class MEDIA_EXPORT DefaultCdmFactory : public CdmFactory {
// CdmFactory implementation.
void Create(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
diff --git a/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h b/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h
index 22ebd602a0a..728706ebe9a 100644
--- a/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h
+++ b/chromium/media/cdm/library_cdm/cdm_host_proxy_impl.h
@@ -7,7 +7,6 @@
#include "media/cdm/library_cdm/cdm_host_proxy.h"
-#include "base/logging.h"
#include "base/macros.h"
namespace media {
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
index 35a9c0d8ae6..c52f6c47eee 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
@@ -42,7 +42,7 @@
#include "media/filters/dav1d_video_decoder.h"
#endif
-#if BUILDFLAG(ENABLE_FFMPEG)
+#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
#include "media/filters/ffmpeg_video_decoder.h"
#endif
@@ -260,7 +260,7 @@ class VideoDecoderAdapter : public CdmVideoDecoder {
void OnVideoFrameReady(scoped_refptr<VideoFrame> video_frame) {
// Do not queue EOS frames, which is not needed.
- if (video_frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM))
+ if (video_frame->metadata()->end_of_stream)
return;
decoded_video_frames_.push(std::move(video_frame));
@@ -326,7 +326,7 @@ std::unique_ptr<CdmVideoDecoder> CreateVideoDecoder(
#endif
}
-#if BUILDFLAG(ENABLE_FFMPEG)
+#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
if (!video_decoder)
video_decoder.reset(new FFmpegVideoDecoder(null_media_log.get()));
#endif
diff --git a/chromium/media/cdm/library_cdm/mock_library_cdm.cc b/chromium/media/cdm/library_cdm/mock_library_cdm.cc
index 44bb672b3b8..46a1c576e60 100644
--- a/chromium/media/cdm/library_cdm/mock_library_cdm.cc
+++ b/chromium/media/cdm/library_cdm/mock_library_cdm.cc
@@ -6,6 +6,7 @@
#include <string>
+#include "base/check.h"
#include "base/logging.h"
#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "media/cdm/library_cdm/cdm_host_proxy_impl.h"
diff --git a/chromium/media/ffmpeg/ffmpeg_decoding_loop.cc b/chromium/media/ffmpeg/ffmpeg_decoding_loop.cc
index 5cc9782f123..a005f0197cd 100644
--- a/chromium/media/ffmpeg/ffmpeg_decoding_loop.cc
+++ b/chromium/media/ffmpeg/ffmpeg_decoding_loop.cc
@@ -4,6 +4,7 @@
#include "media/ffmpeg/ffmpeg_decoding_loop.h"
#include "base/callback.h"
+#include "base/logging.h"
#include "media/ffmpeg/ffmpeg_common.h"
namespace media {
diff --git a/chromium/media/filters/BUILD.gn b/chromium/media/filters/BUILD.gn
index c49ebe92141..0bc8f30af08 100644
--- a/chromium/media/filters/BUILD.gn
+++ b/chromium/media/filters/BUILD.gn
@@ -14,7 +14,6 @@ jumbo_source_set("filters") {
visibility = [
"//media",
"//media/renderers",
- "//media/webcodecs",
]
sources = [
@@ -262,6 +261,8 @@ static_library("test_support") {
visibility = [ "//media:test_support" ]
sources = [
+ "fake_video_decoder.cc",
+ "fake_video_decoder.h",
"ivf_parser.cc",
"ivf_parser.h",
]
@@ -290,8 +291,6 @@ source_set("unit_tests") {
"decrypting_demuxer_stream_unittest.cc",
"decrypting_media_resource_unittest.cc",
"decrypting_video_decoder_unittest.cc",
- "fake_video_decoder.cc",
- "fake_video_decoder.h",
"fake_video_decoder_unittest.cc",
"file_data_source_unittest.cc",
"frame_buffer_pool_unittest.cc",
diff --git a/chromium/media/filters/audio_renderer_algorithm.cc b/chromium/media/filters/audio_renderer_algorithm.cc
index 6702b17d931..a07630bcae0 100644
--- a/chromium/media/filters/audio_renderer_algorithm.cc
+++ b/chromium/media/filters/audio_renderer_algorithm.cc
@@ -190,6 +190,15 @@ int AudioRendererAlgorithm::ResampleAndFill(AudioBus* dest,
base::Unretained(this)));
}
+ if (reached_end_of_stream_ && !audio_buffer_.frames()) {
+ // Previous calls to ResampleAndFill() and OnResamplerRead() have used all
+ // of the available buffers from |audio_buffer_|. Some valid input buffers
+ // might be stuck in |resampler_.BufferedFrames()|, but the rest is silence.
+ // Forgo the few remaining valid buffers, or else we will keep playing out
+ // silence forever and never trigger any "ended" events.
+ return 0;
+ }
+
// |resampler_| can request more than |requested_frames|, due to the
// requests size not being aligned. To prevent having to fill it with silence,
// we find the max number of reads it could request, and make sure we have
@@ -252,15 +261,9 @@ int AudioRendererAlgorithm::FillBuffer(AudioBus* dest,
return frames_read;
}
- // WSOLA at playback rates that are close to 1.0 produces noticeable
- // warbling and stuttering. We prefer resampling the audio at these speeds.
- // This does results in a noticeable pitch shift.
- // NOTE: The cutoff values are arbitrary, and picked based off of a tradeoff
- // between "resample pitch shift" vs "WSOLA distortions".
- if (kLowerResampleThreshold <= playback_rate &&
- playback_rate <= kUpperResampleThreshold) {
+ // Use resampling when no pitch adjustments are needed.
+ if (!preserves_pitch_)
return ResampleAndFill(dest, dest_offset, requested_frames, playback_rate);
- }
// Destroy the resampler if it was used before, but it's no longer needed
// (e.g. before playback rate has changed). This ensures that we don't try to
@@ -272,11 +275,11 @@ int AudioRendererAlgorithm::FillBuffer(AudioBus* dest,
// chunk of memory. ~56kB for stereo 48kHz, up to ~765kB for 7.1 192kHz.
if (!ola_window_) {
ola_window_.reset(new float[ola_window_size_]);
- internal::GetSymmetricHanningWindow(ola_window_size_, ola_window_.get());
+ internal::GetPeriodicHanningWindow(ola_window_size_, ola_window_.get());
transition_window_.reset(new float[ola_window_size_ * 2]);
- internal::GetSymmetricHanningWindow(2 * ola_window_size_,
- transition_window_.get());
+ internal::GetPeriodicHanningWindow(2 * ola_window_size_,
+ transition_window_.get());
// Initialize for overlap-and-add of the first block.
wsola_output_ =
@@ -592,4 +595,8 @@ void AudioRendererAlgorithm::CreateSearchWrappers() {
AudioBus::WrapVector(search_block_->frames(), active_search_channels);
}
+void AudioRendererAlgorithm::SetPreservesPitch(bool preserves_pitch) {
+ preserves_pitch_ = preserves_pitch;
+}
+
} // namespace media
diff --git a/chromium/media/filters/audio_renderer_algorithm.h b/chromium/media/filters/audio_renderer_algorithm.h
index 89187f7c03f..1511fbbe928 100644
--- a/chromium/media/filters/audio_renderer_algorithm.h
+++ b/chromium/media/filters/audio_renderer_algorithm.h
@@ -40,11 +40,6 @@ class AudioBus;
class MEDIA_EXPORT AudioRendererAlgorithm {
public:
- // Upper and lower bounds at which we prefer to use a resampler rather than
- // WSOLA, to prevent audio artifacts.
- static constexpr double kUpperResampleThreshold = 1.06;
- static constexpr double kLowerResampleThreshold = 0.95;
-
AudioRendererAlgorithm(MediaLog* media_log);
AudioRendererAlgorithm(MediaLog* media_log,
AudioRendererAlgorithmParameters params);
@@ -90,6 +85,11 @@ class MEDIA_EXPORT AudioRendererAlgorithm {
// value of nullopt indicates the algorithm should restore the default value.
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint);
+ // Sets a flag indicating whether apply pitch adjustments when playing back
+ // at rates other than 1.0. Concretely, we use WSOLA when this is true, and
+ // resampling when this is false.
+ void SetPreservesPitch(bool preserves_pitch);
+
// Returns true if the |audio_buffer_| is >= |playback_threshold_|.
bool IsQueueAdequateForPlayback();
@@ -202,6 +202,11 @@ class MEDIA_EXPORT AudioRendererAlgorithm {
// start latency. See SetLatencyHint();
base::Optional<base::TimeDelta> latency_hint_;
+ // Whether to apply pitch adjusments or not when playing back at rates other
+ // than 1.0. In other words, we use WSOLA to preserve pitch when this is on,
+ // and resampling when this
+ bool preserves_pitch_ = true;
+
// How many frames to have in queue before beginning playback.
int64_t playback_threshold_;
diff --git a/chromium/media/filters/audio_renderer_algorithm_unittest.cc b/chromium/media/filters/audio_renderer_algorithm_unittest.cc
index 7e22b92f7a5..405d0b645ab 100644
--- a/chromium/media/filters/audio_renderer_algorithm_unittest.cc
+++ b/chromium/media/filters/audio_renderer_algorithm_unittest.cc
@@ -282,13 +282,9 @@ class AudioRendererAlgorithmTest : public testing::Test {
EXPECT_NEAR(playback_rate, actual_playback_rate, playback_rate / 100.0);
}
- void TestPlaybackRateWithUnderflow(double playback_rate, bool end_of_stream) {
- if (playback_rate > AudioRendererAlgorithm::kUpperResampleThreshold ||
- playback_rate < AudioRendererAlgorithm::kLowerResampleThreshold) {
- // This test is only used for the range in which we resample data instead
- // of using WSOLA.
- return;
- }
+ void TestResamplingWithUnderflow(double playback_rate, bool end_of_stream) {
+ // We are only testing the behavior of the resampling case.
+ algorithm_.SetPreservesPitch(false);
if (end_of_stream) {
algorithm_.MarkEndOfStream();
@@ -452,13 +448,20 @@ TEST_F(AudioRendererAlgorithmTest, FillBuffer_NearlyNormalSlowerRate) {
// The range of playback rates in which we use resampling is [0.95, 1.06].
TEST_F(AudioRendererAlgorithmTest, FillBuffer_ResamplingRates) {
Initialize();
- TestPlaybackRate(0.94); // WSOLA.
- TestPlaybackRate(AudioRendererAlgorithm::kLowerResampleThreshold);
- TestPlaybackRate(0.97);
+ // WSOLA.
+ TestPlaybackRate(0.50);
+ TestPlaybackRate(0.95);
+ TestPlaybackRate(1.00);
+ TestPlaybackRate(1.05);
+ TestPlaybackRate(2.00);
+
+ // Resampling.
+ algorithm_.SetPreservesPitch(false);
+ TestPlaybackRate(0.50);
+ TestPlaybackRate(0.95);
TestPlaybackRate(1.00);
- TestPlaybackRate(1.04);
- TestPlaybackRate(AudioRendererAlgorithm::kUpperResampleThreshold);
- TestPlaybackRate(1.07); // WSOLA.
+ TestPlaybackRate(1.05);
+ TestPlaybackRate(2.00);
}
TEST_F(AudioRendererAlgorithmTest, FillBuffer_WithOffset) {
@@ -480,14 +483,10 @@ TEST_F(AudioRendererAlgorithmTest, FillBuffer_WithOffset) {
TEST_F(AudioRendererAlgorithmTest, FillBuffer_UnderFlow) {
Initialize();
- TestPlaybackRateWithUnderflow(AudioRendererAlgorithm::kLowerResampleThreshold,
- true);
- TestPlaybackRateWithUnderflow(AudioRendererAlgorithm::kLowerResampleThreshold,
- false);
- TestPlaybackRateWithUnderflow(AudioRendererAlgorithm::kUpperResampleThreshold,
- true);
- TestPlaybackRateWithUnderflow(AudioRendererAlgorithm::kUpperResampleThreshold,
- false);
+ TestResamplingWithUnderflow(0.75, true);
+ TestResamplingWithUnderflow(0.75, false);
+ TestResamplingWithUnderflow(1.25, true);
+ TestResamplingWithUnderflow(1.25, false);
}
TEST_F(AudioRendererAlgorithmTest, FillBuffer_OneAndAQuarterRate) {
diff --git a/chromium/media/filters/dav1d_video_decoder.cc b/chromium/media/filters/dav1d_video_decoder.cc
index 2202510d59d..d21d2d356b1 100644
--- a/chromium/media/filters/dav1d_video_decoder.cc
+++ b/chromium/media/filters/dav1d_video_decoder.cc
@@ -10,6 +10,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "base/bits.h"
#include "base/callback.h"
#include "base/logging.h"
#include "base/strings/stringprintf.h"
@@ -52,6 +53,9 @@ static void GetDecoderThreadCounts(const int coded_height,
static VideoPixelFormat Dav1dImgFmtToVideoPixelFormat(
const Dav1dPictureParameters* pic) {
switch (pic->layout) {
+ // Single plane monochrome images will be converted to standard 3 plane ones
+ // since Chromium doesn't support single Y plane images.
+ case DAV1D_PIXEL_LAYOUT_I400:
case DAV1D_PIXEL_LAYOUT_I420:
switch (pic->bpc) {
case 8:
@@ -88,9 +92,6 @@ static VideoPixelFormat Dav1dImgFmtToVideoPixelFormat(
DLOG(ERROR) << "Unsupported bit depth: " << pic->bpc;
return PIXEL_FORMAT_UNKNOWN;
}
- default:
- DLOG(ERROR) << "Unsupported pixel format: " << pic->layout;
- return PIXEL_FORMAT_UNKNOWN;
}
}
@@ -348,7 +349,7 @@ bool Dav1dVideoDecoder::DecodeBuffer(scoped_refptr<DecoderBuffer> buffer) {
continue;
}
- auto frame = CopyImageToVideoFrame(p.get());
+ auto frame = BindImageToVideoFrame(p.get());
if (!frame) {
MEDIA_LOG(DEBUG, media_log_)
<< "Failed to produce video frame from Dav1dPicture.";
@@ -357,7 +358,7 @@ bool Dav1dVideoDecoder::DecodeBuffer(scoped_refptr<DecoderBuffer> buffer) {
// AV1 color space defines match ISO 23001-8:2016 via ISO/IEC 23091-4/ITU-T
// H.273. https://aomediacodec.github.io/av1-spec/#color-config-semantics
- media::VideoColorSpace color_space(
+ VideoColorSpace color_space(
p->seq_hdr->pri, p->seq_hdr->trc, p->seq_hdr->mtrx,
p->seq_hdr->color_range ? gfx::ColorSpace::RangeID::FULL
: gfx::ColorSpace::RangeID::LIMITED);
@@ -367,10 +368,12 @@ bool Dav1dVideoDecoder::DecodeBuffer(scoped_refptr<DecoderBuffer> buffer) {
color_space = config_.color_space_info();
frame->set_color_space(color_space.ToGfxColorSpace());
- frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT, false);
+ frame->metadata()->power_efficient = false;
+
+ // When we use bind mode, our image data is dependent on the Dav1dPicture,
+ // so we must ensure it stays alive along enough.
frame->AddDestructionObserver(base::BindOnce(
base::DoNothing::Once<ScopedPtrDav1dPicture>(), std::move(p)));
-
output_cb_.Run(std::move(frame));
}
@@ -378,22 +381,65 @@ bool Dav1dVideoDecoder::DecodeBuffer(scoped_refptr<DecoderBuffer> buffer) {
return true;
}
-scoped_refptr<VideoFrame> Dav1dVideoDecoder::CopyImageToVideoFrame(
+scoped_refptr<VideoFrame> Dav1dVideoDecoder::BindImageToVideoFrame(
const Dav1dPicture* pic) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ const gfx::Size visible_size(pic->p.w, pic->p.h);
VideoPixelFormat pixel_format = Dav1dImgFmtToVideoPixelFormat(&pic->p);
if (pixel_format == PIXEL_FORMAT_UNKNOWN)
return nullptr;
- // Since we're making a copy, only copy the visible area.
- const gfx::Size visible_size(pic->p.w, pic->p.h);
- return VideoFrame::WrapExternalYuvData(
+ auto uv_plane_stride = pic->stride[1];
+ auto* u_plane = static_cast<uint8_t*>(pic->data[1]);
+ auto* v_plane = static_cast<uint8_t*>(pic->data[2]);
+
+ const bool needs_fake_uv_planes = pic->p.layout == DAV1D_PIXEL_LAYOUT_I400;
+ if (needs_fake_uv_planes) {
+ // UV planes are half the size of the Y plane.
+ uv_plane_stride = base::bits::Align(pic->stride[0] / 2, 2);
+ const auto uv_plane_height = (pic->p.h + 1) / 2;
+ const size_t size_needed = uv_plane_stride * uv_plane_height;
+
+ if (!fake_uv_data_ || fake_uv_data_->size() != size_needed) {
+ if (pic->p.bpc == 8) {
+ // Avoid having base::RefCountedBytes zero initialize the memory just to
+ // fill it with a different value.
+ constexpr uint8_t kBlankUV = 256 / 2;
+ std::vector<unsigned char> empty_data(size_needed, kBlankUV);
+
+ // When we resize, existing frames will keep their refs on the old data.
+ fake_uv_data_ = base::RefCountedBytes::TakeVector(&empty_data);
+ } else {
+ DCHECK(pic->p.bpc == 10 || pic->p.bpc == 12);
+ const uint16_t kBlankUV = (1 << pic->p.bpc) / 2;
+ fake_uv_data_ =
+ base::MakeRefCounted<base::RefCountedBytes>(size_needed);
+
+ uint16_t* data = fake_uv_data_->front_as<uint16_t>();
+ std::fill(data, data + size_needed / 2, kBlankUV);
+ }
+ }
+
+ u_plane = v_plane = fake_uv_data_->front_as<uint8_t>();
+ }
+
+ auto frame = VideoFrame::WrapExternalYuvData(
pixel_format, visible_size, gfx::Rect(visible_size),
- config_.natural_size(), pic->stride[0], pic->stride[1], pic->stride[1],
- static_cast<uint8_t*>(pic->data[0]), static_cast<uint8_t*>(pic->data[1]),
- static_cast<uint8_t*>(pic->data[2]),
+ config_.natural_size(), pic->stride[0], uv_plane_stride, uv_plane_stride,
+ static_cast<uint8_t*>(pic->data[0]), u_plane, v_plane,
base::TimeDelta::FromMicroseconds(pic->m.timestamp));
+ if (!frame)
+ return nullptr;
+
+ // Each frame needs a ref on the fake UV data to keep it alive until done.
+ if (needs_fake_uv_planes) {
+ frame->AddDestructionObserver(base::BindOnce(
+ base::DoNothing::Once<scoped_refptr<base::RefCountedBytes>>(),
+ fake_uv_data_));
+ }
+
+ return frame;
}
} // namespace media
diff --git a/chromium/media/filters/dav1d_video_decoder.h b/chromium/media/filters/dav1d_video_decoder.h
index fb74e8d9d61..67efae99e1e 100644
--- a/chromium/media/filters/dav1d_video_decoder.h
+++ b/chromium/media/filters/dav1d_video_decoder.h
@@ -9,6 +9,7 @@
#include "base/callback_forward.h"
#include "base/macros.h"
+#include "base/memory/ref_counted_memory.h"
#include "base/threading/thread_checker.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
@@ -56,7 +57,7 @@ class MEDIA_EXPORT Dav1dVideoDecoder : public OffloadableVideoDecoder {
// Invokes the decoder and calls |output_cb_| for any returned frames.
bool DecodeBuffer(scoped_refptr<DecoderBuffer> buffer);
- scoped_refptr<VideoFrame> CopyImageToVideoFrame(const Dav1dPicture* img);
+ scoped_refptr<VideoFrame> BindImageToVideoFrame(const Dav1dPicture* img);
// Used to report error messages to the client.
MediaLog* const media_log_ = nullptr;
@@ -67,6 +68,10 @@ class MEDIA_EXPORT Dav1dVideoDecoder : public OffloadableVideoDecoder {
SEQUENCE_CHECKER(sequence_checker_);
+ // "Zero" filled UV data for monochrome images to use since Chromium doesn't
+ // have support for I400P(8|10|12) images.
+ scoped_refptr<base::RefCountedBytes> fake_uv_data_;
+
// Current decoder state. Used to ensure methods are called as expected.
DecoderState state_ = DecoderState::kUninitialized;
diff --git a/chromium/media/filters/dav1d_video_decoder_unittest.cc b/chromium/media/filters/dav1d_video_decoder_unittest.cc
index 446ec5a93db..69640dfe642 100644
--- a/chromium/media/filters/dav1d_video_decoder_unittest.cc
+++ b/chromium/media/filters/dav1d_video_decoder_unittest.cc
@@ -9,6 +9,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "base/hash/md5.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
#include "build/build_config.h"
@@ -169,10 +170,19 @@ class Dav1dVideoDecoderTest : public testing::Test {
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
output_frames_.push_back(std::move(frame));
}
+ std::string GetVideoFrameHash(const VideoFrame& frame) {
+ base::MD5Context md5_context;
+ base::MD5Init(&md5_context);
+ VideoFrame::HashFrameForTesting(&md5_context, frame);
+ base::MD5Digest digest;
+ base::MD5Final(&digest, &md5_context);
+ return base::MD5DigestToBase16(digest);
+ }
+
MOCK_METHOD1(DecodeDone, void(DecodeStatus));
testing::StrictMock<MockMediaLog> media_log_;
@@ -215,13 +225,52 @@ TEST_F(Dav1dVideoDecoderTest, DecodeFrame_Normal) {
// Simulate decoding a single frame.
EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(i_frame_buffer_));
ASSERT_EQ(1U, output_frames_.size());
+
+ const auto& frame = output_frames_.front();
+ EXPECT_EQ(PIXEL_FORMAT_I420, frame->format());
+ EXPECT_EQ("589dc641b7742ffe7a2b0d4c16aa3e86", GetVideoFrameHash(*frame));
+}
+
+TEST_F(Dav1dVideoDecoderTest, DecodeFrame_8bitMono) {
+ Initialize();
+ EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(ReadTestDataFile(
+ "av1-monochrome-I-frame-320x240-8bpp")));
+ ASSERT_EQ(1U, output_frames_.size());
+
+ const auto& frame = output_frames_.front();
+ EXPECT_EQ(PIXEL_FORMAT_I420, frame->format());
+ EXPECT_EQ(frame->data(VideoFrame::kUPlane), frame->data(VideoFrame::kVPlane));
+ EXPECT_EQ("eeba03dcc9c22c4632bf74b481db36b2", GetVideoFrameHash(*frame));
+}
+
+TEST_F(Dav1dVideoDecoderTest, DecodeFrame_10bitMono) {
+ Initialize();
+ EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(ReadTestDataFile(
+ "av1-monochrome-I-frame-320x240-10bpp")));
+ ASSERT_EQ(1U, output_frames_.size());
+
+ const auto& frame = output_frames_.front();
+ EXPECT_EQ(PIXEL_FORMAT_YUV420P10, frame->format());
+ EXPECT_EQ(frame->data(VideoFrame::kUPlane), frame->data(VideoFrame::kVPlane));
+ EXPECT_EQ("026c1fed9e161f09d816ac7278458a80", GetVideoFrameHash(*frame));
+}
+
+TEST_F(Dav1dVideoDecoderTest, DecodeFrame_12bitMono) {
+ Initialize();
+ EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(ReadTestDataFile(
+ "av1-monochrome-I-frame-320x240-12bpp")));
+ ASSERT_EQ(1U, output_frames_.size());
+
+ const auto& frame = output_frames_.front();
+ EXPECT_EQ(PIXEL_FORMAT_YUV420P12, frame->format());
+ EXPECT_EQ(frame->data(VideoFrame::kUPlane), frame->data(VideoFrame::kVPlane));
+ EXPECT_EQ("32115092dc00fbe86823b0b714a0f63e", GetVideoFrameHash(*frame));
}
// Decode |i_frame_buffer_| and then a frame with a larger width and verify
// the output size was adjusted.
-// TODO(dalecurtis): Get an I-frame from a larger video.
-TEST_F(Dav1dVideoDecoderTest, DISABLED_DecodeFrame_LargerWidth) {
- DecodeIFrameThenTestFile("av1-I-frame-320x240", gfx::Size(1280, 720));
+TEST_F(Dav1dVideoDecoderTest, DecodeFrame_LargerWidth) {
+ DecodeIFrameThenTestFile("av1-I-frame-1280x720", gfx::Size(1280, 720));
}
// Decode a VP9 frame which should trigger a decoder error.
diff --git a/chromium/media/filters/decoder_selector.cc b/chromium/media/filters/decoder_selector.cc
index bb06378b92d..9f510bb578c 100644
--- a/chromium/media/filters/decoder_selector.cc
+++ b/chromium/media/filters/decoder_selector.cc
@@ -165,13 +165,11 @@ void DecoderSelector<StreamType>::OnDecoderInitializeDone(Status status) {
DCHECK(task_runner_->BelongsToCurrentThread());
if (!status.is_ok()) {
- // TODO(tmathmeyer) this might be noisy in media log. Consider batching
- // all failures as causes to a single Status object and only surfacing it if
- // decoder selection fails entirely.
- media_log_->NotifyError(
- Status(StatusCode::kDecoderFailedInitialization)
- .WithData("Decoder name", decoder_->GetDisplayName())
- .AddCause(std::move(status)));
+ // TODO(tmathmeyer) this was too noisy in media log. Batch all the logs
+ // together and then send them as an informational notice instead of
+ // using NotifyError.
+ MEDIA_LOG(INFO, media_log_)
+ << "Failed to initialize " << decoder_->GetDisplayName();
// Try the next decoder on the list.
decoder_.reset();
diff --git a/chromium/media/filters/decoder_stream_traits.cc b/chromium/media/filters/decoder_stream_traits.cc
index 2eaa2a97cc2..c9e20bfd654 100644
--- a/chromium/media/filters/decoder_stream_traits.cc
+++ b/chromium/media/filters/decoder_stream_traits.cc
@@ -234,20 +234,16 @@ PostDecodeAction DecoderStreamTraits<DemuxerStream::VIDEO>::OnDecodeDone(
return PostDecodeAction::DELIVER;
// Add a timestamp here to enable buffering delay measurements down the line.
- buffer->metadata()->SetTimeTicks(VideoFrameMetadata::DECODE_BEGIN_TIME,
- it->second.decode_begin_time);
- buffer->metadata()->SetTimeTicks(VideoFrameMetadata::DECODE_END_TIME,
- base::TimeTicks::Now());
+ buffer->metadata()->decode_begin_time = it->second.decode_begin_time;
+ buffer->metadata()->decode_end_time = base::TimeTicks::Now();
auto action = it->second.should_drop ? PostDecodeAction::DROP
: PostDecodeAction::DELIVER;
// Provide duration information to help the rendering algorithm on the very
// first and very last frames.
- if (it->second.duration != kNoTimestamp) {
- buffer->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- it->second.duration);
- }
+ if (it->second.duration != kNoTimestamp)
+ buffer->metadata()->frame_duration = it->second.duration;
// We erase from the beginning onward to our target frame since frames should
// be returned in presentation order. It's possible to accumulate entries in
@@ -259,14 +255,12 @@ PostDecodeAction DecoderStreamTraits<DemuxerStream::VIDEO>::OnDecodeDone(
void DecoderStreamTraits<DemuxerStream::VIDEO>::OnOutputReady(
OutputType* buffer) {
- base::TimeTicks decode_begin_time;
- if (!buffer->metadata()->GetTimeTicks(VideoFrameMetadata::DECODE_BEGIN_TIME,
- &decode_begin_time)) {
+ if (!buffer->metadata()->decode_begin_time.has_value())
return;
- }
+
// Tag buffer with elapsed time since creation.
- buffer->metadata()->SetTimeDelta(VideoFrameMetadata::PROCESSING_TIME,
- base::TimeTicks::Now() - decode_begin_time);
+ buffer->metadata()->processing_time =
+ base::TimeTicks::Now() - *buffer->metadata()->decode_begin_time;
}
} // namespace media
diff --git a/chromium/media/filters/decoder_stream_traits.h b/chromium/media/filters/decoder_stream_traits.h
index 31edc86b5f8..66fe7cd85e9 100644
--- a/chromium/media/filters/decoder_stream_traits.h
+++ b/chromium/media/filters/decoder_stream_traits.h
@@ -12,7 +12,6 @@
#include "base/time/time.h"
#include "media/base/audio_decoder.h"
#include "media/base/audio_decoder_config.h"
-#include "media/base/cdm_context.h"
#include "media/base/channel_layout.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_log_properties.h"
diff --git a/chromium/media/filters/decrypting_audio_decoder.h b/chromium/media/filters/decrypting_audio_decoder.h
index baa09f6ec04..348cf154233 100644
--- a/chromium/media/filters/decrypting_audio_decoder.h
+++ b/chromium/media/filters/decrypting_audio_decoder.h
@@ -13,7 +13,6 @@
#include "base/memory/weak_ptr.h"
#include "base/time/time.h"
#include "media/base/audio_decoder.h"
-#include "media/base/cdm_context.h"
#include "media/base/decryptor.h"
#include "media/base/demuxer_stream.h"
diff --git a/chromium/media/filters/decrypting_demuxer_stream.cc b/chromium/media/filters/decrypting_demuxer_stream.cc
index 8d5b39e96c4..5a5282bf8c3 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream.cc
@@ -12,6 +12,7 @@
#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/cdm_context.h"
#include "media/base/decoder_buffer.h"
#include "media/base/media_log.h"
#include "media/base/media_util.h"
diff --git a/chromium/media/filters/decrypting_demuxer_stream.h b/chromium/media/filters/decrypting_demuxer_stream.h
index 5663d0f2792..12acbca4c05 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.h
+++ b/chromium/media/filters/decrypting_demuxer_stream.h
@@ -10,7 +10,6 @@
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "media/base/audio_decoder_config.h"
-#include "media/base/cdm_context.h"
#include "media/base/decryptor.h"
#include "media/base/demuxer_stream.h"
#include "media/base/pipeline_status.h"
@@ -23,6 +22,7 @@ class SingleThreadTaskRunner;
namespace media {
+class CdmContext;
class DecoderBuffer;
class MediaLog;
diff --git a/chromium/media/filters/decrypting_video_decoder.cc b/chromium/media/filters/decrypting_video_decoder.cc
index 692772edb1d..3ce8f1b6b58 100644
--- a/chromium/media/filters/decrypting_video_decoder.cc
+++ b/chromium/media/filters/decrypting_video_decoder.cc
@@ -291,7 +291,7 @@ void DecryptingVideoDecoder::DeliverFrame(Decryptor::Status status,
CHECK(frame);
// Frame returned with kSuccess should not be an end-of-stream frame.
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
// If color space is not set, use the color space in the |config_|.
if (!frame->ColorSpace().IsValid()) {
diff --git a/chromium/media/filters/decrypting_video_decoder.h b/chromium/media/filters/decrypting_video_decoder.h
index 77ce3e4bf07..71a0ac099c6 100644
--- a/chromium/media/filters/decrypting_video_decoder.h
+++ b/chromium/media/filters/decrypting_video_decoder.h
@@ -10,7 +10,6 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
-#include "media/base/cdm_context.h"
#include "media/base/decryptor.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
diff --git a/chromium/media/filters/fake_video_decoder.cc b/chromium/media/filters/fake_video_decoder.cc
index 144394e9f18..fa47c5f4d2a 100644
--- a/chromium/media/filters/fake_video_decoder.cc
+++ b/chromium/media/filters/fake_video_decoder.cc
@@ -122,14 +122,18 @@ void FakeVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
state_ = STATE_END_OF_STREAM;
} else {
DCHECK(VerifyFakeVideoBufferForTest(*buffer, current_config_));
- scoped_refptr<VideoFrame> video_frame = VideoFrame::CreateColorFrame(
- current_config_.coded_size(), 0, 0, 0, buffer->timestamp());
- decoded_frames_.push_back(video_frame);
+ decoded_frames_.push_back(MakeVideoFrame(*buffer));
}
RunOrHoldDecode(std::move(wrapped_decode_cb));
}
+scoped_refptr<VideoFrame> FakeVideoDecoder::MakeVideoFrame(
+ const DecoderBuffer& buffer) {
+ return VideoFrame::CreateColorFrame(current_config_.coded_size(), 0, 0, 0,
+ buffer.timestamp());
+}
+
void FakeVideoDecoder::Reset(base::OnceClosure closure) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(reset_cb_.IsNull());
@@ -222,7 +226,8 @@ void FakeVideoDecoder::OnFrameDecoded(int buffer_size,
if (status == DecodeStatus::OK) {
total_bytes_decoded_ += buffer_size;
- bytes_decoded_cb_.Run(buffer_size);
+ if (bytes_decoded_cb_)
+ bytes_decoded_cb_.Run(buffer_size);
}
std::move(decode_cb).Run(status);
}
diff --git a/chromium/media/filters/fake_video_decoder.h b/chromium/media/filters/fake_video_decoder.h
index 18c1e004f08..868f5b96664 100644
--- a/chromium/media/filters/fake_video_decoder.h
+++ b/chromium/media/filters/fake_video_decoder.h
@@ -75,7 +75,7 @@ class FakeVideoDecoder : public VideoDecoder {
int total_bytes_decoded() const { return total_bytes_decoded_; }
- private:
+ protected:
enum State {
STATE_UNINITIALIZED,
STATE_NORMAL,
@@ -83,6 +83,9 @@ class FakeVideoDecoder : public VideoDecoder {
STATE_ERROR,
};
+ // Derived classes may override to customize the VideoFrame.
+ virtual scoped_refptr<VideoFrame> MakeVideoFrame(const DecoderBuffer& buffer);
+
// Callback for updating |total_bytes_decoded_|.
void OnFrameDecoded(int buffer_size, DecodeCB decode_cb, DecodeStatus status);
diff --git a/chromium/media/filters/fake_video_decoder_unittest.cc b/chromium/media/filters/fake_video_decoder_unittest.cc
index 0a40d6a2307..ec6897176a4 100644
--- a/chromium/media/filters/fake_video_decoder_unittest.cc
+++ b/chromium/media/filters/fake_video_decoder_unittest.cc
@@ -88,7 +88,7 @@ class FakeVideoDecoderTest
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
last_decoded_frame_ = std::move(frame);
num_decoded_frames_++;
}
diff --git a/chromium/media/filters/ffmpeg_demuxer.cc b/chromium/media/filters/ffmpeg_demuxer.cc
index 97812376c52..a7ed542b5fc 100644
--- a/chromium/media/filters/ffmpeg_demuxer.cc
+++ b/chromium/media/filters/ffmpeg_demuxer.cc
@@ -214,7 +214,7 @@ std::unique_ptr<FFmpegDemuxerStream> FFmpegDemuxerStream::Create(
std::unique_ptr<VideoDecoderConfig> video_config;
if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
- audio_config.reset(new AudioDecoderConfig());
+ audio_config = std::make_unique<AudioDecoderConfig>();
// TODO(chcunningham): Change AVStreamToAudioDecoderConfig to check
// IsValidConfig internally and return a null scoped_ptr if not valid.
@@ -231,7 +231,7 @@ std::unique_ptr<FFmpegDemuxerStream> FFmpegDemuxerStream::Create(
MEDIA_LOG(INFO, media_log) << "FFmpegDemuxer: created audio stream, config "
<< audio_config->AsHumanReadableString();
} else if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
- video_config.reset(new VideoDecoderConfig());
+ video_config = std::make_unique<VideoDecoderConfig>();
// TODO(chcunningham): Change AVStreamToVideoDecoderConfig to check
// IsValidConfig internally and return a null scoped_ptr if not valid.
@@ -781,8 +781,9 @@ void FFmpegDemuxerStream::InitBitstreamConverter() {
// consume that data.
if (video_config_)
video_config_->SetExtraData(std::vector<uint8_t>());
- bitstream_converter_.reset(
- new FFmpegH264ToAnnexBBitstreamConverter(stream_->codecpar));
+ bitstream_converter_ =
+ std::make_unique<FFmpegH264ToAnnexBBitstreamConverter>(
+ stream_->codecpar);
break;
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
case AV_CODEC_ID_HEVC:
@@ -795,8 +796,8 @@ void FFmpegDemuxerStream::InitBitstreamConverter() {
// ADTS anyways, so skip bitstream conversion when the profile is
// unknown.
if (audio_config_->profile() != AudioCodecProfile::kXHE_AAC) {
- bitstream_converter_.reset(
- new FFmpegAACBitstreamConverter(stream_->codecpar));
+ bitstream_converter_ =
+ std::make_unique<FFmpegAACBitstreamConverter>(stream_->codecpar);
}
break;
default:
@@ -953,10 +954,10 @@ void FFmpegDemuxer::Initialize(DemuxerHost* host,
// Give a WeakPtr to BlockingUrlProtocol since we'll need to release it on the
// blocking thread pool.
- url_protocol_.reset(new BlockingUrlProtocol(
+ url_protocol_ = std::make_unique<BlockingUrlProtocol>(
data_source_, BindToCurrentLoop(base::Bind(
- &FFmpegDemuxer::OnDataSourceError, weak_this_))));
- glue_.reset(new FFmpegGlue(url_protocol_.get()));
+ &FFmpegDemuxer::OnDataSourceError, weak_this_)));
+ glue_ = std::make_unique<FFmpegGlue>(url_protocol_.get());
AVFormatContext* format_context = glue_->format_context();
// Disable ID3v1 tag reading to avoid costly seeks to end of file for data we
@@ -1075,9 +1076,12 @@ void FFmpegDemuxer::SeekInternal(base::TimeDelta time,
// Additionally, to workaround limitations in how we expose seekable ranges to
// Blink (http://crbug.com/137275), we also want to clamp seeks before the
// start time to the start time.
- base::TimeDelta seek_time = start_time_ < base::TimeDelta()
- ? time + start_time_
- : time < start_time_ ? start_time_ : time;
+ base::TimeDelta seek_time;
+ if (start_time_ < base::TimeDelta()) {
+ seek_time = time + start_time_;
+ } else {
+ seek_time = std::max(start_time_, time);
+ }
// When seeking in an opus stream we need to ensure we deliver enough data to
// satisfy the seek preroll; otherwise the audio at the actual seek time will
@@ -1437,7 +1441,7 @@ void FFmpegDemuxer::OnFindStreamInfoDone(int result) {
// Note: This value is used for seeking, so we must take the true value and
// not the one possibly clamped to zero below.
- if (start_time < start_time_)
+ if (start_time != kNoTimestamp && start_time < start_time_)
start_time_ = start_time;
const bool is_opus_or_vorbis =
@@ -1518,6 +1522,12 @@ void FFmpegDemuxer::OnFindStreamInfoDone(int result) {
if (glue_->container() == container_names::CONTAINER_AVI)
format_context->flags |= AVFMT_FLAG_GENPTS;
+ // FFmpeg will incorrectly adjust the start time of MP3 files into the future
+ // based on discard samples. We were unable to fix this upstream without
+ // breaking ffmpeg functionality. https://crbug.com/1062037
+ if (glue_->container() == container_names::CONTAINER_MP3)
+ start_time_ = base::TimeDelta();
+
// For testing purposes, don't overwrite the timeline offset if set already.
if (timeline_offset_.is_null()) {
timeline_offset_ =
@@ -1562,8 +1572,7 @@ void FFmpegDemuxer::LogMetadata(AVFormatContext* avctx,
DCHECK_EQ(avctx->nb_streams, streams_.size());
- for (size_t i = 0; i < streams_.size(); ++i) {
- FFmpegDemuxerStream* stream = streams_[i].get();
+ for (auto const& stream : streams_) {
if (!stream)
continue;
if (stream->type() == DemuxerStream::AUDIO) {
diff --git a/chromium/media/filters/ffmpeg_glue.h b/chromium/media/filters/ffmpeg_glue.h
index 8e6607c5f03..c5aec54b87c 100644
--- a/chromium/media/filters/ffmpeg_glue.h
+++ b/chromium/media/filters/ffmpeg_glue.h
@@ -25,7 +25,7 @@
#include <memory>
-#include "base/logging.h"
+#include "base/check.h"
#include "base/macros.h"
#include "media/base/container_names.h"
#include "media/base/media_export.h"
diff --git a/chromium/media/filters/ffmpeg_video_decoder.cc b/chromium/media/filters/ffmpeg_video_decoder.cc
index 51f39f7faf4..721b8b697a3 100644
--- a/chromium/media/filters/ffmpeg_video_decoder.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder.cc
@@ -358,8 +358,7 @@ bool FFmpegVideoDecoder::OnNewFrame(AVFrame* frame) {
reinterpret_cast<VideoFrame*>(av_buffer_get_opaque(frame->buf[0]));
video_frame->set_timestamp(
base::TimeDelta::FromMicroseconds(frame->reordered_opaque));
- video_frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT,
- false);
+ video_frame->metadata()->power_efficient = false;
output_cb_.Run(video_frame);
return true;
}
diff --git a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
index 4eee572d0ce..7c705bdbf20 100644
--- a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -197,7 +197,7 @@ class FFmpegVideoDecoderTest : public testing::Test {
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
output_frames_.push_back(std::move(frame));
}
diff --git a/chromium/media/filters/frame_processor.cc b/chromium/media/filters/frame_processor.cc
index 4bf60e708df..11c72893684 100644
--- a/chromium/media/filters/frame_processor.cc
+++ b/chromium/media/filters/frame_processor.cc
@@ -564,6 +564,9 @@ void FrameProcessor::OnPossibleAudioConfigUpdate(
current_audio_config_ = config;
sample_duration_ = base::TimeDelta::FromSecondsD(
1.0 / current_audio_config_.samples_per_second());
+ has_dependent_audio_frames_ =
+ current_audio_config_.profile() == AudioCodecProfile::kXHE_AAC;
+ last_audio_pts_for_nonkeyframe_monotonicity_check_ = kNoTimestamp;
}
MseTrackBuffer* FrameProcessor::FindTrack(StreamParser::TrackId id) {
@@ -602,7 +605,7 @@ bool FrameProcessor::HandlePartialAppendWindowTrimming(
scoped_refptr<StreamParserBuffer> buffer) {
DCHECK(buffer->duration() >= base::TimeDelta());
DCHECK_EQ(DemuxerStream::AUDIO, buffer->type());
- DCHECK(buffer->is_key_frame());
+ DCHECK(has_dependent_audio_frames_ || buffer->is_key_frame());
const base::TimeDelta frame_end_timestamp =
buffer->timestamp() + buffer->duration();
@@ -611,7 +614,13 @@ bool FrameProcessor::HandlePartialAppendWindowTrimming(
// for the first buffer which overlaps |append_window_start|.
if (buffer->timestamp() < append_window_start &&
frame_end_timestamp <= append_window_start) {
- audio_preroll_buffer_ = std::move(buffer);
+ // But if the buffer is not a keyframe, do not use it for preroll, nor use
+ // any previous preroll buffer for simplicity here.
+ if (has_dependent_audio_frames_ && !buffer->is_key_frame()) {
+ audio_preroll_buffer_.reset();
+ } else {
+ audio_preroll_buffer_ = std::move(buffer);
+ }
return false;
}
@@ -708,6 +717,37 @@ bool FrameProcessor::HandlePartialAppendWindowTrimming(
return processed_buffer;
}
+bool FrameProcessor::CheckAudioPresentationOrder(
+ const StreamParserBuffer& frame,
+ bool track_buffer_needs_random_access_point) {
+ DCHECK_EQ(DemuxerStream::AUDIO, frame.type());
+ DCHECK(has_dependent_audio_frames_);
+ if (frame.is_key_frame()) {
+ // Audio keyframes trivially succeed here. They start a new PTS baseline for
+ // the purpose of the checks in this method.
+ last_audio_pts_for_nonkeyframe_monotonicity_check_ = frame.timestamp();
+ return true;
+ }
+ if (track_buffer_needs_random_access_point) {
+ // This nonkeyframe trivially succeeds here, though it will not be buffered
+ // later in the caller since a keyframe is required first.
+ last_audio_pts_for_nonkeyframe_monotonicity_check_ = kNoTimestamp;
+ return true;
+ }
+
+ // We're not waiting for a random access point, so we must have a valid PTS
+ // baseline.
+ DCHECK_NE(kNoTimestamp, last_audio_pts_for_nonkeyframe_monotonicity_check_);
+
+ if (frame.timestamp() >= last_audio_pts_for_nonkeyframe_monotonicity_check_) {
+ last_audio_pts_for_nonkeyframe_monotonicity_check_ = frame.timestamp();
+ return true;
+ }
+
+ last_audio_pts_for_nonkeyframe_monotonicity_check_ = kNoTimestamp;
+ return false; // Caller should fail parse in this case.
+}
+
bool FrameProcessor::ProcessFrame(scoped_refptr<StreamParserBuffer> frame,
base::TimeDelta append_window_start,
base::TimeDelta append_window_end,
@@ -744,14 +784,16 @@ bool FrameProcessor::ProcessFrame(scoped_refptr<StreamParserBuffer> frame,
// assumption that all audio coded frames are key frames. Metadata in the
// bytestream may not indicate that, so we need to enforce that assumption
// here with a warning log.
- if (frame->type() == DemuxerStream::AUDIO && !frame->is_key_frame()) {
+ if (frame->type() == DemuxerStream::AUDIO && !has_dependent_audio_frames_ &&
+ !frame->is_key_frame()) {
LIMITED_MEDIA_LOG(DEBUG, media_log_, num_audio_non_keyframe_warnings_,
kMaxAudioNonKeyframeWarnings)
<< "Bytestream with audio frame PTS "
<< presentation_timestamp.InMicroseconds() << "us and DTS "
<< decode_timestamp.InMicroseconds()
<< "us indicated the frame is not a random access point (key frame). "
- "All audio frames are expected to be key frames.";
+ "All audio frames are expected to be key frames for the current "
+ "audio codec.";
frame->set_is_key_frame(true);
}
@@ -906,8 +948,18 @@ bool FrameProcessor::ProcessFrame(scoped_refptr<StreamParserBuffer> frame,
frame->set_timestamp(presentation_timestamp);
frame->SetDecodeTimestamp(decode_timestamp);
+ if (has_dependent_audio_frames_ && frame->type() == DemuxerStream::AUDIO &&
+ !CheckAudioPresentationOrder(
+ *frame, track_buffer->needs_random_access_point())) {
+ MEDIA_LOG(ERROR, media_log_)
+ << "Dependent audio frame with invalid decreasing presentation "
+ "timestamp detected.";
+ return false;
+ }
+
// Attempt to trim audio exactly to fit the append window.
if (frame->type() == DemuxerStream::AUDIO &&
+ (frame->is_key_frame() || !track_buffer->needs_random_access_point()) &&
HandlePartialAppendWindowTrimming(append_window_start,
append_window_end, frame)) {
// |frame| has been partially trimmed or had preroll added. Though
diff --git a/chromium/media/filters/frame_processor.h b/chromium/media/filters/frame_processor.h
index 3a5474ba3fd..7095bb603f6 100644
--- a/chromium/media/filters/frame_processor.h
+++ b/chromium/media/filters/frame_processor.h
@@ -116,13 +116,25 @@ class MEDIA_EXPORT FrameProcessor {
// after |append_window_end| will be marked for post-decode discard.
//
// If |buffer| lies entirely before |append_window_start|, and thus would
- // normally be discarded, |audio_preroll_buffer_| will be set to |buffer| and
- // the method will return false.
+ // normally be discarded, |audio_preroll_buffer_| will be updated and the
+ // method will return false. In this case, the updated preroll will be
+ // |buffer| iff |buffer| is a keyframe, otherwise the preroll will be cleared.
bool HandlePartialAppendWindowTrimming(
base::TimeDelta append_window_start,
base::TimeDelta append_window_end,
scoped_refptr<StreamParserBuffer> buffer);
+ // Enables rejection of audio frame streams with nonkeyframe timestamps that
+ // do not monotonically increase since the last keyframe. Returns true if
+ // |frame| appears to be in order, false if |frame|'s order is not supported.
+ // |track_needs_random_access_point| should be the corresponding value for the
+ // frame's track buffer. This helper should only be called when
+ // |has_dependent_audio_frames_| is true, and only for an audio |frame|. This
+ // method also uses and updates
+ // |last_audio_pts_for_nonkeyframe_monotonicity_check_|.
+ bool CheckAudioPresentationOrder(const StreamParserBuffer& frame,
+ bool track_needs_random_access_point);
+
// Helper that processes one frame with the coded frame processing algorithm.
// Returns false on error or true on success.
bool ProcessFrame(scoped_refptr<StreamParserBuffer> frame,
@@ -140,9 +152,23 @@ class MEDIA_EXPORT FrameProcessor {
scoped_refptr<StreamParserBuffer> audio_preroll_buffer_;
// The AudioDecoderConfig associated with buffers handed to ProcessFrames().
+ // TODO(wolenetz): Associate current audio config and the derived
+ // |has_dependent_audio_frames_|, |sample_duration_| and
+ // |last_audio_pts_for_nonkeyframe_monotonicity_check_| with MseTrackBuffer
+ // instead to enable handling more than 1 audio track in a SourceBuffer
+ // simultaneously. See https://crbug.com/1081952.
AudioDecoderConfig current_audio_config_;
+ bool has_dependent_audio_frames_ = false;
base::TimeDelta sample_duration_;
+ // When |has_dependent_audio_frames_| is true, holds the PTS of the last
+ // successfully processed audio frame. If the next audio frame is not a
+ // keyframe and has lower PTS, the stream is invalid. Currently, the only
+ // supported audio streams that could contain nonkeyframes are in-order (PTS
+ // increases monotonically since last keyframe), e.g. xHE-AAC.
+ base::TimeDelta last_audio_pts_for_nonkeyframe_monotonicity_check_ =
+ kNoTimestamp;
+
// The AppendMode of the associated SourceBuffer.
// See SetSequenceMode() for interpretation of |sequence_mode_|.
// Per http://www.w3.org/TR/media-source/#widl-SourceBuffer-mode:
diff --git a/chromium/media/filters/frame_processor_unittest.cc b/chromium/media/filters/frame_processor_unittest.cc
index 7c5c09a136a..9547f723297 100644
--- a/chromium/media/filters/frame_processor_unittest.cc
+++ b/chromium/media/filters/frame_processor_unittest.cc
@@ -99,7 +99,8 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
enum StreamFlags {
HAS_AUDIO = 1 << 0,
HAS_VIDEO = 1 << 1,
- OBSERVE_APPENDS_AND_GROUP_STARTS = 1 << 2
+ OBSERVE_APPENDS_AND_GROUP_STARTS = 1 << 2,
+ USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES = 1 << 3
};
void AddTestTracks(int stream_flags) {
@@ -110,14 +111,19 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
const bool setup_observers =
(stream_flags & OBSERVE_APPENDS_AND_GROUP_STARTS) != 0;
+ const bool support_audio_nonkeyframes =
+ (stream_flags & USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES) != 0;
+ ASSERT_TRUE(has_audio || !support_audio_nonkeyframes);
+
if (has_audio) {
- CreateAndConfigureStream(DemuxerStream::AUDIO, setup_observers);
+ CreateAndConfigureStream(DemuxerStream::AUDIO, setup_observers,
+ support_audio_nonkeyframes);
ASSERT_TRUE(audio_);
EXPECT_TRUE(frame_processor_->AddTrack(audio_id_, audio_.get()));
SeekStream(audio_.get(), Milliseconds(0));
}
if (has_video) {
- CreateAndConfigureStream(DemuxerStream::VIDEO, setup_observers);
+ CreateAndConfigureStream(DemuxerStream::VIDEO, setup_observers, false);
ASSERT_TRUE(video_);
EXPECT_TRUE(frame_processor_->AddTrack(video_id_, video_.get()));
SeekStream(video_.get(), Milliseconds(0));
@@ -234,12 +240,30 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
EXPECT_FALSE(read_callback_called_);
}
- // Format of |expected| is a space-delimited sequence of
- // timestamp_in_ms:original_timestamp_in_ms
- // original_timestamp_in_ms (and the colon) must be omitted if it is the same
- // as timestamp_in_ms.
+ // Doesn't check keyframeness, but otherwise is the same as
+ // CheckReadsAndOptionallyKeyframenessThenReadStalls().
void CheckReadsThenReadStalls(ChunkDemuxerStream* stream,
const std::string& expected) {
+ CheckReadsAndOptionallyKeyframenessThenReadStalls(stream, expected, false);
+ }
+
+ // Checks keyframeness using
+ // CheckReadsAndOptionallyKeyframenessThenReadStalls().
+ void CheckReadsAndKeyframenessThenReadStalls(ChunkDemuxerStream* stream,
+ const std::string& expected) {
+ CheckReadsAndOptionallyKeyframenessThenReadStalls(stream, expected, true);
+ }
+
+ // Format of |expected| is a space-delimited sequence of
+ // timestamp_in_ms:original_timestamp_in_ms. original_timestamp_in_ms (and the
+ // colon) must be omitted if it is the same as timestamp_in_ms. If
+ // |check_keyframeness| is true, then each frame in |expected| must end with
+ // 'K' or 'N', which respectively must match the read result frames'
+ // keyframeness.
+ void CheckReadsAndOptionallyKeyframenessThenReadStalls(
+ ChunkDemuxerStream* stream,
+ const std::string& expected,
+ bool check_keyframeness) {
std::vector<std::string> timestamps = base::SplitString(
expected, " ", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
std::stringstream ss;
@@ -279,6 +303,14 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
last_read_buffer_->discard_padding().second.is_zero()) {
ss << "P";
}
+
+ // Conditionally check keyframeness.
+ if (check_keyframeness) {
+ if (last_read_buffer_->is_key_frame())
+ ss << "K";
+ else
+ ss << "N";
+ }
}
EXPECT_EQ(expected, ss.str());
@@ -336,18 +368,27 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
}
void CreateAndConfigureStream(DemuxerStream::Type type,
- bool setup_observers) {
+ bool setup_observers,
+ bool support_audio_nonkeyframes) {
// TODO(wolenetz/dalecurtis): Also test with splicing disabled?
ChunkDemuxerStream* stream;
switch (type) {
case DemuxerStream::AUDIO: {
ASSERT_FALSE(audio_);
- audio_.reset(
- new ChunkDemuxerStream(DemuxerStream::AUDIO, MediaTrack::Id("1")));
- AudioDecoderConfig decoder_config(
- kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ audio_ = std::make_unique<ChunkDemuxerStream>(DemuxerStream::AUDIO,
+ MediaTrack::Id("1"));
+ AudioDecoderConfig decoder_config;
+ if (support_audio_nonkeyframes) {
+ decoder_config = AudioDecoderConfig(
+ kCodecAAC, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ decoder_config.set_profile(AudioCodecProfile::kXHE_AAC);
+ } else {
+ decoder_config = AudioDecoderConfig(
+ kCodecVorbis, kSampleFormatPlanarF32, CHANNEL_LAYOUT_STEREO, 1000,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ }
frame_processor_->OnPossibleAudioConfigUpdate(decoder_config);
ASSERT_TRUE(
audio_->UpdateAudioConfig(decoder_config, false, &media_log_));
@@ -357,8 +398,9 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
}
case DemuxerStream::VIDEO: {
ASSERT_FALSE(video_);
- video_.reset(
- new ChunkDemuxerStream(DemuxerStream::VIDEO, MediaTrack::Id("2")));
+ ASSERT_FALSE(support_audio_nonkeyframes);
+ video_ = std::make_unique<ChunkDemuxerStream>(DemuxerStream::VIDEO,
+ MediaTrack::Id("2"));
ASSERT_TRUE(video_->UpdateVideoConfig(TestVideoConfig::Normal(), false,
&media_log_));
stream = video_.get();
@@ -1199,6 +1241,8 @@ TEST_P(FrameProcessorTest, AudioNonKeyframeChangedToKeyframe) {
// to a keyframe, so no longer depends on the original preceding keyframe).
// The sequence mode test version uses SetTimestampOffset to make it behave
// like segments mode to simplify the tests.
+ // Note, see the NonkeyframeAudioBuffering tests to verify buffering of audio
+ // nonkeyframes for codec(s) that use nonkeyframes.
InSequence s;
AddTestTracks(HAS_AUDIO);
frame_processor_->SetSequenceMode(use_sequence_mode_);
@@ -1915,6 +1959,340 @@ TEST_P(FrameProcessorTest,
CheckReadsThenReadStalls(video_.get(), "0 5");
}
+TEST_P(FrameProcessorTest, NonkeyframeAudioBuffering_BasicOperation) {
+ // With the support for audio nonkeyframe buffering enabled, buffer a couple
+ // continuous groups of audio key and nonkey frames.
+ // Note, see the AudioNonKeyframeChangedToKeyframe test that tests where
+ // nonkeyframe audio buffering is not supported, and instead takes a
+ // workaround that forces all audio to be keyframe.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_)
+ frame_processor_->SetSequenceMode(true);
+
+ // Default test frame duration is 10 milliseconds.
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(80)));
+ EXPECT_TRUE(ProcessFrames("0K 10 20 30 40K 50 60 70", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [0,80) }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(),
+ "0K 10N 20N 30N 40K 50N 60N 70N");
+}
+
+TEST_P(FrameProcessorTest, NonkeyframeAudioBuffering_BasicOverlaps) {
+ // With the support for audio nonkeyframe buffering enabled, buffer a few
+ // groups of audio key and nonkey frames which overlap each other.
+ // For sequence mode versions, timestampOffset is adjusted to make it act like
+ // segments mode.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_) {
+ frame_processor_->SetSequenceMode(true);
+ SetTimestampOffset(Milliseconds(10));
+ }
+
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(60)));
+ EXPECT_TRUE(ProcessFrames("10K 20 30 40 50", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [10,60) }");
+
+ // End-overlap the last nonkeyframe appended with a keyframe.
+
+ if (use_sequence_mode_)
+ SetTimestampOffset(Milliseconds(50));
+
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(70)));
+ EXPECT_TRUE(ProcessFrames("50K 60", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [10,70) }");
+
+ // Front-overlap the original group of frames.
+
+ if (use_sequence_mode_)
+ SetTimestampOffset(Milliseconds(0));
+
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(20)));
+ EXPECT_TRUE(ProcessFrames("0K 10", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [0,70) }");
+
+ SeekStream(audio_.get(), Milliseconds(0));
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(), "0K 10N 50K 60N");
+}
+
+TEST_P(FrameProcessorTest,
+ NonkeyframeAudioBuffering_InitialNonkeyframesNotBuffered) {
+ // With the support for audio nonkeyframe buffering enabled, try to buffer
+ // some frames beginning with a nonkeyframe and observe initial nonkeyframe(s)
+ // are not buffered.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_)
+ frame_processor_->SetSequenceMode(true);
+
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(60)));
+ EXPECT_TRUE(ProcessFrames("0 10 20K 30 40 50", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [20,60) }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(), "20K 30N 40N 50N");
+}
+
+TEST_P(FrameProcessorTest,
+ NonkeyframeAudioBuffering_InvalidDecreasingNonkeyframePts) {
+ // With the support for audio nonkeyframe buffering enabled, try to buffer an
+ // invalid sequence of nonkeyframes: decreasing presentation timestamps are
+ // not supported for audio nonkeyframes. For sequence mode versions,
+ // timestampOffset is adjusted to make it act like segments mode.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_) {
+ frame_processor_->SetSequenceMode(true);
+ SetTimestampOffset(Milliseconds(100));
+ }
+
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(110)));
+ EXPECT_TRUE(ProcessFrames("100K", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [100,110) }");
+
+ // Processing an audio nonkeyframe with lower PTS than the previous frame
+ // should fail.
+ EXPECT_MEDIA_LOG(AudioNonKeyframeOutOfOrder());
+ EXPECT_FALSE(ProcessFrames("90|110", ""));
+}
+
+TEST_P(FrameProcessorTest,
+ NonkeyframeAudioBuffering_ValidDecreasingKeyframePts) {
+ // With the support for audio nonkeyframe buffering enabled, try to buffer a
+ // valid sequence of key and nonkeyframes: decreasing presentation timestamps
+ // are supported for keyframes. For sequence mode versions, timestampOffset is
+ // adjusted to make it act like segments mode.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_) {
+ frame_processor_->SetSequenceMode(true);
+ SetTimestampOffset(Milliseconds(100));
+ }
+
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(130)));
+ EXPECT_TRUE(ProcessFrames("100K 110 120", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [100,130) }");
+
+ // Processing an audio keyframe with lower PTS than the previous frame
+ // should succeed, since it is a keyframe. Here, we use continuous DTS to
+ // ensure we precisely target the nonkeyframe monotonicity check when a
+ // keyframe is not required by the track buffer currently (and to make
+ // sequence mode versions act like segments mode without further manual
+ // adjustment of timestamp offset.) The original nonkeyframe at PTS 110 should
+ // be overlap-removed, and the one at PTS 120 should have be removed as a
+ // result of depending on that removed PTS 110 nonkeyframe.
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(130)));
+ EXPECT_TRUE(ProcessFrames("110|130K", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [100,120) }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(), "100K 110K");
+}
+
+TEST_P(FrameProcessorTest,
+ NonkeyframeAudioBuffering_ValidSameNonKeyframePts_1) {
+ // With the support for audio nonkeyframe buffering enabled, try to buffer a
+ // valid sequence of a keyframe and a nonkeyframe: non-increasing presentation
+ // timestamps are supported for audio nonkeyframes, so long as they don't
+ // decrease. For sequence mode versions, timestampOffset is adjusted to make
+ // it act like segments mode.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_) {
+ frame_processor_->SetSequenceMode(true);
+ SetTimestampOffset(Milliseconds(100));
+ }
+
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(110)));
+ EXPECT_TRUE(ProcessFrames("100K", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [100,110) }");
+
+ // Processing an audio nonkeyframe with same PTS as the previous frame should
+ // succeed, though there is presentation interval overlap causing removal of
+ // the previous frame (in this case, a keyframe), and hence the new dependent
+ // nonkeyframe is not buffered.
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(110)));
+ EXPECT_TRUE(ProcessFrames("100|110", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(), "");
+}
+
+TEST_P(FrameProcessorTest,
+ NonkeyframeAudioBuffering_ValidSameNonKeyframePts_2) {
+ // With the support for audio nonkeyframe buffering enabled, try to buffer a
+ // valid sequence of nonkeyframes: non-increasing presentation timestamps are
+ // supported for audio nonkeyframes, so long as they don't decrease. For
+ // sequence mode versions, timestampOffset is adjusted to make it act like
+ // segments mode.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_) {
+ frame_processor_->SetSequenceMode(true);
+ SetTimestampOffset(Milliseconds(100));
+ }
+
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(120)));
+ EXPECT_TRUE(ProcessFrames("100K 110", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [100,120) }");
+
+ // Processing an audio nonkeyframe with same PTS as the previous frame should
+ // succeed, though there is presentation interval overlap causing removal of
+ // the previous nonkeyframe, and hence the new dependent nonkeyframe is not
+ // buffered.
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(120)));
+ EXPECT_TRUE(ProcessFrames("110|120", ""));
+ EXPECT_EQ(Milliseconds(0), timestamp_offset_);
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [100,110) }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(), "100K");
+}
+
+TEST_P(FrameProcessorTest,
+ NonkeyframeAudioBuffering_AppendWindowFilterDroppedPrerollKeyframe) {
+ // For simplicity currently, if the preroll (keyframe) buffer was entirely
+ // prior to the append window and dropped, an approximately continuous
+ // keyframe is still required to use that dropped frame as preroll (for
+ // simplicity). This may change in future if append window trimming of
+ // nonkeyframes with a fully excluded preroll keyframe is commonly needed to
+ // be supported.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_)
+ frame_processor_->SetSequenceMode(true);
+ SetTimestampOffset(Milliseconds(-10));
+
+ EXPECT_MEDIA_LOG(DroppedFrame("audio", -10000));
+ if (use_sequence_mode_)
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(-10)));
+ else
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(0)));
+ EXPECT_TRUE(ProcessFrames("0K", ""));
+
+ // This nonkeyframe is dropped for simplicity since it depends on a preroll
+ // keyframe which was entirely outside the append window.
+ if (use_sequence_mode_)
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(-10)));
+ else
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(0)));
+ EXPECT_TRUE(ProcessFrames("10", ""));
+
+ // Only the following keyframe should buffer successfully, with no preroll.
+ EXPECT_MEDIA_LOG(DroppedAppendWindowUnusedPreroll(-10000, -10000, 10000));
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(20)));
+ EXPECT_TRUE(ProcessFrames("20K", ""));
+
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [10,20) }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(), "10:20K");
+}
+
+TEST_P(FrameProcessorTest,
+ NonkeyframeAudioBuffering_AppendWindowFilter_TrimFront) {
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_)
+ frame_processor_->SetSequenceMode(true);
+ SetTimestampOffset(Milliseconds(-4));
+ EXPECT_MEDIA_LOG(TruncatedFrame(-4000, 6000, "start", 0));
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(26)));
+ EXPECT_TRUE(ProcessFrames("0K 10 20", ""));
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [0,26) }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(), "0K 6:10N 16:20N");
+}
+
+TEST_P(FrameProcessorTest,
+ NonkeyframeAudioBuffering_AppendWindowFilter_TrimEnd) {
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_)
+ frame_processor_->SetSequenceMode(true);
+
+ append_window_end_ = Milliseconds(26);
+
+ EXPECT_MEDIA_LOG(TruncatedFrame(20000, 30000, "end", 26000));
+ EXPECT_MEDIA_LOG(DroppedFrameCheckAppendWindow("audio", 0, 26000));
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(26)));
+ EXPECT_TRUE(ProcessFrames("0K 10 20 30", ""));
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [0,26) }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(), "0K 10N 20N");
+}
+
+TEST_P(FrameProcessorTest, NonkeyframeAudioBuffering_TrimSpliceOverlap) {
+ // White-box test which focuses on the behavior of underlying
+ // SourceBufferStream::TrimSpliceOverlap() for frame sequences involving
+ // nonkeyframes appended by the FrameProcessor. That method detects and
+ // performs splice trimming on every audio frame following either a
+ // discontinuity or the beginning of ProcessFrames(), and also on audio frames
+ // with PTS not directly continuous with the highest frame end PTS already
+ // processed. We vary |frame_duration_| in this test to avoid confusing
+ // int:decimal pairs in the eventual CheckReads* call.
+ InSequence s;
+ AddTestTracks(HAS_AUDIO | USE_AUDIO_CODEC_SUPPORTING_NONKEYFRAMES);
+ if (use_sequence_mode_)
+ frame_processor_->SetSequenceMode(true);
+
+ frame_duration_ = base::TimeDelta::FromMicroseconds(9750);
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(frame_duration_));
+ EXPECT_TRUE(ProcessFrames("0K", ""));
+
+ // As with all-keyframe streams, a slight jump forward should not trigger any
+ // splicing logic, though accumulations of these may result in loss of A/V
+ // sync.
+ frame_duration_ = base::TimeDelta::FromMicroseconds(10250);
+ EXPECT_CALL(callbacks_,
+ PossibleDurationIncrease(Milliseconds(10) + frame_duration_));
+ EXPECT_TRUE(ProcessFrames("10", ""));
+
+ // As with all-keyframe streams, a slightly end-overlapping nonkeyframe should
+ // not trigger any splicing logic, though accumulations of these may result in
+ // loss of A/V sync. The difference here is there isn't even any emission of a
+ // "too little splice overlap" media log, since the new frame is a
+ // nonkeyframe.
+ frame_duration_ = Milliseconds(10);
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(30)));
+ EXPECT_TRUE(ProcessFrames("20", ""));
+
+ // A heavily overlapping nonkeyframe should not trigger any splicing logic,
+ // so long as it isn't completely discontinuous. This is unlike all-keyframe
+ // audio streams, where such a heavy overlap would end-trim the overlapped
+ // frame. Accumulations of these could rapidly lead to loss of A/V sync.
+ // Nonkeyframe timestamp & duration metadata sequences need to be correctly
+ // muxed to avoid this.
+ frame_duration_ = base::TimeDelta::FromMicroseconds(10250);
+ EXPECT_CALL(callbacks_,
+ PossibleDurationIncrease(Milliseconds(22) + frame_duration_));
+ EXPECT_TRUE(ProcessFrames("22", ""));
+
+ // A keyframe that end-overlaps a nonkeyframe will trigger splicing logic.
+ // Here, we test a "too little splice overlap" case.
+ frame_duration_ = Milliseconds(10);
+ EXPECT_MEDIA_LOG(SkippingSpliceTooLittleOverlap(32000, 250));
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(42)));
+ EXPECT_TRUE(ProcessFrames("32K", ""));
+
+ // And a keyframe that significantly end-overlaps a nonkeyframe will trigger
+ // splicing logic that can perform end-trimming of the overlapped frame.
+ // First, we buffer another nonkeyframe.
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(52)));
+ EXPECT_TRUE(ProcessFrames("42", ""));
+ // Verify correct splice behavior on significant overlap of the nonkeyframe by
+ // a new keyframe.
+ EXPECT_MEDIA_LOG(TrimmedSpliceOverlap(45000, 42000, 7000));
+ EXPECT_CALL(callbacks_, PossibleDurationIncrease(Milliseconds(55)));
+ EXPECT_TRUE(ProcessFrames("45K", ""));
+
+ CheckExpectedRangesByTimestamp(audio_.get(), "{ [0,55) }");
+ CheckReadsAndKeyframenessThenReadStalls(audio_.get(),
+ "0K 10N 20N 22N 32K 42N 45K");
+}
+
INSTANTIATE_TEST_SUITE_P(SequenceMode, FrameProcessorTest, Values(true));
INSTANTIATE_TEST_SUITE_P(SegmentsMode, FrameProcessorTest, Values(false));
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
index 8706bc45b3a..befca6194a6 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
@@ -15,8 +15,8 @@
#include "base/bits.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
-#include "base/fuchsia/default_context.h"
#include "base/fuchsia/fuchsia_logging.h"
+#include "base/fuchsia/process_context.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
@@ -102,8 +102,8 @@ class OutputMailbox {
coded_size, visible_rect, natural_size, timestamp);
// Request a fence we'll wait on before reusing the buffer.
- frame->metadata()->SetBoolean(VideoFrameMetadata::READ_LOCK_FENCES_ENABLED,
- true);
+ frame->metadata()->read_lock_fences_enabled = true;
+
return frame;
}
@@ -420,7 +420,7 @@ void FuchsiaVideoDecoder::Initialize(const VideoDecoderConfig& config,
decoder_params.set_promise_separate_access_units_on_input(true);
decoder_params.set_require_hw(!enable_sw_decoding_);
- auto decoder_factory = base::fuchsia::ComponentContextForCurrentProcess()
+ auto decoder_factory = base::ComponentContextForProcess()
->svc()
->Connect<fuchsia::mediacodec::CodecFactory>();
decoder_factory->CreateDecoder(std::move(decoder_params),
@@ -932,10 +932,9 @@ void FuchsiaVideoDecoder::OnOutputPacket(fuchsia::media::Packet output_packet,
// Mark the frame as power-efficient when software decoders are disabled. The
// codec may still decode on hardware even when |enable_sw_decoding_| is set
- // (i.e. POWER_EFFICIENT flag would not be set correctly in that case). It
+ // (i.e. power_efficient flag would not be set correctly in that case). It
// doesn't matter because software decoders can be enabled only for tests.
- frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT,
- !enable_sw_decoding_);
+ frame->metadata()->power_efficient = !enable_sw_decoding_;
output_cb_.Run(std::move(frame));
}
@@ -1016,7 +1015,8 @@ void FuchsiaVideoDecoder::InitializeOutputBufferCollection(
output_buffer_collection_id_ = gfx::SysmemBufferCollectionId::Create();
shared_image_interface_->RegisterSysmemBufferCollection(
output_buffer_collection_id_,
- collection_token_for_gpu.Unbind().TakeChannel());
+ collection_token_for_gpu.Unbind().TakeChannel(),
+ gfx::BufferFormat::YUV_420_BIPLANAR, gfx::BufferUsage::GPU_READ);
// Pass new output buffer settings to the codec.
fuchsia::media::StreamBufferPartialSettings settings;
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
index bc293b3df7a..a3faeed5b60 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
@@ -11,8 +11,8 @@
#include "base/bind_helpers.h"
#include "base/containers/flat_map.h"
#include "base/containers/flat_set.h"
-#include "base/fuchsia/default_context.h"
#include "base/fuchsia/fuchsia_logging.h"
+#include "base/fuchsia/process_context.h"
#include "base/test/task_environment.h"
#include "components/viz/test/test_context_support.h"
#include "gpu/command_buffer/client/shared_image_interface.h"
@@ -30,7 +30,7 @@ namespace {
class TestBufferCollection {
public:
explicit TestBufferCollection(zx::channel collection_token) {
- sysmem_allocator_ = base::fuchsia::ComponentContextForCurrentProcess()
+ sysmem_allocator_ = base::ComponentContextForProcess()
->svc()
->Connect<fuchsia::sysmem::Allocator>();
sysmem_allocator_.set_error_handler([](zx_status_t status) {
@@ -148,7 +148,11 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
}
void RegisterSysmemBufferCollection(gfx::SysmemBufferCollectionId id,
- zx::channel token) override {
+ zx::channel token,
+ gfx::BufferFormat format,
+ gfx::BufferUsage usage) override {
+ EXPECT_EQ(format, gfx::BufferFormat::YUV_420_BIPLANAR);
+ EXPECT_EQ(usage, gfx::BufferUsage::GPU_READ);
std::unique_ptr<TestBufferCollection>& collection =
sysmem_buffer_collections_[id];
EXPECT_FALSE(collection);
@@ -168,6 +172,10 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
gpu::CommandBufferId(33), 1);
}
+ void WaitSyncToken(const gpu::SyncToken& sync_token) override {
+ NOTREACHED();
+ }
+
void Flush() override { NOTREACHED(); }
scoped_refptr<gfx::NativePixmap> GetNativePixmap(
diff --git a/chromium/media/filters/gav1_video_decoder.cc b/chromium/media/filters/gav1_video_decoder.cc
index dbf7acd11fc..a255254d14c 100644
--- a/chromium/media/filters/gav1_video_decoder.cc
+++ b/chromium/media/filters/gav1_video_decoder.cc
@@ -28,6 +28,9 @@ VideoPixelFormat Libgav1ImageFormatToVideoPixelFormat(
const libgav1::ImageFormat libgav1_format,
int bitdepth) {
switch (libgav1_format) {
+ // Single plane monochrome images will be converted to standard 3 plane ones
+ // since Chromium doesn't support single Y plane images.
+ case libgav1::kImageFormatMonochrome400:
case libgav1::kImageFormatYuv420:
switch (bitdepth) {
case 8:
@@ -64,9 +67,6 @@ VideoPixelFormat Libgav1ImageFormatToVideoPixelFormat(
DLOG(ERROR) << "Unsupported bit depth: " << bitdepth;
return PIXEL_FORMAT_UNKNOWN;
}
- default:
- DLOG(ERROR) << "Unsupported pixel format: " << libgav1_format;
- return PIXEL_FORMAT_UNKNOWN;
}
}
@@ -157,6 +157,25 @@ libgav1::StatusCode GetFrameBufferImpl(void* callback_private_data,
frame_buffer->plane[i] = video_frame->visible_data(i);
frame_buffer->stride[i] = video_frame->stride(i);
}
+ if (image_format == libgav1::kImageFormatMonochrome400) {
+ int uv_height = (height + 1) >> 1;
+ const size_t size_needed = video_frame->stride(1) * uv_height;
+ for (int i = 1; i < 3; i++) {
+ frame_buffer->plane[i] = nullptr;
+ frame_buffer->stride[i] = 0;
+ // An AV1 monochrome (grayscale) frame has no U and V planes. Set all U
+ // and V samples in video_frame to the blank value.
+ if (bitdepth == 8) {
+ constexpr uint8_t kBlankUV = 256 / 2;
+ memset(video_frame->visible_data(i), kBlankUV, size_needed);
+ } else {
+ const uint16_t kBlankUV = (1 << bitdepth) / 2;
+ uint16_t* data =
+ reinterpret_cast<uint16_t*>(video_frame->visible_data(i));
+ std::fill(data, data + size_needed / 2, kBlankUV);
+ }
+ }
+ }
frame_buffer->private_data = video_frame.get();
video_frame->AddRef();
@@ -192,7 +211,7 @@ scoped_refptr<VideoFrame> FormatVideoFrame(
color_space = container_color_space;
frame->set_color_space(color_space.ToGfxColorSpace());
- frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT, false);
+ frame->metadata()->power_efficient = false;
return frame;
}
@@ -228,11 +247,6 @@ std::string Gav1VideoDecoder::GetDisplayName() const {
return "Gav1VideoDecoder";
}
-int Gav1VideoDecoder::GetMaxDecodeRequests() const {
- DCHECK(libgav1_decoder_);
- return libgav1_decoder_->GetMaxAllowedFrames();
-}
-
void Gav1VideoDecoder::Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* /* cdm_context */,
diff --git a/chromium/media/filters/gav1_video_decoder.h b/chromium/media/filters/gav1_video_decoder.h
index c94ebd248dd..91b1fbdad9c 100644
--- a/chromium/media/filters/gav1_video_decoder.h
+++ b/chromium/media/filters/gav1_video_decoder.h
@@ -36,7 +36,6 @@ class MEDIA_EXPORT Gav1VideoDecoder : public OffloadableVideoDecoder {
// VideoDecoder implementation.
std::string GetDisplayName() const override;
- int GetMaxDecodeRequests() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
@@ -89,7 +88,7 @@ class MEDIA_EXPORT Gav1VideoDecoder : public OffloadableVideoDecoder {
// A decoded buffer used in libgav1 is allocated and managed by
// |frame_pool_|. The buffer can be reused only if libgav1's decoder doesn't
- // use the buffer and rendering the frame is complete.
+ // use the buffer and rendering the frame is complete.
VideoFramePool frame_pool_;
base::queue<DecodeRequest> decode_queue_;
diff --git a/chromium/media/filters/gav1_video_decoder_unittest.cc b/chromium/media/filters/gav1_video_decoder_unittest.cc
index d26f99bb363..5a4d2fda6dc 100644
--- a/chromium/media/filters/gav1_video_decoder_unittest.cc
+++ b/chromium/media/filters/gav1_video_decoder_unittest.cc
@@ -9,7 +9,9 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "base/hash/md5.h"
#include "base/run_loop.h"
+#include "base/strings/string_piece.h"
#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/base/decoder_buffer.h"
@@ -33,6 +35,32 @@ MATCHER(ContainsDecoderErrorLog, "") {
return CONTAINS_STRING(arg, "libgav1::Decoder::DequeueFrame failed");
}
+// Similar to VideoFrame::HashFrameForTesting(), but uses visible_data() and
+// visible_rect() instead of data() and coded_size() to determine the region to
+// hash.
+//
+// The VideoFrame objects created by Gav1VideoDecoder have extended pixels
+// outside the visible_rect(). Those extended pixels are for libgav1 internal
+// use and are not part of the actual video frames. Unlike
+// VideoFrame::HashFrameForTesting(), this function excludes the extended pixels
+// and hashes only the actual video frames.
+void HashFrameVisibleRectForTesting(base::MD5Context* context,
+ const VideoFrame& frame) {
+ DCHECK(context);
+ for (size_t plane = 0; plane < VideoFrame::NumPlanes(frame.format());
+ ++plane) {
+ int rows = frame.Rows(plane, frame.format(), frame.visible_rect().height());
+ for (int row = 0; row < rows; ++row) {
+ int row_bytes =
+ frame.RowBytes(plane, frame.format(), frame.visible_rect().width());
+ base::MD5Update(context, base::StringPiece(reinterpret_cast<const char*>(
+ frame.visible_data(plane) +
+ frame.stride(plane) * row),
+ row_bytes));
+ }
+ }
+}
+
} // namespace
class Gav1VideoDecoderTest : public testing::Test {
@@ -167,10 +195,19 @@ class Gav1VideoDecoderTest : public testing::Test {
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
output_frames_.push_back(std::move(frame));
}
+ std::string GetVideoFrameHash(const VideoFrame& frame) {
+ base::MD5Context md5_context;
+ base::MD5Init(&md5_context);
+ HashFrameVisibleRectForTesting(&md5_context, frame);
+ base::MD5Digest digest;
+ base::MD5Final(&digest, &md5_context);
+ return base::MD5DigestToBase16(digest);
+ }
+
MOCK_METHOD1(DecodeDone, void(DecodeStatus));
testing::StrictMock<MockMediaLog> media_log_;
@@ -213,13 +250,50 @@ TEST_F(Gav1VideoDecoderTest, DecodeFrame_Normal) {
// Simulate decoding a single frame.
EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(i_frame_buffer_));
ASSERT_EQ(1U, output_frames_.size());
+
+ const auto& frame = output_frames_.front();
+ EXPECT_EQ(PIXEL_FORMAT_I420, frame->format());
+ EXPECT_EQ("589dc641b7742ffe7a2b0d4c16aa3e86", GetVideoFrameHash(*frame));
+}
+
+TEST_F(Gav1VideoDecoderTest, DecodeFrame_8bitMono) {
+ Initialize();
+ EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(ReadTestDataFile(
+ "av1-monochrome-I-frame-320x240-8bpp")));
+ ASSERT_EQ(1U, output_frames_.size());
+
+ const auto& frame = output_frames_.front();
+ EXPECT_EQ(PIXEL_FORMAT_I420, frame->format());
+ EXPECT_EQ("eeba03dcc9c22c4632bf74b481db36b2", GetVideoFrameHash(*frame));
+}
+
+TEST_F(Gav1VideoDecoderTest, DecodeFrame_10bitMono) {
+ Initialize();
+ EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(ReadTestDataFile(
+ "av1-monochrome-I-frame-320x240-10bpp")));
+ ASSERT_EQ(1U, output_frames_.size());
+
+ const auto& frame = output_frames_.front();
+ EXPECT_EQ(PIXEL_FORMAT_YUV420P10, frame->format());
+ EXPECT_EQ("026c1fed9e161f09d816ac7278458a80", GetVideoFrameHash(*frame));
+}
+
+// libgav1 does not support bit depth 12.
+TEST_F(Gav1VideoDecoderTest, DISABLED_DecodeFrame_12bitMono) {
+ Initialize();
+ EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(ReadTestDataFile(
+ "av1-monochrome-I-frame-320x240-12bpp")));
+ ASSERT_EQ(1U, output_frames_.size());
+
+ const auto& frame = output_frames_.front();
+ EXPECT_EQ(PIXEL_FORMAT_YUV420P12, frame->format());
+ EXPECT_EQ("32115092dc00fbe86823b0b714a0f63e", GetVideoFrameHash(*frame));
}
// Decode |i_frame_buffer_| and then a frame with a larger width and verify
// the output size was adjusted.
-// TODO(dalecurtis): Get an I-frame from a larger video.
-TEST_F(Gav1VideoDecoderTest, DISABLED_DecodeFrame_LargerWidth) {
- DecodeIFrameThenTestFile("av1-I-frame-320x240", gfx::Size(1280, 720));
+TEST_F(Gav1VideoDecoderTest, DecodeFrame_LargerWidth) {
+ DecodeIFrameThenTestFile("av1-I-frame-1280x720", gfx::Size(1280, 720));
}
// Decode a VP9 frame which should trigger a decoder error.
diff --git a/chromium/media/filters/ivf_parser.cc b/chromium/media/filters/ivf_parser.cc
index 4e3a149e8d8..8991799d41d 100644
--- a/chromium/media/filters/ivf_parser.cc
+++ b/chromium/media/filters/ivf_parser.cc
@@ -6,6 +6,8 @@
#include <cstring>
+#include "base/check.h"
+#include "base/check_op.h"
#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "base/sys_byteorder.h"
diff --git a/chromium/media/filters/pipeline_controller.cc b/chromium/media/filters/pipeline_controller.cc
index c2f32d9ec8d..97e430651a7 100644
--- a/chromium/media/filters/pipeline_controller.cc
+++ b/chromium/media/filters/pipeline_controller.cc
@@ -389,6 +389,10 @@ void PipelineController::SetLatencyHint(
pipeline_->SetLatencyHint(latency_hint);
}
+void PipelineController::SetPreservesPitch(bool preserves_pitch) {
+ pipeline_->SetPreservesPitch(preserves_pitch);
+}
+
base::TimeDelta PipelineController::GetMediaTime() const {
return pipeline_->GetMediaTime();
}
diff --git a/chromium/media/filters/pipeline_controller.h b/chromium/media/filters/pipeline_controller.h
index 52bb4757b4d..08db9dcf214 100644
--- a/chromium/media/filters/pipeline_controller.h
+++ b/chromium/media/filters/pipeline_controller.h
@@ -13,9 +13,10 @@
#include "base/time/time.h"
#include "media/base/media_export.h"
#include "media/base/pipeline.h"
-#include "media/base/renderer.h"
namespace media {
+
+class CdmContext;
class Demuxer;
// PipelineController wraps a Pipeline to expose the one-at-a-time operations
@@ -46,6 +47,7 @@ class MEDIA_EXPORT PipelineController {
using SuspendedCB = base::RepeatingClosure;
using BeforeResumeCB = base::RepeatingClosure;
using ResumedCB = base::RepeatingClosure;
+ using CdmAttachedCB = base::OnceCallback<void(bool)>;
// Construct a PipelineController wrapping |pipeline_|.
// The callbacks are:
@@ -130,6 +132,7 @@ class MEDIA_EXPORT PipelineController {
float GetVolume() const;
void SetVolume(float volume);
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint);
+ void SetPreservesPitch(bool preserves_pitch);
base::TimeDelta GetMediaTime() const;
Ranges<base::TimeDelta> GetBufferedTimeRanges() const;
base::TimeDelta GetMediaDuration() const;
diff --git a/chromium/media/filters/pipeline_controller_unittest.cc b/chromium/media/filters/pipeline_controller_unittest.cc
index bf795f37fe8..ffc509be02d 100644
--- a/chromium/media/filters/pipeline_controller_unittest.cc
+++ b/chromium/media/filters/pipeline_controller_unittest.cc
@@ -548,4 +548,13 @@ TEST_F(PipelineControllerTest, ResumePlaybackDuringSwitchingTracksState) {
PipelineController::State::SUSPENDED);
}
+TEST_F(PipelineControllerTest, PreservesPitch) {
+ Complete(StartPipeline());
+ EXPECT_CALL(*pipeline_, SetPreservesPitch(false));
+ pipeline_controller_.SetPreservesPitch(false);
+
+ EXPECT_CALL(*pipeline_, SetPreservesPitch(true));
+ pipeline_controller_.SetPreservesPitch(true);
+}
+
} // namespace media
diff --git a/chromium/media/filters/source_buffer_range.cc b/chromium/media/filters/source_buffer_range.cc
index 92da7cc8380..80f776452f2 100644
--- a/chromium/media/filters/source_buffer_range.cc
+++ b/chromium/media/filters/source_buffer_range.cc
@@ -665,7 +665,6 @@ bool SourceBufferRange::GetBuffersInRange(base::TimeDelta start,
if (buffer->timestamp() + buffer->duration() <= start)
continue;
- DCHECK(buffer->is_key_frame());
buffers->emplace_back(std::move(buffer));
}
return previous_size < buffers->size();
diff --git a/chromium/media/filters/source_buffer_range.h b/chromium/media/filters/source_buffer_range.h
index eb5b830565e..19326580397 100644
--- a/chromium/media/filters/source_buffer_range.h
+++ b/chromium/media/filters/source_buffer_range.h
@@ -235,10 +235,11 @@ class MEDIA_EXPORT SourceBufferRange {
// this range, then kNoTimestamp is returned.
base::TimeDelta KeyframeBeforeTimestamp(base::TimeDelta timestamp) const;
- // Adds all buffers which overlap [start, end) to the end of |buffers|. If
- // no buffers exist in the range returns false, true otherwise.
- // This method is used for finding audio splice overlap buffers, so all
- // buffers are expected to be keyframes here (so DTS doesn't matter at all).
+ // Adds all buffers which overlap [start, end) to the end of |buffers|. If no
+ // buffers exist in the range returns false, true otherwise. This method is
+ // only used for finding audio splice overlap buffers, so all buffers are
+ // expected to be keyframes here, or if not keyframes, to at least be in PTS
+ // order since the previous keyframe.
bool GetBuffersInRange(base::TimeDelta start,
base::TimeDelta end,
BufferQueue* buffers) const;
diff --git a/chromium/media/filters/source_buffer_stream.cc b/chromium/media/filters/source_buffer_stream.cc
index 27fdc98e3bc..53a9012e7ff 100644
--- a/chromium/media/filters/source_buffer_stream.cc
+++ b/chromium/media/filters/source_buffer_stream.cc
@@ -680,11 +680,13 @@ bool SourceBufferStream::IsDtsMonotonicallyIncreasing(
<< (*itr)->GetDecodeTimestamp().InMicroseconds() << "us dur "
<< (*itr)->duration().InMicroseconds() << "us";
- // FrameProcessor should have enforced that all audio frames are keyframes
- // already.
- DCHECK(current_is_keyframe || GetType() != SourceBufferStreamType::kAudio);
-
- // Only verify DTS monotonicity within the current GOP.
+ // Only verify DTS monotonicity within the current GOP (since the last
+ // keyframe). FrameProcessor should have enforced that all audio frames are
+ // keyframes already, or are nonkeyframes with monotonically increasing PTS
+ // since the last keyframe for those types of audio for which nonkeyframes
+ // may be involved, e.g. xHE-AAC. Video nonkeyframes are not restricted to
+ // being in-order by PTS, but both audio and video nonkeyframes must be in
+ // decode sequence since the last keyframe.
if (current_is_keyframe) {
// Reset prev_dts tracking since a new GOP is starting.
prev_dts = kNoDecodeTimestamp();
@@ -1117,8 +1119,21 @@ void SourceBufferStream::TrimSpliceOverlap(const BufferQueue& new_buffers) {
DCHECK(!new_buffers.empty());
DCHECK_EQ(SourceBufferStreamType::kAudio, GetType());
- // Find the overlapped range (if any).
const base::TimeDelta splice_timestamp = new_buffers.front()->timestamp();
+
+ // Since some audio formats may have nonkeyframes (in PTS order since last
+ // keyframe), if the front of the new buffers is one of those, it cannot be
+ // used to begin a decode following an overlap. Here, we bail in this case,
+ // since such a splice could not be coherently decoded.
+ if (!new_buffers.front()->is_key_frame()) {
+ DVLOG(3) << __func__
+ << " No splice trimming. Front of |new_buffers| is not a "
+ "keyframe, at time "
+ << splice_timestamp.InMicroseconds();
+ return;
+ }
+
+ // Find the overlapped range (if any).
auto range_itr = FindExistingRangeFor(splice_timestamp);
if (range_itr == ranges_.end()) {
DVLOG(3) << __func__ << " No splice trimming. No range overlap at time "
@@ -1131,7 +1146,9 @@ void SourceBufferStream::TrimSpliceOverlap(const BufferQueue& new_buffers) {
const base::TimeDelta end_pts =
splice_timestamp + base::TimeDelta::FromMicroseconds(1);
- // Find if new buffer's start would overlap an existing buffer.
+ // Find if new buffer's start would overlap an existing buffer. Note that
+ // overlapped audio buffers might be nonkeyframes, but if so, FrameProcessor
+ // ensures they are in PTS order since the previous keyframe.
BufferQueue overlapped_buffers;
if (!(*range_itr)
->GetBuffersInRange(splice_timestamp, end_pts,
@@ -1214,10 +1231,11 @@ void SourceBufferStream::TrimSpliceOverlap(const BufferQueue& new_buffers) {
// here due to the overlapped buffer's truncation because the range tracks
// that end time using a pointer to the buffer (which should be
// |overlapped_buffer| if the overlap occurred at the end of the range).
- // Every audio frame is a keyframe, so there is no out-of-order PTS vs DTS
- // sequencing to overcome. If the overlap occurs in the middle of the range,
- // the caller invokes methods on the range which internally update the end
- // time(s) of the resulting range(s) involved in the append.
+ // Every audio frame is either a keyframe, or if a nonkeyframe is in PTS order
+ // since the last keyframe, so there is no out-of-order PTS vs DTS sequencing
+ // to overcome. If the overlap occurs in the middle of the range, the caller
+ // invokes methods on the range which internally update the end time(s) of the
+ // resulting range(s) involved in the append.
std::stringstream log_string;
log_string << "Audio buffer splice at PTS="
diff --git a/chromium/media/filters/video_decoder_stream_unittest.cc b/chromium/media/filters/video_decoder_stream_unittest.cc
index b5c02cf464d..ffd8951f2e6 100644
--- a/chromium/media/filters/video_decoder_stream_unittest.cc
+++ b/chromium/media/filters/video_decoder_stream_unittest.cc
@@ -321,12 +321,9 @@ class VideoDecoderStreamTest
DCHECK(pending_read_);
frame_read_ = frame;
last_read_status_ = status;
- if (frame &&
- !frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) {
- base::TimeDelta metadata_frame_duration;
- EXPECT_TRUE(frame->metadata()->GetTimeDelta(
- VideoFrameMetadata::FRAME_DURATION, &metadata_frame_duration));
- EXPECT_EQ(metadata_frame_duration, demuxer_stream_->duration());
+ if (frame && !frame->metadata()->end_of_stream) {
+ EXPECT_EQ(*frame->metadata()->frame_duration,
+ demuxer_stream_->duration());
num_decoded_frames_++;
}
@@ -356,8 +353,7 @@ class VideoDecoderStreamTest
void ReadAllFrames(int expected_decoded_frames) {
do {
ReadOneFrame();
- } while (frame_read_.get() && !frame_read_->metadata()->IsTrue(
- VideoFrameMetadata::END_OF_STREAM));
+ } while (frame_read_.get() && !frame_read_->metadata()->end_of_stream);
DCHECK_EQ(expected_decoded_frames, num_decoded_frames_);
}
@@ -613,25 +609,14 @@ TEST_P(VideoDecoderStreamTest, Read_ProperMetadata) {
auto* metadata = frame_read_->metadata();
// Verify the decoding metadata is accurate.
- base::TimeTicks decode_start;
- EXPECT_TRUE(metadata->GetTimeTicks(VideoFrameMetadata::DECODE_BEGIN_TIME,
- &decode_start));
-
- base::TimeTicks decode_end;
- EXPECT_TRUE(
- metadata->GetTimeTicks(VideoFrameMetadata::DECODE_END_TIME, &decode_end));
-
- EXPECT_EQ(decode_end - decode_start, kDecodeDelay);
+ EXPECT_EQ(*metadata->decode_end_time - *metadata->decode_begin_time,
+ kDecodeDelay);
// Verify the processing metadata is accurate.
const base::TimeDelta expected_processing_time =
GetParam().has_prepare ? (kDecodeDelay + kPrepareDelay) : kDecodeDelay;
- base::TimeDelta processing_time;
- EXPECT_TRUE(metadata->GetTimeDelta(VideoFrameMetadata::PROCESSING_TIME,
- &processing_time));
-
- EXPECT_EQ(processing_time, expected_processing_time);
+ EXPECT_EQ(*metadata->processing_time, expected_processing_time);
}
TEST_P(VideoDecoderStreamTest, Read_BlockedDemuxer) {
@@ -748,8 +733,7 @@ TEST_P(VideoDecoderStreamTest, Read_DuringEndOfStreamDecode) {
// The read output should indicate end of stream.
ASSERT_TRUE(frame_read_.get());
- EXPECT_TRUE(
- frame_read_->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_TRUE(frame_read_->metadata()->end_of_stream);
}
TEST_P(VideoDecoderStreamTest, Read_DemuxerStreamReadError) {
@@ -997,16 +981,14 @@ TEST_P(VideoDecoderStreamTest,
// A frame should have been emitted.
EXPECT_FALSE(pending_read_);
EXPECT_EQ(last_read_status_, VideoDecoderStream::OK);
- EXPECT_FALSE(
- frame_read_->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_FALSE(frame_read_->metadata()->end_of_stream);
EXPECT_GT(decoder_->total_bytes_decoded(), 0);
ReadOneFrame();
EXPECT_FALSE(pending_read_);
EXPECT_EQ(0, video_decoder_stream_->get_fallback_buffers_size_for_testing());
- EXPECT_TRUE(
- frame_read_->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_TRUE(frame_read_->metadata()->end_of_stream);
}
TEST_P(VideoDecoderStreamTest,
diff --git a/chromium/media/filters/video_renderer_algorithm.cc b/chromium/media/filters/video_renderer_algorithm.cc
index 717398818b6..afc599007d8 100644
--- a/chromium/media/filters/video_renderer_algorithm.cc
+++ b/chromium/media/filters/video_renderer_algorithm.cc
@@ -332,13 +332,12 @@ int64_t VideoRendererAlgorithm::GetMemoryUsage() const {
void VideoRendererAlgorithm::EnqueueFrame(scoped_refptr<VideoFrame> frame) {
DCHECK(frame);
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
// Note: Not all frames have duration. E.g., this class is used with WebRTC
// which does not provide duration information for its frames.
- base::TimeDelta metadata_frame_duration;
- auto has_duration = frame->metadata()->GetTimeDelta(
- VideoFrameMetadata::FRAME_DURATION, &metadata_frame_duration);
+ base::TimeDelta metadata_frame_duration =
+ frame->metadata()->frame_duration.value_or(base::TimeDelta());
auto timestamp = frame->timestamp();
ReadyFrame ready_frame(std::move(frame));
auto it = frame_queue_.empty()
@@ -388,7 +387,7 @@ void VideoRendererAlgorithm::EnqueueFrame(scoped_refptr<VideoFrame> frame) {
//
// Note: This duration value is not compensated for playback rate and
// thus is different than |average_frame_duration_| which is compensated.
- if (!frame_duration_calculator_.count() && has_duration &&
+ if (!frame_duration_calculator_.count() &&
metadata_frame_duration > base::TimeDelta()) {
media_timestamps.push_back(timestamp + metadata_frame_duration);
}
@@ -405,8 +404,7 @@ void VideoRendererAlgorithm::EnqueueFrame(scoped_refptr<VideoFrame> frame) {
wallclock_duration = ready_frame.end_time - ready_frame.start_time;
}
- ready_frame.frame->metadata()->SetTimeDelta(
- VideoFrameMetadata::WALLCLOCK_FRAME_DURATION, wallclock_duration);
+ ready_frame.frame->metadata()->wallclock_frame_duration = wallclock_duration;
// The vast majority of cases should always append to the back, but in rare
// circumstance we get out of order timestamps, http://crbug.com/386551.
@@ -487,10 +485,9 @@ void VideoRendererAlgorithm::UpdateFrameStatistics() {
bool have_metadata_duration = false;
{
const auto& last_frame = frame_queue_.back().frame;
- base::TimeDelta metadata_frame_duration;
- if (last_frame->metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- &metadata_frame_duration) &&
- metadata_frame_duration > base::TimeDelta()) {
+ base::TimeDelta metadata_frame_duration =
+ last_frame->metadata()->frame_duration.value_or(base::TimeDelta());
+ if (metadata_frame_duration > base::TimeDelta()) {
have_metadata_duration = true;
media_timestamps.push_back(last_frame->timestamp() +
metadata_frame_duration);
diff --git a/chromium/media/filters/video_renderer_algorithm_unittest.cc b/chromium/media/filters/video_renderer_algorithm_unittest.cc
index c17c24ab7b5..6707f4eb751 100644
--- a/chromium/media/filters/video_renderer_algorithm_unittest.cc
+++ b/chromium/media/filters/video_renderer_algorithm_unittest.cc
@@ -1212,8 +1212,7 @@ TEST_F(VideoRendererAlgorithmTest, RemoveExpiredFramesWithoutRendering) {
// as effective since we know the duration of it. It is not removed since we
// only have one frame in the queue though.
auto frame = CreateFrame(tg.interval(0));
- frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- tg.interval(1));
+ frame->metadata()->frame_duration = tg.interval(1);
algorithm_.EnqueueFrame(frame);
ASSERT_EQ(0u, algorithm_.RemoveExpiredFrames(tg.current() + tg.interval(3)));
EXPECT_EQ(0u, EffectiveFramesQueued());
@@ -1585,8 +1584,7 @@ TEST_F(VideoRendererAlgorithmTest, InfiniteDurationMetadata) {
TickGenerator tg(tick_clock_->NowTicks(), 50);
auto frame = CreateFrame(kInfiniteDuration);
- frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- tg.interval(1));
+ frame->metadata()->frame_duration = tg.interval(1);
algorithm_.EnqueueFrame(frame);
// This should not crash or fail.
@@ -1599,8 +1597,7 @@ TEST_F(VideoRendererAlgorithmTest, UsesFrameDuration) {
TickGenerator tg(tick_clock_->NowTicks(), 50);
auto frame = CreateFrame(tg.interval(0));
- frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- tg.interval(1));
+ frame->metadata()->frame_duration = tg.interval(1);
algorithm_.EnqueueFrame(frame);
// This should not crash or fail.
@@ -1612,8 +1609,7 @@ TEST_F(VideoRendererAlgorithmTest, UsesFrameDuration) {
constexpr base::TimeDelta kLongDuration = base::TimeDelta::FromSeconds(3);
for (int i = 1; i < 4; ++i) {
frame = CreateFrame(tg.interval(i));
- frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- i == 3 ? kLongDuration : tg.interval(1));
+ frame->metadata()->frame_duration = i == 3 ? kLongDuration : tg.interval(1);
algorithm_.EnqueueFrame(frame);
}
@@ -1635,8 +1631,7 @@ TEST_F(VideoRendererAlgorithmTest, WallClockDurationMetadataSet) {
for (int i = 0; i < frame_count; i++) {
auto frame = CreateFrame(tg.interval(i));
- frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- tg.interval(1));
+ frame->metadata()->frame_duration = tg.interval(1);
algorithm_.EnqueueFrame(frame);
}
@@ -1645,12 +1640,8 @@ TEST_F(VideoRendererAlgorithmTest, WallClockDurationMetadataSet) {
auto frame = RenderAndStep(&tg, &frames_dropped);
SCOPED_TRACE(base::StringPrintf("Frame #%d", i));
- base::TimeDelta wallclock_duration;
- EXPECT_TRUE(frame->metadata()->GetTimeDelta(
- media::VideoFrameMetadata::WALLCLOCK_FRAME_DURATION,
- &wallclock_duration));
- EXPECT_EQ(wallclock_duration, intended_duration);
+ EXPECT_EQ(*frame->metadata()->wallclock_frame_duration, intended_duration);
EXPECT_EQ(algorithm_.average_frame_duration(), intended_duration);
}
}
diff --git a/chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc b/chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc
index ac102a6a274..a1d5c8e7603 100644
--- a/chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc
+++ b/chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc
@@ -7,6 +7,7 @@
#include <fuzzer/FuzzedDataProvider.h>
+#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "media/base/decrypt_config.h"
diff --git a/chromium/media/filters/vp9_parser_fuzzertest.cc b/chromium/media/filters/vp9_parser_fuzzertest.cc
index a4343d56e87..07fb0ad2847 100644
--- a/chromium/media/filters/vp9_parser_fuzzertest.cc
+++ b/chromium/media/filters/vp9_parser_fuzzertest.cc
@@ -5,6 +5,7 @@
#include <stddef.h>
#include <stdint.h>
+#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "media/filters/ivf_parser.h"
#include "media/filters/vp9_parser.h"
diff --git a/chromium/media/filters/vpx_video_decoder.cc b/chromium/media/filters/vpx_video_decoder.cc
index d900542c134..9c1dcb6f95e 100644
--- a/chromium/media/filters/vpx_video_decoder.cc
+++ b/chromium/media/filters/vpx_video_decoder.cc
@@ -182,8 +182,7 @@ void VpxVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
// We might get a successful VpxDecode but not a frame if only a partial
// decode happened.
if (video_frame) {
- video_frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT,
- false);
+ video_frame->metadata()->power_efficient = false;
output_cb_.Run(video_frame);
}
@@ -242,6 +241,14 @@ bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
<< vpx_codec_error(vpx_codec_.get());
return false;
}
+
+ vpx_codec_err_t status =
+ vpx_codec_control(vpx_codec_.get(), VP9D_SET_LOOP_FILTER_OPT, 1);
+ if (status != VPX_CODEC_OK) {
+ DLOG(ERROR) << "Failed to enable VP9D_SET_LOOP_FILTER_OPT. "
+ << vpx_codec_error(vpx_codec_.get());
+ return false;
+ }
}
if (config.alpha_mode() == VideoDecoderConfig::AlphaMode::kIsOpaque)
diff --git a/chromium/media/filters/vpx_video_decoder_unittest.cc b/chromium/media/filters/vpx_video_decoder_unittest.cc
index b1d19c3bfc8..f8893579f3e 100644
--- a/chromium/media/filters/vpx_video_decoder_unittest.cc
+++ b/chromium/media/filters/vpx_video_decoder_unittest.cc
@@ -161,7 +161,7 @@ class VpxVideoDecoderTest : public testing::Test {
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
output_frames_.push_back(std::move(frame));
}
diff --git a/chromium/media/filters/wsola_internals.cc b/chromium/media/filters/wsola_internals.cc
index dd269a67ef3..684c9568478 100644
--- a/chromium/media/filters/wsola_internals.cc
+++ b/chromium/media/filters/wsola_internals.cc
@@ -308,7 +308,7 @@ int OptimalIndex(const AudioBus* search_block,
energy_candidate_blocks.get());
}
-void GetSymmetricHanningWindow(int window_length, float* window) {
+void GetPeriodicHanningWindow(int window_length, float* window) {
const float scale = 2.0f * base::kPiFloat / window_length;
for (int n = 0; n < window_length; ++n)
window[n] = 0.5f * (1.0f - std::cos(n * scale));
diff --git a/chromium/media/filters/wsola_internals.h b/chromium/media/filters/wsola_internals.h
index 13d2875cb31..527b89d32c8 100644
--- a/chromium/media/filters/wsola_internals.h
+++ b/chromium/media/filters/wsola_internals.h
@@ -78,7 +78,7 @@ MEDIA_EXPORT int OptimalIndex(const AudioBus* search_block,
// Return a "periodic" Hann window. This is the first L samples of an L+1
// Hann window. It is perfect reconstruction for overlap-and-add.
-MEDIA_EXPORT void GetSymmetricHanningWindow(int window_length, float* window);
+MEDIA_EXPORT void GetPeriodicHanningWindow(int window_length, float* window);
} // namespace internal
diff --git a/chromium/media/formats/common/offset_byte_queue.cc b/chromium/media/formats/common/offset_byte_queue.cc
index b355e1adf26..831c4d0ffe9 100644
--- a/chromium/media/formats/common/offset_byte_queue.cc
+++ b/chromium/media/formats/common/offset_byte_queue.cc
@@ -4,16 +4,17 @@
#include "media/formats/common/offset_byte_queue.h"
+#include "base/check.h"
#include "base/logging.h"
namespace media {
-OffsetByteQueue::OffsetByteQueue() : buf_(NULL), size_(0), head_(0) {}
+OffsetByteQueue::OffsetByteQueue() : buf_(nullptr), size_(0), head_(0) {}
OffsetByteQueue::~OffsetByteQueue() = default;
void OffsetByteQueue::Reset() {
queue_.Reset();
- buf_ = NULL;
+ buf_ = nullptr;
size_ = 0;
head_ = 0;
}
@@ -25,7 +26,7 @@ void OffsetByteQueue::Push(const uint8_t* buf, int size) {
}
void OffsetByteQueue::Peek(const uint8_t** buf, int* size) {
- *buf = size_ > 0 ? buf_ : NULL;
+ *buf = size_ > 0 ? buf_ : nullptr;
*size = size_;
}
@@ -38,7 +39,7 @@ void OffsetByteQueue::Pop(int count) {
void OffsetByteQueue::PeekAt(int64_t offset, const uint8_t** buf, int* size) {
DCHECK(offset >= head());
if (offset < head() || offset >= tail()) {
- *buf = NULL;
+ *buf = nullptr;
*size = 0;
return;
}
diff --git a/chromium/media/formats/mp2t/es_adapter_video.cc b/chromium/media/formats/mp2t/es_adapter_video.cc
index 15a3dd398d9..88bdecc667f 100644
--- a/chromium/media/formats/mp2t/es_adapter_video.cc
+++ b/chromium/media/formats/mp2t/es_adapter_video.cc
@@ -6,6 +6,7 @@
#include <stddef.h>
+#include "base/logging.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_decoder_config.h"
#include "media/formats/mp2t/mp2t_common.h"
diff --git a/chromium/media/formats/mp2t/mp2t_common.h b/chromium/media/formats/mp2t/mp2t_common.h
index 37e5891bb15..ebced862cbe 100644
--- a/chromium/media/formats/mp2t/mp2t_common.h
+++ b/chromium/media/formats/mp2t/mp2t_common.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_FORMATS_MP2T_MP2T_COMMON_H_
#define MEDIA_FORMATS_MP2T_MP2T_COMMON_H_
+#include "base/logging.h"
+
#define LOG_LEVEL_TS 5
#define LOG_LEVEL_PES 4
#define LOG_LEVEL_ES 3
diff --git a/chromium/media/formats/mp2t/ts_packet.cc b/chromium/media/formats/mp2t/ts_packet.cc
index 9d19b2290af..ea9e9072cdb 100644
--- a/chromium/media/formats/mp2t/ts_packet.cc
+++ b/chromium/media/formats/mp2t/ts_packet.cc
@@ -6,6 +6,7 @@
#include <memory>
+#include "base/logging.h"
#include "media/base/bit_reader.h"
#include "media/formats/mp2t/mp2t_common.h"
diff --git a/chromium/media/formats/mp4/box_reader.cc b/chromium/media/formats/mp4/box_reader.cc
index 131ae34e0df..d41f2f91494 100644
--- a/chromium/media/formats/mp4/box_reader.cc
+++ b/chromium/media/formats/mp4/box_reader.cc
@@ -177,9 +177,9 @@ bool BoxReader::IsValidTopLevelBox(const FourCC& type, MediaLog* media_log) {
case FOURCC_EMSG:
return true;
default:
- // Hex is used to show nonprintable characters and aid in debugging
- MEDIA_LOG(DEBUG, media_log) << "Unrecognized top-level box type "
- << FourCCToString(type);
+ // Hex is used to show nonprintable characters and aid in debugging.
+ MEDIA_LOG(ERROR, media_log)
+ << "Invalid top-level ISO BMFF box type " << FourCCToString(type);
return false;
}
}
diff --git a/chromium/media/formats/mp4/box_reader_unittest.cc b/chromium/media/formats/mp4/box_reader_unittest.cc
index cc496d9eaa9..33ebe37b8f5 100644
--- a/chromium/media/formats/mp4/box_reader_unittest.cc
+++ b/chromium/media/formats/mp4/box_reader_unittest.cc
@@ -213,7 +213,10 @@ TEST_F(BoxReaderTest, WrongFourCCTest) {
buf[6] = 0x4c;
buf[7] = 0x45;
- EXPECT_MEDIA_LOG(HasSubstr("Unrecognized top-level box type DALE"));
+ // Also, tests that the offending FourCC is emitted only in a debug media log.
+ EXPECT_MEDIA_LOG(
+ AllOf(HasSubstr("error"),
+ HasSubstr("Invalid top-level ISO BMFF box type DALE")));
std::unique_ptr<BoxReader> reader;
ParseResult result =
diff --git a/chromium/media/formats/mp4/mp4_stream_parser.cc b/chromium/media/formats/mp4/mp4_stream_parser.cc
index 6fa01d734b6..6a1db373af6 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser.cc
@@ -257,8 +257,7 @@ ParseResult MP4StreamParser::ParseBox() {
} else {
// TODO(wolenetz,chcunningham): Enforce more strict adherence to MSE byte
// stream spec for ftyp and styp. See http://crbug.com/504514.
- DVLOG(2) << "Skipping unrecognized top-level box: "
- << FourCCToString(reader->type());
+ DVLOG(2) << "Skipping top-level box: " << FourCCToString(reader->type());
}
queue_.Pop(reader->box_size());
diff --git a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
index c04a2c82c3c..1955d5610d3 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
@@ -17,6 +17,7 @@
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/test/metrics/histogram_tester.h"
+#include "base/test/mock_callback.h"
#include "base/time/time.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
@@ -43,6 +44,21 @@ using base::TimeDelta;
namespace media {
namespace mp4 {
+namespace {
+
+// Useful in single-track test media cases that need to verify
+// keyframe/non-keyframe sequence in output of parse.
+enum class Keyframeness {
+ kKeyframe = 0,
+ kNonKeyframe,
+};
+
+// Tells gtest how to print our Keyframeness enum values.
+std::ostream& operator<<(std::ostream& os, Keyframeness k) {
+ return os << (k == Keyframeness::kKeyframe ? "kKeyframe" : "kNonKeyframe");
+}
+
+} // namespace
// Matchers for verifying common media log entry strings.
MATCHER(SampleEncryptionInfoUnavailableLog, "") {
@@ -80,6 +96,7 @@ class MP4StreamParserTest : public testing::Test {
StreamParser::TrackId audio_track_id_;
StreamParser::TrackId video_track_id_;
bool verifying_keyframeness_sequence_;
+ StrictMock<base::MockRepeatingCallback<void(Keyframeness)>> keyframeness_cb_;
bool AppendData(const uint8_t* data, size_t length) {
return parser_->Parse(data, length);
@@ -141,11 +158,6 @@ class MP4StreamParserTest : public testing::Test {
return true;
}
- // Useful in single-track test media cases that need to verify
- // keyframe/non-keyframe sequence in output of parse.
- MOCK_METHOD0(ParsedKeyframe, void());
- MOCK_METHOD0(ParsedNonKeyframe, void());
-
bool NewBuffersF(const StreamParser::BufferQueueMap& buffer_queue_map) {
DecodeTimestamp lowest_end_dts = kNoDecodeTimestamp();
for (const auto& it : buffer_queue_map) {
@@ -167,10 +179,9 @@ class MP4StreamParserTest : public testing::Test {
// Let single-track tests verify the sequence of keyframes/nonkeyframes.
if (verifying_keyframeness_sequence_) {
- if (buf->is_key_frame())
- ParsedKeyframe();
- else
- ParsedNonKeyframe();
+ keyframeness_cb_.Run(buf->is_key_frame()
+ ? Keyframeness::kKeyframe
+ : Keyframeness::kNonKeyframe);
}
}
}
@@ -347,8 +358,8 @@ TEST_F(MP4StreamParserTest, AVC_KeyAndNonKeyframeness_Match_Container) {
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
verifying_keyframeness_sequence_ = true;
- EXPECT_CALL(*this, ParsedKeyframe());
- EXPECT_CALL(*this, ParsedNonKeyframe());
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kKeyframe));
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kNonKeyframe));
ParseMP4File("bear-640x360-v-2frames_frag.mp4", 512);
}
@@ -366,8 +377,8 @@ TEST_F(MP4StreamParserTest, AVC_Keyframeness_Mismatches_Container) {
EXPECT_MEDIA_LOG(DebugLog(
"ISO-BMFF container metadata for video frame indicates that the frame is "
"not a keyframe, but the video frame contents indicate the opposite."));
- EXPECT_CALL(*this, ParsedKeyframe());
- EXPECT_CALL(*this, ParsedNonKeyframe());
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kKeyframe));
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kNonKeyframe));
ParseMP4File("bear-640x360-v-2frames-keyframe-is-non-sync-sample_frag.mp4",
512);
}
@@ -383,11 +394,11 @@ TEST_F(MP4StreamParserTest, AVC_NonKeyframeness_Mismatches_Container) {
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
verifying_keyframeness_sequence_ = true;
- EXPECT_CALL(*this, ParsedKeyframe());
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kKeyframe));
EXPECT_MEDIA_LOG(DebugLog(
"ISO-BMFF container metadata for video frame indicates that the frame is "
"a keyframe, but the video frame contents indicate the opposite."));
- EXPECT_CALL(*this, ParsedNonKeyframe());
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kNonKeyframe));
ParseMP4File("bear-640x360-v-2frames-nonkeyframe-is-sync-sample_frag.mp4",
512);
}
@@ -405,16 +416,21 @@ TEST_F(MP4StreamParserTest, MPEG2_AAC_LC) {
}
TEST_F(MP4StreamParserTest, MPEG4_XHE_AAC) {
- InSequence s;
+ InSequence s; // The keyframeness sequence matters for this test.
std::set<int> audio_object_types;
audio_object_types.insert(kISO_14496_3);
parser_.reset(new MP4StreamParser(audio_object_types, false, false));
auto params = GetDefaultInitParametersExpectations();
- params.duration = base::TimeDelta::FromMicroseconds(1024000);
- params.liveness = DemuxerStream::LIVENESS_RECORDED;
params.detected_video_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
+
+ // This test file contains a single audio keyframe followed by 23
+ // non-keyframes.
+ verifying_keyframeness_sequence_ = true;
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kKeyframe));
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kNonKeyframe)).Times(23);
+
ParseMP4File("noise-xhe-aac.mp4", 512);
EXPECT_EQ(audio_decoder_config_.profile(), AudioCodecProfile::kXHE_AAC);
}
@@ -501,8 +517,8 @@ TEST_F(MP4StreamParserTest, HEVC_KeyAndNonKeyframeness_Match_Container) {
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
verifying_keyframeness_sequence_ = true;
- EXPECT_CALL(*this, ParsedKeyframe());
- EXPECT_CALL(*this, ParsedNonKeyframe());
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kKeyframe));
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kNonKeyframe));
ParseMP4File("bear-320x240-v-2frames_frag-hevc.mp4", 256);
}
@@ -520,8 +536,8 @@ TEST_F(MP4StreamParserTest, HEVC_Keyframeness_Mismatches_Container) {
EXPECT_MEDIA_LOG(DebugLog(
"ISO-BMFF container metadata for video frame indicates that the frame is "
"not a keyframe, but the video frame contents indicate the opposite."));
- EXPECT_CALL(*this, ParsedKeyframe());
- EXPECT_CALL(*this, ParsedNonKeyframe());
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kKeyframe));
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kNonKeyframe));
ParseMP4File(
"bear-320x240-v-2frames-keyframe-is-non-sync-sample_frag-hevc.mp4", 256);
}
@@ -537,11 +553,11 @@ TEST_F(MP4StreamParserTest, HEVC_NonKeyframeness_Mismatches_Container) {
params.detected_audio_track_count = 0;
InitializeParserWithInitParametersExpectations(params);
verifying_keyframeness_sequence_ = true;
- EXPECT_CALL(*this, ParsedKeyframe());
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kKeyframe));
EXPECT_MEDIA_LOG(DebugLog(
"ISO-BMFF container metadata for video frame indicates that the frame is "
"a keyframe, but the video frame contents indicate the opposite."));
- EXPECT_CALL(*this, ParsedNonKeyframe());
+ EXPECT_CALL(keyframeness_cb_, Run(Keyframeness::kNonKeyframe));
ParseMP4File(
"bear-320x240-v-2frames-nonkeyframe-is-sync-sample_frag-hevc.mp4", 256);
}
diff --git a/chromium/media/formats/webm/webm_parser.cc b/chromium/media/formats/webm/webm_parser.cc
index f59bec22748..a31fc1667cb 100644
--- a/chromium/media/formats/webm/webm_parser.cc
+++ b/chromium/media/formats/webm/webm_parser.cc
@@ -17,7 +17,9 @@
#include <iomanip>
#include <limits>
+#include "base/check_op.h"
#include "base/logging.h"
+#include "base/notreached.h"
#include "base/numerics/safe_conversions.h"
#include "base/stl_util.h"
#include "media/formats/webm/webm_constants.h"
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
index b4f6476516c..0b1c04bca4c 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
@@ -7,7 +7,6 @@
#include <lib/sys/cpp/component_context.h>
#include "base/bind.h"
-#include "base/fuchsia/default_context.h"
#include "base/fuchsia/fuchsia_logging.h"
#include "base/logging.h"
#include "base/sequenced_task_runner.h"
@@ -235,6 +234,8 @@ void FuchsiaAudioRenderer::SetLatencyHint(
// shape and usefulness outside of fuchsia.
}
+void FuchsiaAudioRenderer::SetPreservesPitch(bool preserves_pitch) {}
+
void FuchsiaAudioRenderer::StartTicking() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
@@ -290,8 +291,10 @@ void FuchsiaAudioRenderer::SetMediaTime(base::TimeDelta time) {
base::TimeDelta FuchsiaAudioRenderer::CurrentMediaTime() {
base::AutoLock lock(timeline_lock_);
- if (state_ != PlaybackState::kPlaying)
+ if (state_ != PlaybackState::kPlaying &&
+ state_ != PlaybackState::kEndOfStream) {
return media_pos_;
+ }
return CurrentMediaTimeLocked();
}
@@ -304,7 +307,8 @@ bool FuchsiaAudioRenderer::GetWallClockTimes(
base::AutoLock lock(timeline_lock_);
- const bool is_time_moving = state_ == PlaybackState::kPlaying;
+ const bool is_time_moving = state_ == PlaybackState::kPlaying ||
+ state_ == PlaybackState::kEndOfStream;
if (media_timestamps.empty()) {
wall_clock_times->push_back(is_time_moving ? now : base::TimeTicks());
@@ -428,8 +432,7 @@ void FuchsiaAudioRenderer::OnAudioConsumerStatusChanged(
void FuchsiaAudioRenderer::ScheduleReadDemuxerStream() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- if (!demuxer_stream_ || read_timer_.IsRunning() ||
- demuxer_stream_->IsReadPending() ||
+ if (!demuxer_stream_ || read_timer_.IsRunning() || is_demuxer_read_pending_ ||
GetPlaybackState() == PlaybackState::kEndOfStream ||
num_pending_packets_ >= stream_sink_buffers_.size()) {
return;
@@ -456,7 +459,9 @@ void FuchsiaAudioRenderer::ScheduleReadDemuxerStream() {
void FuchsiaAudioRenderer::ReadDemuxerStream() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(demuxer_stream_);
+ DCHECK(!is_demuxer_read_pending_);
+ is_demuxer_read_pending_ = true;
demuxer_stream_->Read(
base::BindOnce(&FuchsiaAudioRenderer::OnDemuxerStreamReadDone,
weak_factory_.GetWeakPtr()));
@@ -466,6 +471,9 @@ void FuchsiaAudioRenderer::OnDemuxerStreamReadDone(
DemuxerStream::Status read_status,
scoped_refptr<DecoderBuffer> buffer) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(is_demuxer_read_pending_);
+
+ is_demuxer_read_pending_ = false;
if (read_status != DemuxerStream::kOk) {
if (read_status == DemuxerStream::kError) {
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
index b22ff67d179..fec88cb1a2f 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
@@ -43,6 +43,7 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
void StartPlaying() final;
void SetVolume(float volume) final;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) final;
+ void SetPreservesPitch(bool preserves_pitch) final;
// TimeSource implementation.
void StartTicking() final;
@@ -132,6 +133,8 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
fuchsia::media::audio::VolumeControlPtr volume_control_;
DemuxerStream* demuxer_stream_ = nullptr;
+ bool is_demuxer_read_pending_ = false;
+
RendererClient* client_ = nullptr;
// Initialize() completion callback.
diff --git a/chromium/media/fuchsia/camera/fake_fuchsia_camera.cc b/chromium/media/fuchsia/camera/fake_fuchsia_camera.cc
index 921a1b6cfb1..cae3ecab9f6 100644
--- a/chromium/media/fuchsia/camera/fake_fuchsia_camera.cc
+++ b/chromium/media/fuchsia/camera/fake_fuchsia_camera.cc
@@ -7,7 +7,7 @@
#include <fuchsia/sysmem/cpp/fidl.h>
#include <lib/sys/cpp/component_context.h>
-#include "base/fuchsia/default_context.h"
+#include "base/fuchsia/process_context.h"
#include "base/memory/platform_shared_memory_region.h"
#include "base/memory/writable_shared_memory_region.h"
#include "base/message_loop/message_loop_current.h"
@@ -290,7 +290,7 @@ void FakeCameraStream::SetBufferCollection(
SendBufferCollection();
// Initialize the new collection using |local_token|.
- auto allocator = base::fuchsia::ComponentContextForCurrentProcess()
+ auto allocator = base::ComponentContextForProcess()
->svc()
->Connect<fuchsia::sysmem::Allocator>();
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.cc b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.cc
index a95c5e3e597..0f2e5828ffb 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.cc
@@ -25,7 +25,6 @@ FuchsiaCdmFactory::~FuchsiaCdmFactory() = default;
void FuchsiaCdmFactory::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -35,11 +34,6 @@ void FuchsiaCdmFactory::Create(
CdmCreatedCB bound_cdm_created_cb =
BindToCurrentLoop(std::move(cdm_created_cb));
- if (security_origin.opaque()) {
- std::move(bound_cdm_created_cb).Run(nullptr, "Invalid origin.");
- return;
- }
-
if (CanUseAesDecryptor(key_system)) {
auto cdm = base::MakeRefCounted<AesDecryptor>(
session_message_cb, session_closed_cb, session_keys_change_cb,
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h
index ad8cd9248e9..76f4c05d6da 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h
@@ -22,7 +22,6 @@ class MEDIA_EXPORT FuchsiaCdmFactory : public CdmFactory {
// CdmFactory implementation.
void Create(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_pool.cc b/chromium/media/fuchsia/common/sysmem_buffer_pool.cc
index 94c1a1f43d9..2b4b0b5487a 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_pool.cc
+++ b/chromium/media/fuchsia/common/sysmem_buffer_pool.cc
@@ -8,8 +8,8 @@
#include <algorithm>
#include "base/bind.h"
-#include "base/fuchsia/default_context.h"
#include "base/fuchsia/fuchsia_logging.h"
+#include "base/fuchsia/process_context.h"
#include "media/fuchsia/common/sysmem_buffer_reader.h"
#include "media/fuchsia/common/sysmem_buffer_writer.h"
@@ -126,7 +126,7 @@ void SysmemBufferPool::OnError() {
}
BufferAllocator::BufferAllocator() {
- allocator_ = base::fuchsia::ComponentContextForCurrentProcess()
+ allocator_ = base::ComponentContextForProcess()
->svc()
->Connect<fuchsia::sysmem::Allocator>();
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_writer.cc b/chromium/media/fuchsia/common/sysmem_buffer_writer.cc
index addfafb6e94..0942b9f5337 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_writer.cc
+++ b/chromium/media/fuchsia/common/sysmem_buffer_writer.cc
@@ -70,16 +70,30 @@ class SysmemBufferWriter::Buffer {
size_t bytes_to_fill = std::min(size_, data.size());
memcpy(base_address_ + offset_, data.data(), bytes_to_fill);
- // Flush CPU cache if StreamProcessor reads from RAM.
- if (coherency_domain_ == fuchsia::sysmem::CoherencyDomain::RAM) {
- zx_status_t status = zx_cache_flush(base_address_ + offset_,
- bytes_to_fill, ZX_CACHE_FLUSH_DATA);
- ZX_DCHECK(status == ZX_OK, status) << "zx_cache_flush";
- }
+ FlushBuffer(0, bytes_to_fill);
return bytes_to_fill;
}
+ base::span<uint8_t> ReserveAndMapBuffer() {
+ DCHECK(!is_used_);
+ is_used_ = true;
+ return base::make_span(base_address_ + offset_, size_);
+ }
+
+ void FlushBuffer(size_t flush_offset, size_t flush_size) {
+ DCHECK(is_used_);
+ DCHECK_LE(flush_size, size_ - flush_offset);
+
+ if (coherency_domain_ != fuchsia::sysmem::CoherencyDomain::RAM)
+ return;
+
+ uint8_t* address = base_address_ + offset_ + flush_offset;
+ zx_status_t status =
+ zx_cache_flush(address, flush_size, ZX_CACHE_FLUSH_DATA);
+ ZX_DCHECK(status == ZX_OK, status) << "zx_cache_flush";
+ }
+
void Release() { is_used_ = false; }
private:
@@ -187,4 +201,16 @@ SysmemBufferWriter::GetRecommendedConstraints(
return buffer_constraints;
}
+base::span<uint8_t> SysmemBufferWriter::ReserveAndMapBuffer(size_t index) {
+ DCHECK_LT(index, buffers_.size());
+ return buffers_[index].ReserveAndMapBuffer();
+}
+
+void SysmemBufferWriter::FlushBuffer(size_t index,
+ size_t flush_offset,
+ size_t flush_size) {
+ DCHECK_LT(index, buffers_.size());
+ return buffers_[index].FlushBuffer(flush_offset, flush_size);
+}
+
} // namespace media
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_writer.h b/chromium/media/fuchsia/common/sysmem_buffer_writer.h
index 9aed936f2b2..aaee25c1449 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_writer.h
+++ b/chromium/media/fuchsia/common/sysmem_buffer_writer.h
@@ -11,6 +11,7 @@
#include <memory>
#include "base/containers/span.h"
+#include "base/memory/shared_memory_mapping.h"
#include "base/optional.h"
namespace media {
@@ -30,11 +31,24 @@ class SysmemBufferWriter {
explicit SysmemBufferWriter(std::vector<Buffer> buffers);
~SysmemBufferWriter();
- // Write the content of |data| into buffer at |index|. Return num of bytes
- // written into the buffer. Write a used buffer will fail. It will mark the
- // buffer as "used".
+ SysmemBufferWriter(const SysmemBufferWriter&) = delete;
+ SysmemBufferWriter& operator=(const SysmemBufferWriter&) = delete;
+
+ // Write the content of |data| into the buffer at |index|. Return num of bytes
+ // written into the buffer. Can be called only for an unused buffer. Marks
+ // the buffer as used.
size_t Write(size_t index, base::span<const uint8_t> data);
+ // Returns a span for the memory-mapping of the buffer with the specified
+ // |index|. Can be called only for an unused buffer. Marks the buffer as used.
+ // Callers must call FlushCache() after they are finished updating the buffer.
+ base::span<uint8_t> ReserveAndMapBuffer(size_t index);
+
+ // Flushes CPU cache for specified range in the buffer with the specified
+ // |index| in case the buffer collection uses RAM coherency. No-op for
+ // collections with RAM coherency.
+ void FlushBuffer(size_t index, size_t flush_offset, size_t flush_size);
+
// Acquire unused buffer for write. If |min_size| is provided, the returned
// buffer will have available size larger than |min_size|. This will NOT
// mark the buffer as "used".
@@ -50,8 +64,6 @@ class SysmemBufferWriter {
private:
std::vector<Buffer> buffers_;
-
- DISALLOW_COPY_AND_ASSIGN(SysmemBufferWriter);
};
} // namespace media
diff --git a/chromium/media/fuchsia/metrics/BUILD.gn b/chromium/media/fuchsia/metrics/BUILD.gn
deleted file mode 100644
index ee6952ba233..00000000000
--- a/chromium/media/fuchsia/metrics/BUILD.gn
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-source_set("metrics") {
- sources = [
- "fuchsia_playback_events_recorder.cc",
- "fuchsia_playback_events_recorder.h",
- ]
- deps = [
- "//media/mojo/mojom",
- "//mojo/public/cpp/bindings",
- ]
-}
-
-source_set("unittests") {
- testonly = true
-
- deps = [
- ":metrics",
- "//base",
- "//base/test:test_support",
- "//media",
- "//testing/gtest",
- ]
-
- sources = [ "fuchsia_playback_events_recorder_test.cc" ]
-}
diff --git a/chromium/media/fuchsia/metrics/DEPS b/chromium/media/fuchsia/metrics/DEPS
deleted file mode 100644
index ef8ad28d9d4..00000000000
--- a/chromium/media/fuchsia/metrics/DEPS
+++ /dev/null
@@ -1,3 +0,0 @@
-include_rules = [
- "+mojo/public",
-]
diff --git a/chromium/media/gpu/BUILD.gn b/chromium/media/gpu/BUILD.gn
index 767a3316092..ec65dea1c67 100644
--- a/chromium/media/gpu/BUILD.gn
+++ b/chromium/media/gpu/BUILD.gn
@@ -30,6 +30,7 @@ component("gpu") {
"//chrome/gpu",
"//chromecast/*",
"//components/arc/mojom:media",
+ "//components/arc/mojom:media_mojolpm",
"//components/arc/video_accelerator",
"//components/mirroring/service:mirroring_service",
"//components/chromeos_camera/*",
@@ -166,6 +167,7 @@ component("gpu") {
if (is_win) {
sources += [
+ "windows/av1_guids.h",
"windows/d3d11_com_defs.h",
"windows/d3d11_copying_texture_wrapper.cc",
"windows/d3d11_copying_texture_wrapper.h",
@@ -559,7 +561,9 @@ if (use_v4l2_codec || use_vaapi) {
data = [ "//media/test/data/" ]
deps = [
":buildflags",
+ "test:frame_validator",
"test:helpers",
+ "test:test_helpers",
"test:video_encoder",
"test:video_encoder_test_environment",
"//media:test_support",
diff --git a/chromium/media/gpu/OWNERS b/chromium/media/gpu/OWNERS
index 2b325453c62..c805b55640c 100644
--- a/chromium/media/gpu/OWNERS
+++ b/chromium/media/gpu/OWNERS
@@ -5,6 +5,7 @@ sandersd@chromium.org
# For chromeos/, linux/, v4l2/, and vaapi/ -specific changes.
acourbot@chromium.org
+frkoenig@chromium.org
hiroh@chromium.org
jcliang@chromium.org
jkardatzke@chromium.org
diff --git a/chromium/media/gpu/android/codec_image.cc b/chromium/media/gpu/android/codec_image.cc
index ae3a90da5c9..1a0b7d93184 100644
--- a/chromium/media/gpu/android/codec_image.cc
+++ b/chromium/media/gpu/android/codec_image.cc
@@ -25,11 +25,11 @@ CodecImage::~CodecImage() {
void CodecImage::Initialize(
std::unique_ptr<CodecOutputBufferRenderer> output_buffer_renderer,
- scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
+ bool is_texture_owner_backed,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb) {
DCHECK(output_buffer_renderer);
output_buffer_renderer_ = std::move(output_buffer_renderer);
- codec_buffer_wait_coordinator_ = std::move(codec_buffer_wait_coordinator);
+ is_texture_owner_backed_ = is_texture_owner_backed;
promotion_hint_cb_ = std::move(promotion_hint_cb);
}
@@ -42,7 +42,6 @@ void CodecImage::NotifyUnused() {
// our reference to the TextureOwner (if any). In other words, undo anything
// that we did in Initialize.
ReleaseCodecBuffer();
- codec_buffer_wait_coordinator_.reset();
promotion_hint_cb_ = base::NullCallback();
for (auto& cb : unused_cbs_)
@@ -65,7 +64,7 @@ unsigned CodecImage::GetDataType() {
CodecImage::BindOrCopy CodecImage::ShouldBindOrCopy() {
// If we're using an overlay, then pretend it's bound. That way, we'll get
// calls to ScheduleOverlayPlane. Otherwise, CopyTexImage needs to be called.
- return !codec_buffer_wait_coordinator_ ? BIND : COPY;
+ return is_texture_owner_backed_ ? COPY : BIND;
}
bool CodecImage::BindTexImage(unsigned target) {
@@ -82,16 +81,17 @@ bool CodecImage::CopyTexImage(unsigned target) {
if (target != GL_TEXTURE_EXTERNAL_OES)
return false;
+ if (!output_buffer_renderer_)
+ return true;
+
GLint bound_service_id = 0;
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &bound_service_id);
// The currently bound texture should be the texture owner's texture.
if (bound_service_id !=
static_cast<GLint>(
- codec_buffer_wait_coordinator_->texture_owner()->GetTextureId()))
+ output_buffer_renderer_->texture_owner()->GetTextureId()))
return false;
- if (!output_buffer_renderer_)
- return true;
output_buffer_renderer_->RenderToTextureOwnerFrontBuffer(
BindingsMode::kEnsureTexImageBound);
@@ -113,7 +113,7 @@ bool CodecImage::ScheduleOverlayPlane(
bool enable_blend,
std::unique_ptr<gfx::GpuFence> gpu_fence) {
TRACE_EVENT0("media", "CodecImage::ScheduleOverlayPlane");
- if (codec_buffer_wait_coordinator_) {
+ if (is_texture_owner_backed_) {
DVLOG(1) << "Invalid call to ScheduleOverlayPlane; this image is "
"TextureOwner backed.";
return false;
@@ -131,7 +131,7 @@ void CodecImage::NotifyOverlayPromotion(bool promotion,
if (!promotion_hint_cb_)
return;
- if (!codec_buffer_wait_coordinator_ && promotion) {
+ if (!is_texture_owner_backed_ && promotion) {
// When |CodecImage| is already backed by SurfaceView, and it should be used
// as overlay.
@@ -157,16 +157,6 @@ void CodecImage::OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
uint64_t process_tracing_id,
const std::string& dump_name) {}
-void CodecImage::GetTextureMatrix(float matrix[16]) {
- static constexpr float kIdentity[16]{
- 1, 0, 0, 0, //
- 0, 1, 0, 0, //
- 0, 0, 1, 0, //
- 0, 0, 0, 1 //
- };
- memcpy(matrix, kIdentity, sizeof(kIdentity));
-}
-
void CodecImage::NotifyPromotionHint(bool promotion_hint,
int display_x,
int display_y,
@@ -174,7 +164,7 @@ void CodecImage::NotifyPromotionHint(bool promotion_hint,
int display_height) {
// TODO(crbug.com/1004859): Add back early skip due to suspecting affecting
// video smoothness.
- if (promotion_hint && !codec_buffer_wait_coordinator_)
+ if (promotion_hint && !is_texture_owner_backed_)
return;
NotifyOverlayPromotion(
@@ -241,11 +231,11 @@ CodecImage::GetAHardwareBuffer() {
// as free when viz is still using us for drawing. This can happen if the
// renderer crashes before receiving returns. It's hard to catch elsewhere,
// so just handle it gracefully here.
- if (!codec_buffer_wait_coordinator_)
+ if (!output_buffer_renderer_)
return nullptr;
RenderToTextureOwnerFrontBuffer(BindingsMode::kDontRestoreIfBound);
- return codec_buffer_wait_coordinator_->texture_owner()->GetAHardwareBuffer();
+ return output_buffer_renderer_->texture_owner()->GetAHardwareBuffer();
}
gfx::Rect CodecImage::GetCropRect() {
diff --git a/chromium/media/gpu/android/codec_image.h b/chromium/media/gpu/android/codec_image.h
index 8693118f918..c765e24dbfe 100644
--- a/chromium/media/gpu/android/codec_image.h
+++ b/chromium/media/gpu/android/codec_image.h
@@ -15,7 +15,6 @@
#include "base/memory/ref_counted_delete_on_sequence.h"
#include "gpu/command_buffer/service/gl_stream_texture_image.h"
#include "gpu/command_buffer/service/stream_texture_shared_image_interface.h"
-#include "media/gpu/android/codec_buffer_wait_coordinator.h"
#include "media/gpu/android/codec_output_buffer_renderer.h"
#include "media/gpu/android/promotion_hint_aggregator.h"
#include "media/gpu/media_gpu_export.h"
@@ -54,7 +53,7 @@ class MEDIA_GPU_EXPORT CodecImage
// not in use.
void Initialize(
std::unique_ptr<CodecOutputBufferRenderer> output_buffer_renderer,
- scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
+ bool is_texture_owner_backed,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb);
// Add a callback that will be called when we're marked as unused. Does not
@@ -91,7 +90,6 @@ class MEDIA_GPU_EXPORT CodecImage
GetAHardwareBuffer() override;
gfx::Rect GetCropRect() override;
// gpu::gles2::GLStreamTextureMatrix implementation
- void GetTextureMatrix(float xform[16]) override;
// Currently this API is implemented by the NotifyOverlayPromotion, since this
// API is expected to be removed.
void NotifyPromotionHint(bool promotion_hint,
@@ -129,18 +127,11 @@ class MEDIA_GPU_EXPORT CodecImage
// Whether this image is backed by a texture owner.
- // We want to check for texture_owner owned by
- // |codec_buffer_wait_coordinator_| and hence only checking for
- // |codec_buffer_wait_coordinator_| is enough here.
- // TODO(vikassoni): Update the method name in future refactorings.
- bool is_texture_owner_backed() const {
- return !!codec_buffer_wait_coordinator_;
- }
+ bool is_texture_owner_backed() const { return is_texture_owner_backed_; }
scoped_refptr<gpu::TextureOwner> texture_owner() const {
- return codec_buffer_wait_coordinator_
- ? codec_buffer_wait_coordinator_->texture_owner()
- : nullptr;
+ return output_buffer_renderer_ ? output_buffer_renderer_->texture_owner()
+ : nullptr;
}
// Renders this image to the front buffer of its backing surface.
@@ -180,9 +171,8 @@ class MEDIA_GPU_EXPORT CodecImage
// frame available event before calling UpdateTexImage().
bool RenderToTextureOwnerFrontBuffer(BindingsMode bindings_mode);
- // The CodecBufferWaitCoordinator that |output_buffer_| will be rendered to.
- // Or null, if this image is backed by an overlay.
- scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator_;
+ // Whether this image is texture_owner or overlay backed.
+ bool is_texture_owner_backed_ = false;
// The bounds last sent to the overlay.
gfx::Rect most_recent_bounds_;
diff --git a/chromium/media/gpu/android/codec_image_unittest.cc b/chromium/media/gpu/android/codec_image_unittest.cc
index 7fcd7eeba13..87496e335c9 100644
--- a/chromium/media/gpu/android/codec_image_unittest.cc
+++ b/chromium/media/gpu/android/codec_image_unittest.cc
@@ -94,7 +94,7 @@ class CodecImageTest : public testing::Test {
scoped_refptr<CodecImage> image = new CodecImage(buffer_renderer->size());
image->Initialize(
- std::move(buffer_renderer), codec_buffer_wait_coordinator,
+ std::move(buffer_renderer), kind == kTextureOwner,
base::BindRepeating(&PromotionHintReceiver::OnPromotionHint,
base::Unretained(&promotion_hint_receiver_)));
@@ -144,7 +144,7 @@ TEST_F(CodecImageTest, UnusedCBRunsOnNotifyUnused) {
base::MockCallback<CodecImage::UnusedCB> cb_2;
auto i = NewImage(kTextureOwner);
ASSERT_TRUE(i->get_codec_output_buffer_for_testing());
- ASSERT_TRUE(i->is_texture_owner_backed());
+ ASSERT_TRUE(i->HasTextureOwner());
i->AddUnusedCB(cb_1.Get());
i->AddUnusedCB(cb_2.Get());
EXPECT_CALL(cb_1, Run(i.get()));
@@ -153,7 +153,7 @@ TEST_F(CodecImageTest, UnusedCBRunsOnNotifyUnused) {
// Also verify that the output buffer and texture owner are released.
i->NotifyUnused();
EXPECT_FALSE(i->get_codec_output_buffer_for_testing());
- EXPECT_FALSE(i->is_texture_owner_backed());
+ EXPECT_FALSE(i->HasTextureOwner());
// Verify that an additional call doesn't crash. It should do nothing.
i->NotifyUnused();
@@ -391,7 +391,7 @@ TEST_F(CodecImageTest, CodedSizeVsVisibleSize) {
std::make_unique<CodecOutputBufferRenderer>(std::move(buffer), nullptr);
scoped_refptr<CodecImage> image = new CodecImage(coded_size);
- image->Initialize(std::move(buffer_renderer), nullptr,
+ image->Initialize(std::move(buffer_renderer), false,
PromotionHintAggregator::NotifyPromotionHintCB());
// Verify that CodecImage::GetSize returns coded_size and not visible_size
diff --git a/chromium/media/gpu/android/frame_info_helper.cc b/chromium/media/gpu/android/frame_info_helper.cc
index efe1873cb1c..b3cf5299aa7 100644
--- a/chromium/media/gpu/android/frame_info_helper.cc
+++ b/chromium/media/gpu/android/frame_info_helper.cc
@@ -4,10 +4,13 @@
#include "media/gpu/android/frame_info_helper.h"
+#include "base/threading/sequence_bound.h"
#include "gpu/command_buffer/service/shared_image_video.h"
#include "gpu/ipc/service/command_buffer_stub.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "gpu/ipc/service/gpu_channel_manager.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/gpu/android/codec_output_buffer_renderer.h"
namespace media {
@@ -21,100 +24,188 @@ FrameInfoHelper::FrameInfo& FrameInfoHelper::FrameInfo::operator=(
// Concrete implementation of FrameInfoHelper that renders output buffers and
// gets the FrameInfo they need.
-class FrameInfoHelperImpl : public FrameInfoHelper,
- public gpu::CommandBufferStub::DestructionObserver {
+class FrameInfoHelperImpl : public FrameInfoHelper {
public:
- FrameInfoHelperImpl(SharedImageVideoProvider::GetStubCB get_stub_cb) {
- stub_ = get_stub_cb.Run();
- if (stub_)
- stub_->AddDestructionObserver(this);
+ FrameInfoHelperImpl(scoped_refptr<base::SequencedTaskRunner> gpu_task_runner,
+ SharedImageVideoProvider::GetStubCB get_stub_cb) {
+ on_gpu_ = base::SequenceBound<OnGpu>(std::move(gpu_task_runner),
+ std::move(get_stub_cb));
}
- ~FrameInfoHelperImpl() override {
- if (stub_)
- stub_->RemoveDestructionObserver(this);
+ ~FrameInfoHelperImpl() override = default;
+
+ void GetFrameInfo(std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
+ FrameInfoReadyCB callback) override {
+ Request request = {.buffer_renderer = std::move(buffer_renderer),
+ .callback = std::move(callback)};
+ requests_.push(std::move(request));
+ // If there were no pending requests start processing queue now.
+ if (requests_.size() == 1)
+ ProcessRequestsQueue();
}
- void GetFrameInfo(
- std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
- base::OnceCallback<
- void(std::unique_ptr<CodecOutputBufferRenderer>, FrameInfo, bool)> cb)
- override {
- if (!buffer_renderer) {
- std::move(cb).Run(nullptr, FrameInfo(), false);
- return;
+ private:
+ struct Request {
+ std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer;
+ FrameInfoReadyCB callback;
+ };
+
+ class OnGpu : public gpu::CommandBufferStub::DestructionObserver {
+ public:
+ OnGpu(SharedImageVideoProvider::GetStubCB get_stub_cb) {
+ stub_ = get_stub_cb.Run();
+ if (stub_)
+ stub_->AddDestructionObserver(this);
}
- auto texture_owner = buffer_renderer->texture_owner();
+ ~OnGpu() override {
+ if (stub_)
+ stub_->RemoveDestructionObserver(this);
+ }
- FrameInfo info;
+ void OnWillDestroyStub(bool have_context) override {
+ DCHECK(stub_);
+ stub_ = nullptr;
+ }
- // Indicates that the FrameInfo is reliable and can be cached by caller.
- // It's true if we either return cached values or we attempted to render
- // frame and succeeded.
- bool success = true;
-
- // We default to visible size if if we can't get real size
- info.coded_size = buffer_renderer->size();
- info.visible_rect = gfx::Rect(info.coded_size);
-
- if (texture_owner) {
- if (visible_size_ == buffer_renderer->size()) {
- info = frame_info_;
- } else if (buffer_renderer->RenderToTextureOwnerFrontBuffer(
- CodecOutputBufferRenderer::BindingsMode::
- kDontRestoreIfBound)) {
- visible_size_ = buffer_renderer->size();
- texture_owner->GetCodedSizeAndVisibleRect(
- visible_size_, &frame_info_.coded_size, &frame_info_.visible_rect);
-
- frame_info_.ycbcr_info = GetYCbCrInfo(texture_owner.get());
- info = frame_info_;
- } else {
- // We attempted to render frame and failed, mark request as failed so
- // caller won't cache best-guess values.
- success = false;
+ void GetFrameInfo(
+ std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
+ base::OnceCallback<void(std::unique_ptr<CodecOutputBufferRenderer>,
+ base::Optional<FrameInfo>)> cb) {
+ DCHECK(buffer_renderer);
+
+ auto texture_owner = buffer_renderer->texture_owner();
+ DCHECK(texture_owner);
+
+ base::Optional<FrameInfo> info;
+
+ if (buffer_renderer->RenderToTextureOwnerFrontBuffer(
+ CodecOutputBufferRenderer::BindingsMode::kDontRestoreIfBound)) {
+ gfx::Size coded_size;
+ gfx::Rect visible_rect;
+ if (texture_owner->GetCodedSizeAndVisibleRect(
+ buffer_renderer->size(), &coded_size, &visible_rect)) {
+ info.emplace();
+ info->coded_size = coded_size;
+ info->visible_rect = visible_rect;
+ info->ycbcr_info = GetYCbCrInfo(texture_owner.get());
+ }
}
+
+ std::move(cb).Run(std::move(buffer_renderer), info);
+ }
+
+ private:
+ // Gets YCbCrInfo from last rendered frame.
+ base::Optional<gpu::VulkanYCbCrInfo> GetYCbCrInfo(
+ gpu::TextureOwner* texture_owner) {
+ gpu::ContextResult result;
+
+ if (!stub_)
+ return base::nullopt;
+
+ auto shared_context =
+ stub_->channel()->gpu_channel_manager()->GetSharedContextState(
+ &result);
+ auto context_provider =
+ (result == gpu::ContextResult::kSuccess) ? shared_context : nullptr;
+ if (!context_provider)
+ return base::nullopt;
+
+ return gpu::SharedImageVideo::GetYcbcrInfo(texture_owner,
+ context_provider);
}
- std::move(cb).Run(std::move(buffer_renderer), frame_info_, success);
+ gpu::CommandBufferStub* stub_ = nullptr;
+ };
+
+ FrameInfo GetFrameInfoWithVisibleSize(const gfx::Size& visible_size) {
+ FrameInfo info;
+ info.coded_size = visible_size;
+ info.visible_rect = gfx::Rect(visible_size);
+ return info;
}
- void OnWillDestroyStub(bool have_context) override {
- DCHECK(stub_);
- stub_ = nullptr;
+ void OnFrameInfoReady(
+ std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
+ base::Optional<FrameInfo> frame_info) {
+ DCHECK(buffer_renderer);
+ DCHECK(!requests_.empty());
+
+ auto& request = requests_.front();
+
+ if (frame_info) {
+ visible_size_ = buffer_renderer->size();
+ frame_info_ = *frame_info;
+ std::move(request.callback).Run(std::move(buffer_renderer), frame_info_);
+ } else {
+ // It's possible that we will fail to render frame and so weren't able to
+ // obtain FrameInfo. In this case we don't cache new values and complete
+ // current request with visible size, we will attempt to render next frame
+ // with next request.
+ auto info = GetFrameInfoWithVisibleSize(buffer_renderer->size());
+ std::move(request.callback)
+ .Run(std::move(buffer_renderer), std::move(info));
+ }
+ requests_.pop();
+ ProcessRequestsQueue();
}
- private:
- // Gets YCbCrInfo from last rendered frame.
- base::Optional<gpu::VulkanYCbCrInfo> GetYCbCrInfo(
- gpu::TextureOwner* texture_owner) {
- gpu::ContextResult result;
- if (!stub_)
- return base::nullopt;
-
- auto shared_context =
- stub_->channel()->gpu_channel_manager()->GetSharedContextState(&result);
- auto context_provider =
- (result == gpu::ContextResult::kSuccess) ? shared_context : nullptr;
- if (!context_provider)
- return base::nullopt;
-
- return gpu::SharedImageVideo::GetYcbcrInfo(texture_owner, context_provider);
+ void ProcessRequestsQueue() {
+ while (!requests_.empty()) {
+ auto& request = requests_.front();
+
+ if (!request.buffer_renderer) {
+ // If we don't have buffer_renderer we can Run callback immediately.
+ std::move(request.callback).Run(nullptr, FrameInfo());
+ } else if (!request.buffer_renderer->texture_owner()) {
+ // If there is no texture_owner (SurfaceView case), we can't render
+ // frame and get proper size. But as Display Compositor won't render
+ // this frame the actual size is not important, assume coded_size =
+ // visible_size.
+ auto info =
+ GetFrameInfoWithVisibleSize(request.buffer_renderer->size());
+ std::move(request.callback)
+ .Run(std::move(request.buffer_renderer), std::move(info));
+ } else if (visible_size_ == request.buffer_renderer->size()) {
+ // We have cached the results of last frame info request with the same
+ // size. We assume that coded_size doesn't change if the visible_size
+ // stays the same.
+ std::move(request.callback)
+ .Run(std::move(request.buffer_renderer), frame_info_);
+ } else {
+ // We have texture_owner and we don't have cached value, so we need to
+ // hop to GPU thread and render the frame to get proper size.
+ auto cb = BindToCurrentLoop(
+ base::BindOnce(&FrameInfoHelperImpl::OnFrameInfoReady,
+ weak_factory_.GetWeakPtr()));
+
+ on_gpu_.Post(FROM_HERE, &OnGpu::GetFrameInfo,
+ std::move(request.buffer_renderer), std::move(cb));
+ // We didn't complete this request quite yet, so we can't process queue
+ // any further.
+ break;
+ }
+ requests_.pop();
+ }
}
- gpu::CommandBufferStub* stub_ = nullptr;
+ base::SequenceBound<OnGpu> on_gpu_;
+ std::queue<Request> requests_;
+ // Cached values.
FrameInfo frame_info_;
gfx::Size visible_size_;
+
+ base::WeakPtrFactory<FrameInfoHelperImpl> weak_factory_{this};
};
// static
-base::SequenceBound<FrameInfoHelper> FrameInfoHelper::Create(
+std::unique_ptr<FrameInfoHelper> FrameInfoHelper::Create(
scoped_refptr<base::SequencedTaskRunner> gpu_task_runner,
SharedImageVideoProvider::GetStubCB get_stub_cb) {
- return base::SequenceBound<FrameInfoHelperImpl>(std::move(gpu_task_runner),
- std::move(get_stub_cb));
+ return std::make_unique<FrameInfoHelperImpl>(std::move(gpu_task_runner),
+ std::move(get_stub_cb));
}
-} // namespace media
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/gpu/android/frame_info_helper.h b/chromium/media/gpu/android/frame_info_helper.h
index 5fc4ffca328..1f60bceb094 100644
--- a/chromium/media/gpu/android/frame_info_helper.h
+++ b/chromium/media/gpu/android/frame_info_helper.h
@@ -6,12 +6,11 @@
#define MEDIA_GPU_ANDROID_FRAME_INFO_HELPER_H_
#include "base/optional.h"
-#include "base/threading/sequence_bound.h"
-#include "media/gpu/android/codec_image.h"
#include "media/gpu/android/shared_image_video_provider.h"
#include "media/gpu/media_gpu_export.h"
namespace media {
+class CodecOutputBufferRenderer;
// Helper class to fetch YCbCrInfo for Vulkan from a CodecImage.
class MEDIA_GPU_EXPORT FrameInfoHelper {
@@ -29,7 +28,11 @@ class MEDIA_GPU_EXPORT FrameInfoHelper {
base::Optional<gpu::VulkanYCbCrInfo> ycbcr_info;
};
- static base::SequenceBound<FrameInfoHelper> Create(
+ using FrameInfoReadyCB =
+ base::OnceCallback<void(std::unique_ptr<CodecOutputBufferRenderer>,
+ FrameInfo)>;
+
+ static std::unique_ptr<FrameInfoHelper> Create(
scoped_refptr<base::SequencedTaskRunner> gpu_task_runner,
SharedImageVideoProvider::GetStubCB get_stub_cb);
@@ -40,9 +43,11 @@ class MEDIA_GPU_EXPORT FrameInfoHelper {
// attempt to get YCbCrInfo and cache it. If all necessary info is cached the
// call will leave buffer_renderer intact and it can be rendered later.
// Rendering can fail for reasons. This function will make best efforts to
- // fill FrameInfo which can be used to create VideoFrame, but shouldn't be
- // cached by caller. Last parameter in |cb| is bool that indicates that info
- // is reliable.
+ // fill FrameInfo which can be used to create VideoFrame.
+ //
+ // Callbacks will be executed and on callers sequence and guaranteed to be
+ // called in order of GetFrameInfo calls. Callback can be called before this
+ // function returns if all necessary info is available right away.
//
// While this API might seem to be out of its Vulkan mind, it's this
// complicated to (a) prevent rendering frames out of order to the front
@@ -50,9 +55,7 @@ class MEDIA_GPU_EXPORT FrameInfoHelper {
// can't get a YCbCrInfo from a CodecImage due to timeouts.
virtual void GetFrameInfo(
std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
- base::OnceCallback<void(std::unique_ptr<CodecOutputBufferRenderer>,
- FrameInfo,
- bool)> cb) = 0;
+ FrameInfoReadyCB callback) = 0;
protected:
FrameInfoHelper() = default;
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.cc b/chromium/media/gpu/android/media_codec_video_decoder.cc
index 6e127512fc2..848983075f6 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder.cc
@@ -19,6 +19,7 @@
#include "base/trace_event/trace_event.h"
#include "media/base/android/media_codec_bridge_impl.h"
#include "media/base/android/media_codec_util.h"
+#include "media/base/async_destroy_video_decoder.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/cdm_context.h"
#include "media/base/decoder_buffer.h"
@@ -227,7 +228,9 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
overlay_factory_cb_(std::move(overlay_factory_cb)),
device_info_(device_info),
enable_threaded_texture_mailboxes_(
- gpu_preferences.enable_threaded_texture_mailboxes) {
+ gpu_preferences.enable_threaded_texture_mailboxes),
+ allow_nonsecure_overlays_(
+ base::FeatureList::IsEnabled(media::kAllowNonSecureOverlays)) {
DVLOG(2) << __func__;
surface_chooser_helper_.chooser()->SetClientCallbacks(
base::Bind(&MediaCodecVideoDecoder::OnSurfaceChosen,
@@ -236,44 +239,69 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
weak_factory_.GetWeakPtr(), nullptr));
}
+std::unique_ptr<VideoDecoder> MediaCodecVideoDecoder::Create(
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuFeatureInfo& gpu_feature_info,
+ std::unique_ptr<MediaLog> media_log,
+ DeviceInfo* device_info,
+ CodecAllocator* codec_allocator,
+ std::unique_ptr<AndroidVideoSurfaceChooser> surface_chooser,
+ AndroidOverlayMojoFactoryCB overlay_factory_cb,
+ RequestOverlayInfoCB request_overlay_info_cb,
+ std::unique_ptr<VideoFrameFactory> video_frame_factory) {
+ auto* decoder = new MediaCodecVideoDecoder(
+ gpu_preferences, gpu_feature_info, std::move(media_log), device_info,
+ codec_allocator, std::move(surface_chooser),
+ std::move(overlay_factory_cb), std::move(request_overlay_info_cb),
+ std::move(video_frame_factory));
+ return std::make_unique<AsyncDestroyVideoDecoder<MediaCodecVideoDecoder>>(
+ base::WrapUnique(decoder));
+}
+
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
DVLOG(2) << __func__;
TRACE_EVENT0("media", "MediaCodecVideoDecoder::~MediaCodecVideoDecoder");
ReleaseCodec();
}
-void MediaCodecVideoDecoder::Destroy() {
+void MediaCodecVideoDecoder::DestroyAsync(
+ std::unique_ptr<MediaCodecVideoDecoder> decoder) {
DVLOG(1) << __func__;
TRACE_EVENT0("media", "MediaCodecVideoDecoder::Destroy");
+ DCHECK(decoder);
+
+ // This will be destroyed by a call to |DeleteSoon|
+ // in |OnCodecDrained|.
+ auto* self = decoder.release();
// Cancel pending callbacks.
//
// WARNING: This will lose the callback we've given to MediaCodecBridge for
// asynchronous notifications; so we must not leave this function with any
// work necessary from StartTimerOrPumpCodec().
- weak_factory_.InvalidateWeakPtrs();
+ self->weak_factory_.InvalidateWeakPtrs();
- if (media_crypto_context_) {
+ if (self->media_crypto_context_) {
// Cancel previously registered callback (if any).
- media_crypto_context_->SetMediaCryptoReadyCB(base::NullCallback());
- if (cdm_registration_id_)
- media_crypto_context_->UnregisterPlayer(cdm_registration_id_);
- media_crypto_context_ = nullptr;
- cdm_registration_id_ = 0;
+ self->media_crypto_context_->SetMediaCryptoReadyCB(base::NullCallback());
+ if (self->cdm_registration_id_)
+ self->media_crypto_context_->UnregisterPlayer(self->cdm_registration_id_);
+ self->media_crypto_context_ = nullptr;
+ self->cdm_registration_id_ = 0;
}
// Mojo callbacks require that they're run before destruction.
- if (reset_cb_)
- std::move(reset_cb_).Run();
+ if (self->reset_cb_)
+ std::move(self->reset_cb_).Run();
// Cancel callbacks we no longer want.
- codec_allocator_weak_factory_.InvalidateWeakPtrs();
- CancelPendingDecodes(DecodeStatus::ABORTED);
- StartDrainingCodec(DrainType::kForDestroy);
+ self->codec_allocator_weak_factory_.InvalidateWeakPtrs();
+ self->CancelPendingDecodes(DecodeStatus::ABORTED);
+ self->StartDrainingCodec(DrainType::kForDestroy);
// Per the WARNING above. Validate that no draining work remains.
- if (using_async_api_)
- DCHECK(!drain_type_.has_value());
+ if (self->using_async_api_)
+ DCHECK(!self->drain_type_.has_value());
}
void MediaCodecVideoDecoder::Initialize(const VideoDecoderConfig& config,
@@ -471,6 +499,12 @@ void MediaCodecVideoDecoder::StartLazyInit() {
overlay_mode = VideoFrameFactory::OverlayMode::kRequestPromotionHints;
}
+ // Regardless of whether we're using SurfaceControl or Dialog overlays, don't
+ // allow any overlays in A/B power testing mode, unless this requires a
+ // secure surface. Don't fail the playback for power testing.
+ if (!requires_secure_codec_ && !allow_nonsecure_overlays_)
+ overlay_mode = VideoFrameFactory::OverlayMode::kDontRequestPromotionHints;
+
video_frame_factory_->Initialize(
overlay_mode,
base::Bind(&MediaCodecVideoDecoder::OnVideoFrameFactoryInitialized,
@@ -488,10 +522,16 @@ void MediaCodecVideoDecoder::OnVideoFrameFactoryInitialized(
}
texture_owner_bundle_ = new CodecSurfaceBundle(std::move(texture_owner));
+ // This is for A/B power testing only. Turn off Dialog-based overlays in
+ // power testing mode, unless we need them for L1 content.
+ // See https://crbug.com/1081346 .
+ const bool allowed_for_experiment =
+ requires_secure_codec_ || allow_nonsecure_overlays_;
+
// Overlays are disabled when |enable_threaded_texture_mailboxes| is true
// (http://crbug.com/582170).
if (enable_threaded_texture_mailboxes_ ||
- !device_info_->SupportsOverlaySurfaces()) {
+ !device_info_->SupportsOverlaySurfaces() || !allowed_for_experiment) {
OnSurfaceChosen(nullptr);
return;
}
@@ -975,7 +1015,7 @@ void MediaCodecVideoDecoder::ForwardVideoFrame(
if (reset_generation == reset_generation_) {
// TODO(liberato): We might actually have a SW decoder. Consider setting
// this to false if so, especially for higher bitrates.
- frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT, true);
+ frame->metadata()->power_efficient = true;
output_cb_.Run(std::move(frame));
}
}
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.h b/chromium/media/gpu/android/media_codec_video_decoder.h
index 59055e4d359..7e87139ae32 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.h
+++ b/chromium/media/gpu/android/media_codec_video_decoder.h
@@ -58,11 +58,14 @@ struct PendingDecode {
// playbacks that need them.
// TODO: Lazy initialization should be handled at a higher layer of the media
// stack for both simplicity and cross platform support.
-class MEDIA_GPU_EXPORT MediaCodecVideoDecoder : public VideoDecoder {
+class MEDIA_GPU_EXPORT MediaCodecVideoDecoder final : public VideoDecoder {
public:
static std::vector<SupportedVideoDecoderConfig> GetSupportedConfigs();
- MediaCodecVideoDecoder(
+ ~MediaCodecVideoDecoder() override;
+ static void DestroyAsync(std::unique_ptr<MediaCodecVideoDecoder>);
+
+ static std::unique_ptr<VideoDecoder> Create(
const gpu::GpuPreferences& gpu_preferences,
const gpu::GpuFeatureInfo& gpu_feature_info,
std::unique_ptr<MediaLog> media_log,
@@ -87,9 +90,20 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder : public VideoDecoder {
bool CanReadWithoutStalling() const override;
int GetMaxDecodeRequests() const override;
- protected:
- // Protected for testing.
- ~MediaCodecVideoDecoder() override;
+ private:
+ // The test has access for PumpCodec() and the constructor.
+ friend class MediaCodecVideoDecoderTest;
+
+ MediaCodecVideoDecoder(
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuFeatureInfo& gpu_feature_info,
+ std::unique_ptr<MediaLog> media_log,
+ DeviceInfo* device_info,
+ CodecAllocator* codec_allocator,
+ std::unique_ptr<AndroidVideoSurfaceChooser> surface_chooser,
+ AndroidOverlayMojoFactoryCB overlay_factory_cb,
+ RequestOverlayInfoCB request_overlay_info_cb,
+ std::unique_ptr<VideoFrameFactory> video_frame_factory);
// Set up |cdm_context| as part of initialization. Guarantees that |init_cb|
// will be called depending on the outcome, though not necessarily before this
@@ -102,11 +116,6 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder : public VideoDecoder {
JavaObjectPtr media_crypto,
bool requires_secure_video_codec);
- private:
- // The test has access for PumpCodec().
- friend class MediaCodecVideoDecoderTest;
- friend class base::DeleteHelper<MediaCodecVideoDecoder>;
-
enum class State {
// Initializing resources required to create a codec.
kInitializing,
@@ -124,9 +133,6 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder : public VideoDecoder {
enum class DrainType { kForReset, kForDestroy };
- // Starts teardown.
- void Destroy() override;
-
// Finishes initialization.
void StartLazyInit();
void OnVideoFrameFactoryInitialized(
@@ -327,6 +333,11 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder : public VideoDecoder {
// Optional crypto object from the Cdm.
base::android::ScopedJavaGlobalRef<jobject> media_crypto_;
+ // For A/B power testing, this causes all non-L1 content to avoid overlays.
+ // This is only for A/B power testing, and can be removed after that.
+ // See https://crbug.com/1081346 .
+ bool allow_nonsecure_overlays_ = true;
+
base::WeakPtrFactory<MediaCodecVideoDecoder> weak_factory_{this};
base::WeakPtrFactory<MediaCodecVideoDecoder> codec_allocator_weak_factory_{
this};
diff --git a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
index 61d4c13f3ed..16f3a5f4871 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
@@ -17,6 +17,7 @@
#include "media/base/android/media_codec_util.h"
#include "media/base/android/mock_android_overlay.h"
#include "media/base/android/mock_media_crypto_context.h"
+#include "media/base/async_destroy_video_decoder.h"
#include "media/base/decoder_buffer.h"
#include "media/base/media_util.h"
#include "media/base/test_helpers.h"
@@ -52,12 +53,6 @@ std::unique_ptr<AndroidOverlay> CreateAndroidOverlayCb(
return nullptr;
}
-// Make MCVD's destruction observable for teardown tests.
-struct DestructionObservableMCVD : public DestructionObservable,
- public MediaCodecVideoDecoder {
- using MediaCodecVideoDecoder::MediaCodecVideoDecoder;
-};
-
} // namespace
class MockVideoFrameFactory : public VideoFrameFactory {
@@ -149,18 +144,16 @@ class MediaCodecVideoDecoderTest : public testing::TestWithParam<VideoCodec> {
ON_CALL(*video_frame_factory_, Initialize(ExpectedOverlayMode(), _))
.WillByDefault(RunCallback<1>(texture_owner));
- auto* observable_mcvd = new DestructionObservableMCVD(
+ auto* mcvd = new MediaCodecVideoDecoder(
gpu_preferences_, gpu_feature_info_, std::make_unique<NullMediaLog>(),
device_info_.get(), codec_allocator_.get(), std::move(surface_chooser),
base::BindRepeating(&CreateAndroidOverlayCb),
base::BindRepeating(&MediaCodecVideoDecoderTest::RequestOverlayInfoCb,
base::Unretained(this)),
std::move(video_frame_factory));
- mcvd_.reset(observable_mcvd);
- mcvd_raw_ = observable_mcvd;
- destruction_observer_ = observable_mcvd->CreateDestructionObserver();
- // Ensure MCVD doesn't leak by default.
- destruction_observer_->ExpectDestruction();
+ mcvd_ = std::make_unique<AsyncDestroyVideoDecoder<MediaCodecVideoDecoder>>(
+ base::WrapUnique(mcvd));
+ mcvd_raw_ = mcvd;
}
VideoFrameFactory::OverlayMode ExpectedOverlayMode() const {
@@ -291,7 +284,6 @@ class MediaCodecVideoDecoderTest : public testing::TestWithParam<VideoCodec> {
gpu::MockTextureOwner* texture_owner_;
MockVideoFrameFactory* video_frame_factory_;
NiceMock<base::MockCallback<VideoDecoder::DecodeCB>> decode_cb_;
- std::unique_ptr<DestructionObserver> destruction_observer_;
ProvideOverlayInfoCB provide_overlay_info_cb_;
bool restart_for_transitions_;
gpu::GpuPreferences gpu_preferences_;
@@ -308,7 +300,7 @@ class MediaCodecVideoDecoderTest : public testing::TestWithParam<VideoCodec> {
// |mcvd_raw_| lets us call PumpCodec() even after |mcvd_| is dropped, for
// testing the teardown path.
MediaCodecVideoDecoder* mcvd_raw_;
- std::unique_ptr<MediaCodecVideoDecoder> mcvd_;
+ std::unique_ptr<VideoDecoder> mcvd_;
};
// Tests which only work for a single codec.
@@ -687,9 +679,6 @@ TEST_P(MediaCodecVideoDecoderVp8Test, UnregisterPlayerBeforeAsyncDestruction) {
// before the decoder is actually destructed, asynchronously.
EXPECT_CALL(*cdm_, UnregisterPlayer(MockMediaCryptoContext::kRegistrationId));
mcvd_.reset();
-
- // Make sure the decoder has not been destroyed yet.
- destruction_observer_->DoNotAllowDestruction();
}
// A reference test for UnregisterPlayerBeforeAsyncDestruction.
@@ -704,9 +693,6 @@ TEST_P(MediaCodecVideoDecoderVp8Test, UnregisterPlayerBeforeSyncDestruction) {
// When |mcvd_| is reset, expect that it will unregister itself immediately.
EXPECT_CALL(*cdm_, UnregisterPlayer(MockMediaCryptoContext::kRegistrationId));
mcvd_.reset();
-
- // Make sure the decoder is now destroyed.
- destruction_observer_->ExpectDestruction();
}
TEST_P(MediaCodecVideoDecoderVp8Test, ResetDoesNotDrainVp8WithAsyncApi) {
@@ -818,18 +804,9 @@ TEST_P(MediaCodecVideoDecoderTest, EosDecodeCbIsRunAfterEosIsDequeued) {
std::move(video_frame_factory_->last_closure_).Run();
}
-TEST_P(MediaCodecVideoDecoderTest, TeardownBeforeInitWorks) {
- // Since we assert that MCVD is destructed by default, this test verifies that
- // MCVD is destructed safely before Initialize().
-}
-
TEST_P(MediaCodecVideoDecoderTest, TeardownInvalidatesCodecCreationWeakPtr) {
InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
- destruction_observer_->DoNotAllowDestruction();
mcvd_.reset();
- // DeleteSoon() is now pending. Ensure it's safe if the codec creation
- // completes before it runs.
- destruction_observer_->ExpectDestruction();
EXPECT_CALL(*codec_allocator_, MockReleaseMediaCodec(NotNull()));
ASSERT_TRUE(codec_allocator_->ProvideMockCodecAsync());
}
@@ -837,11 +814,7 @@ TEST_P(MediaCodecVideoDecoderTest, TeardownInvalidatesCodecCreationWeakPtr) {
TEST_P(MediaCodecVideoDecoderTest,
TeardownInvalidatesCodecCreationWeakPtrButDoesNotCallReleaseMediaCodec) {
InitializeWithTextureOwner_OneDecodePending(TestVideoConfig::Large(codec_));
- destruction_observer_->DoNotAllowDestruction();
mcvd_.reset();
- // DeleteSoon() is now pending. Ensure it's safe if the codec creation
- // completes before it runs.
- destruction_observer_->ExpectDestruction();
// A null codec should not be released via ReleaseMediaCodec().
EXPECT_CALL(*codec_allocator_, MockReleaseMediaCodec(_)).Times(0);
@@ -880,7 +853,6 @@ TEST_P(MediaCodecVideoDecoderVp8Test,
PumpCodec();
// MCVD should not be destructed immediately.
- destruction_observer_->DoNotAllowDestruction();
mcvd_.reset();
base::RunLoop().RunUntilIdle();
@@ -888,7 +860,6 @@ TEST_P(MediaCodecVideoDecoderVp8Test,
codec->AcceptOneInput(MockMediaCodecBridge::kEos);
codec->ProduceOneOutput(MockMediaCodecBridge::kEos);
EXPECT_CALL(*codec, Flush()).Times(0);
- destruction_observer_->ExpectDestruction();
PumpCodec();
base::RunLoop().RunUntilIdle();
}
@@ -1000,10 +971,7 @@ TEST_P(MediaCodecVideoDecoderTest, VideoFramesArePowerEfficient) {
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(!!most_recent_frame_);
- bool power_efficient = false;
- EXPECT_TRUE(most_recent_frame_->metadata()->GetBoolean(
- VideoFrameMetadata::POWER_EFFICIENT, &power_efficient));
- EXPECT_TRUE(power_efficient);
+ EXPECT_TRUE(most_recent_frame_->metadata()->power_efficient);
}
TEST_P(MediaCodecVideoDecoderH264Test, CsdIsIncludedInCodecConfig) {
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.cc b/chromium/media/gpu/android/video_frame_factory_impl.cc
index b7c768bae0c..1132f5995de 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl.cc
@@ -81,7 +81,7 @@ VideoFrameFactoryImpl::VideoFrameFactoryImpl(
const gpu::GpuPreferences& gpu_preferences,
std::unique_ptr<SharedImageVideoProvider> image_provider,
std::unique_ptr<MaybeRenderEarlyManager> mre_manager,
- base::SequenceBound<FrameInfoHelper> frame_info_helper)
+ std::unique_ptr<FrameInfoHelper> frame_info_helper)
: image_provider_(std::move(image_provider)),
gpu_task_runner_(std::move(gpu_task_runner)),
enable_threaded_texture_mailboxes_(
@@ -171,7 +171,7 @@ void VideoFrameFactoryImpl::CreateVideoFrame(
auto image_ready_cb =
base::BindOnce(&VideoFrameFactoryImpl::CreateVideoFrame_OnImageReady,
weak_factory_.GetWeakPtr(), std::move(output_cb),
- timestamp, natural_size, codec_buffer_wait_coordinator_,
+ timestamp, natural_size, !!codec_buffer_wait_coordinator_,
std::move(promotion_hint_cb), pixel_format, overlay_mode_,
enable_threaded_texture_mailboxes_, gpu_task_runner_);
@@ -181,48 +181,20 @@ void VideoFrameFactoryImpl::CreateVideoFrame(
void VideoFrameFactoryImpl::RequestImage(
std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
ImageWithInfoReadyCB image_ready_cb) {
- if (buffer_renderer && visible_size_ == buffer_renderer->size()) {
- auto cb = base::BindOnce(std::move(image_ready_cb),
- std::move(buffer_renderer), frame_info_);
-
- image_provider_->RequestImage(
- std::move(cb), image_spec_,
- codec_buffer_wait_coordinator_
- ? codec_buffer_wait_coordinator_->texture_owner()
- : nullptr);
- return;
- }
-
- // We need to reset size to make sure VFFI pipeline is still ordered.
- // e.g: CreateVideoFrame is called with new size. We post task to GPU thread
- // to get new frame info. While we wait CreateVideoFrame might be called with
- // old size again and if we don't reset size here we will skip GPU hop and new
- // frame will be created earlier than first one.
- visible_size_ = gfx::Size();
-
- auto info_cb = BindToCurrentLoop(
+ auto info_cb =
base::BindOnce(&VideoFrameFactoryImpl::CreateVideoFrame_OnFrameInfoReady,
weak_factory_.GetWeakPtr(), std::move(image_ready_cb),
- codec_buffer_wait_coordinator_));
+ codec_buffer_wait_coordinator_);
- frame_info_helper_.Post(FROM_HERE, &FrameInfoHelper::GetFrameInfo,
- std::move(buffer_renderer), std::move(info_cb));
+ frame_info_helper_->GetFrameInfo(std::move(buffer_renderer),
+ std::move(info_cb));
}
void VideoFrameFactoryImpl::CreateVideoFrame_OnFrameInfoReady(
ImageWithInfoReadyCB image_ready_cb,
scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
std::unique_ptr<CodecOutputBufferRenderer> output_buffer_renderer,
- FrameInfoHelper::FrameInfo frame_info,
- bool success) {
- // To get frame info we need to render frame which might fail for variety of
- // reason. FrameInfoHelper will provide best values we can proceed with, but
- // we should not cache it and attempt to get info for next frame.
- if (success) {
- frame_info_ = frame_info;
- visible_size_ = output_buffer_renderer->size();
- }
-
+ FrameInfoHelper::FrameInfo frame_info) {
// If we don't have output buffer here we can't rely on reply from
// FrameInfoHelper as there might be not cached value and we can't render
// nothing. But in this case call comes from RunAfterPendingVideoFrames and we
@@ -246,7 +218,7 @@ void VideoFrameFactoryImpl::CreateVideoFrame_OnImageReady(
OnceOutputCB output_cb,
base::TimeDelta timestamp,
gfx::Size natural_size,
- scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
+ bool is_texture_owner_backed,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
VideoPixelFormat pixel_format,
OverlayMode overlay_mode,
@@ -268,7 +240,7 @@ void VideoFrameFactoryImpl::CreateVideoFrame_OnImageReady(
// When we remove the output buffer management from CodecImage, then that's
// what we'd have a reference to here rather than CodecImage.
record.codec_image_holder->codec_image_raw()->Initialize(
- std::move(output_buffer_renderer), codec_buffer_wait_coordinator,
+ std::move(output_buffer_renderer), is_texture_owner_backed,
std::move(promotion_hint_cb));
// Send the CodecImage (via holder, since we can't touch the refcount here) to
@@ -301,7 +273,7 @@ void VideoFrameFactoryImpl::CreateVideoFrame_OnImageReady(
// The frames must be copied when threaded texture mailboxes are in use
// (http://crbug.com/582170).
if (enable_threaded_texture_mailboxes)
- frame->metadata()->SetBoolean(VideoFrameMetadata::COPY_REQUIRED, true);
+ frame->metadata()->copy_required = true;
const bool is_surface_control =
overlay_mode == OverlayMode::kSurfaceControlSecure ||
@@ -309,25 +281,20 @@ void VideoFrameFactoryImpl::CreateVideoFrame_OnImageReady(
const bool wants_promotion_hints =
overlay_mode == OverlayMode::kRequestPromotionHints;
- // Remember that we can't access |codec_buffer_wait_coordinator|, but we can
- // check if we have one here.
bool allow_overlay = false;
if (is_surface_control) {
- DCHECK(codec_buffer_wait_coordinator);
+ DCHECK(is_texture_owner_backed);
allow_overlay = true;
} else {
// We unconditionally mark the picture as overlayable, even if
- // |!codec_buffer_wait_coordinator|, if we want to get hints. It's
+ // |!is_texture_owner_backed|, if we want to get hints. It's
// required, else we won't get hints.
- allow_overlay = !codec_buffer_wait_coordinator || wants_promotion_hints;
+ allow_overlay = !is_texture_owner_backed || wants_promotion_hints;
}
- frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY,
- allow_overlay);
- frame->metadata()->SetBoolean(VideoFrameMetadata::WANTS_PROMOTION_HINT,
- wants_promotion_hints);
- frame->metadata()->SetBoolean(VideoFrameMetadata::TEXTURE_OWNER,
- !!codec_buffer_wait_coordinator);
+ frame->metadata()->allow_overlay = allow_overlay;
+ frame->metadata()->wants_promotion_hint = wants_promotion_hints;
+ frame->metadata()->texture_owner = is_texture_owner_backed;
// TODO(liberato): if this is run via being dropped, then it would be nice
// to find that out rather than treating the image as unused. If the renderer
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.h b/chromium/media/gpu/android/video_frame_factory_impl.h
index 624d7d2b650..489149eb765 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.h
+++ b/chromium/media/gpu/android/video_frame_factory_impl.h
@@ -10,7 +10,6 @@
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "base/single_thread_task_runner.h"
-#include "base/threading/sequence_bound.h"
#include "gpu/config/gpu_preferences.h"
#include "media/base/video_frame.h"
#include "media/gpu/android/codec_buffer_wait_coordinator.h"
@@ -52,7 +51,7 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
const gpu::GpuPreferences& gpu_preferences,
std::unique_ptr<SharedImageVideoProvider> image_provider,
std::unique_ptr<MaybeRenderEarlyManager> mre_manager,
- base::SequenceBound<FrameInfoHelper> frame_info_helper);
+ std::unique_ptr<FrameInfoHelper> frame_info_helper);
~VideoFrameFactoryImpl() override;
void Initialize(OverlayMode overlay_mode, InitCB init_cb) override;
@@ -91,7 +90,7 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
OnceOutputCB output_cb,
base::TimeDelta timestamp,
gfx::Size natural_size,
- scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
+ bool is_texture_owner_backed,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
VideoPixelFormat pixel_format,
OverlayMode overlay_mode,
@@ -105,8 +104,7 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
ImageWithInfoReadyCB image_ready_cb,
scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
std::unique_ptr<CodecOutputBufferRenderer> output_buffer_renderer,
- FrameInfoHelper::FrameInfo frame_info,
- bool success);
+ FrameInfoHelper::FrameInfo frame_info);
MaybeRenderEarlyManager* mre_manager() const { return mre_manager_.get(); }
@@ -128,12 +126,8 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
std::unique_ptr<MaybeRenderEarlyManager> mre_manager_;
- // Caches FrameInfo and visible size it was cached for.
- gfx::Size visible_size_;
- FrameInfoHelper::FrameInfo frame_info_;
-
- // Optional helper to get the Vulkan YCbCrInfo.
- base::SequenceBound<FrameInfoHelper> frame_info_helper_;
+ // Helper to get coded_size and optional Vulkan YCbCrInfo.
+ std::unique_ptr<FrameInfoHelper> frame_info_helper_;
// The current image spec that we'll use to request images.
SharedImageVideoProvider::ImageSpec image_spec_;
diff --git a/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc b/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
index ade0a27c05d..13231efe252 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
@@ -44,46 +44,14 @@ class MockMaybeRenderEarlyManager : public MaybeRenderEarlyManager {
class MockFrameInfoHelper : public FrameInfoHelper,
public DestructionObservable {
public:
- MockFrameInfoHelper(MockFrameInfoHelper** thiz) { *thiz = this; }
-
- void GetFrameInfo(
- std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
- base::OnceCallback<
- void(std::unique_ptr<CodecOutputBufferRenderer>, FrameInfo, bool)> cb)
- override {
- MockGetFrameInfo(buffer_renderer.get());
- cb_ = std::move(cb);
- buffer_renderer_ = std::move(buffer_renderer);
-
- if (run_callback_automatically_) {
- RunWithYcbCrInfo(true);
- base::RunLoop().RunUntilIdle();
- }
- }
-
- void RunWithYcbCrInfo(bool success) {
- DCHECK(buffer_renderer_);
-
+ void GetFrameInfo(std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
+ FrameInfoReadyCB cb) override {
FrameInfo info;
- info.coded_size = buffer_renderer_->size();
+ info.coded_size = buffer_renderer->size();
info.visible_rect = gfx::Rect(info.coded_size);
- std::move(cb_).Run(std::move(buffer_renderer_), info, success);
- }
-
- void set_run_callback_automatically(bool run_callback_automatically) {
- run_callback_automatically_ = run_callback_automatically;
+ std::move(cb).Run(std::move(buffer_renderer), info);
}
-
- MOCK_METHOD1(MockGetFrameInfo,
- void(CodecOutputBufferRenderer* buffer_renderer));
-
- private:
- bool run_callback_automatically_ = true;
- base::OnceCallback<
- void(std::unique_ptr<CodecOutputBufferRenderer>, FrameInfo, bool)>
- cb_;
- std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer_;
};
class VideoFrameFactoryImplTest : public testing::Test {
@@ -96,15 +64,11 @@ class VideoFrameFactoryImplTest : public testing::Test {
auto mre_manager = std::make_unique<MockMaybeRenderEarlyManager>();
mre_manager_raw_ = mre_manager.get();
- auto ycbcr_helper = base::SequenceBound<MockFrameInfoHelper>(
- task_runner_, &ycbcr_helper_raw_);
- base::RunLoop().RunUntilIdle(); // Init |ycbcr_helper_raw_|.
- ycbcr_destruction_observer_ =
- ycbcr_helper_raw_->CreateDestructionObserver();
+ auto info_helper = std::make_unique<MockFrameInfoHelper>();
impl_ = std::make_unique<VideoFrameFactoryImpl>(
task_runner_, gpu_preferences_, std::move(image_provider),
- std::move(mre_manager), std::move(ycbcr_helper));
+ std::move(mre_manager), std::move(info_helper));
auto texture_owner = base::MakeRefCounted<NiceMock<gpu::MockTextureOwner>>(
0, nullptr, nullptr, true);
auto codec_buffer_wait_coordinator =
@@ -177,7 +141,6 @@ class VideoFrameFactoryImplTest : public testing::Test {
// Sent to |impl_| by RequestVideoFrame..
base::MockCallback<VideoFrameFactory::OnceOutputCB> output_cb_;
- MockFrameInfoHelper* ycbcr_helper_raw_ = nullptr;
std::unique_ptr<DestructionObserver> ycbcr_destruction_observer_;
gpu::GpuPreferences gpu_preferences_;
@@ -272,75 +235,4 @@ TEST_F(VideoFrameFactoryImplTest,
impl_ = nullptr;
base::RunLoop().RunUntilIdle();
}
-
-TEST_F(VideoFrameFactoryImplTest, DoesCallFrameInfoHelperIfVulkan) {
- // We will be driving callback by ourselves in this test.
- ycbcr_helper_raw_->set_run_callback_automatically(false);
- // Expect call to get info for the first frame.
- EXPECT_CALL(*ycbcr_helper_raw_, MockGetFrameInfo(_)).Times(1);
-
- RequestVideoFrame();
-
- // Provide info. It should send image request.
- ycbcr_helper_raw_->RunWithYcbCrInfo(true);
- base::RunLoop().RunUntilIdle();
-
- testing::Mock::VerifyAndClearExpectations(ycbcr_helper_raw_);
-
- // Fulfilling image request should provide video frame.
- EXPECT_CALL(output_cb_, Run(_)).Times(1);
-
- auto image_record = MakeImageRecord();
- image_provider_raw_->ProvideOneRequestedImage(&image_record);
- base::RunLoop().RunUntilIdle();
-
- // Verify that no more calls happen, since we don't want thread hops on every
- // frame. Note that multiple could be dispatched before now. It should still
- // send along a VideoFrame, though.
- EXPECT_CALL(*ycbcr_helper_raw_, MockGetFrameInfo(_)).Times(0);
- EXPECT_CALL(output_cb_, Run(_)).Times(1);
-
- RequestVideoFrame();
- auto other_image_record = MakeImageRecord();
- // If the helper hasn't been destroyed, then we don't expect it to be called.
- image_provider_raw_->ProvideOneRequestedImage(&other_image_record);
- base::RunLoop().RunUntilIdle();
-}
-
-TEST_F(VideoFrameFactoryImplTest, NullYCbCrInfoDoesntCrash) {
- // We will be driving callback by ourselves in this test.
- ycbcr_helper_raw_->set_run_callback_automatically(false);
-
- // Expect call to get info for the first frame.
- EXPECT_CALL(*ycbcr_helper_raw_, MockGetFrameInfo(_)).Times(1);
-
- RequestVideoFrame();
-
- // Provide info. It should send image request.
- ycbcr_helper_raw_->RunWithYcbCrInfo(false);
- base::RunLoop().RunUntilIdle();
-
- testing::Mock::VerifyAndClearExpectations(ycbcr_helper_raw_);
-
- // Fulfilling image request should provide video frame.
- EXPECT_CALL(output_cb_, Run(_)).Times(1);
-
- auto image_record = MakeImageRecord();
- image_provider_raw_->ProvideOneRequestedImage(&image_record);
- base::RunLoop().RunUntilIdle();
-
- // Verify that we will get call to GetFrameInfo as previous one failed.
- EXPECT_CALL(*ycbcr_helper_raw_, MockGetFrameInfo(_)).Times(1);
- EXPECT_CALL(output_cb_, Run(_)).Times(1);
-
- RequestVideoFrame();
- ycbcr_helper_raw_->RunWithYcbCrInfo(true);
- base::RunLoop().RunUntilIdle();
-
- auto other_image_record = MakeImageRecord();
- // If the helper hasn't been destroyed, then we don't expect it to be called.
- image_provider_raw_->ProvideOneRequestedImage(&other_image_record);
- base::RunLoop().RunUntilIdle();
-}
-
} // namespace media
diff --git a/chromium/media/gpu/chromeos/BUILD.gn b/chromium/media/gpu/chromeos/BUILD.gn
index a209dbf6652..ce07f94b380 100644
--- a/chromium/media/gpu/chromeos/BUILD.gn
+++ b/chromium/media/gpu/chromeos/BUILD.gn
@@ -149,6 +149,7 @@ source_set("unit_tests") {
"mailbox_video_frame_converter_unittest.cc",
"platform_video_frame_pool_unittest.cc",
"platform_video_frame_utils_unittest.cc",
+ "video_decoder_pipeline_unittest.cc",
]
}
diff --git a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
index b0c1595e6b5..9e2367128f3 100644
--- a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
+++ b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
@@ -7,6 +7,7 @@
#include <utility>
#include "base/sequenced_task_runner.h"
+#include "media/base/media_log.h"
#include "media/base/video_decoder.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/chromeos/mailbox_video_frame_converter.h"
@@ -18,32 +19,27 @@
#endif
#if BUILDFLAG(USE_V4L2_CODEC)
-#include "media/gpu/v4l2/v4l2_slice_video_decoder.h"
+#include "media/gpu/v4l2/v4l2_video_decoder.h"
#endif
namespace media {
namespace {
-// Get a list of the available functions for creating VideoDeocoder.
-base::queue<VideoDecoderPipeline::CreateVDFunc> GetCreateVDFunctions(
- VideoDecoderPipeline::CreateVDFunc cur_create_vd_func) {
- static constexpr VideoDecoderPipeline::CreateVDFunc kCreateVDFuncs[] = {
+// Gets a list of the available functions for creating VideoDecoders.
+VideoDecoderPipeline::CreateDecoderFunctions GetCreateDecoderFunctions() {
+ constexpr VideoDecoderPipeline::CreateDecoderFunction kCreateVDFuncs[] = {
#if BUILDFLAG(USE_VAAPI)
&VaapiVideoDecoder::Create,
#endif // BUILDFLAG(USE_VAAPI)
#if BUILDFLAG(USE_V4L2_CODEC)
- &V4L2SliceVideoDecoder::Create,
+ &V4L2VideoDecoder::Create,
#endif // BUILDFLAG(USE_V4L2_CODEC)
};
- base::queue<VideoDecoderPipeline::CreateVDFunc> ret;
- for (const auto& func : kCreateVDFuncs) {
- if (func != cur_create_vd_func)
- ret.push(func);
- }
- return ret;
+ return VideoDecoderPipeline::CreateDecoderFunctions(
+ kCreateVDFuncs, kCreateVDFuncs + base::size(kCreateVDFuncs));
}
} // namespace
@@ -61,7 +57,7 @@ ChromeosVideoDecoderFactory::GetSupportedConfigs() {
#endif // BUILDFLAG(USE_VAAPI)
#if BUILDFLAG(USE_V4L2_CODEC)
- configs = V4L2SliceVideoDecoder::GetSupportedConfigs();
+ configs = V4L2VideoDecoder::GetSupportedConfigs();
supported_configs.insert(supported_configs.end(), configs.begin(),
configs.end());
#endif // BUILDFLAG(USE_V4L2_CODEC)
@@ -74,11 +70,11 @@ std::unique_ptr<VideoDecoder> ChromeosVideoDecoderFactory::Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
std::unique_ptr<DmabufVideoFramePool> frame_pool,
std::unique_ptr<VideoFrameConverter> frame_converter,
- gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory) {
+ std::unique_ptr<MediaLog> media_log) {
return VideoDecoderPipeline::Create(
std::move(client_task_runner), std::move(frame_pool),
- std::move(frame_converter), gpu_memory_buffer_factory,
- base::BindRepeating(&GetCreateVDFunctions));
+ std::move(frame_converter), std::move(media_log),
+ base::BindRepeating(&GetCreateDecoderFunctions));
}
} // namespace media
diff --git a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h
index 15d4e5830c9..ee61ce2a5db 100644
--- a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h
+++ b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h
@@ -15,13 +15,10 @@ namespace base {
class SequencedTaskRunner;
} // namespace base
-namespace gpu {
-class GpuMemoryBufferFactory;
-} // namespace gpu
-
namespace media {
class DmabufVideoFramePool;
+class MediaLog;
class VideoDecoder;
class VideoFrameConverter;
@@ -31,13 +28,11 @@ class MEDIA_GPU_EXPORT ChromeosVideoDecoderFactory {
// Create VideoDecoder instance that allocates VideoFrame from |frame_pool|
// and converts the output VideoFrame |frame_converter|.
- // Note the caller is responsible for keeping |gpu_memory_buffer_factory|
- // alive during the returned VideoDecoder lifetime.
static std::unique_ptr<VideoDecoder> Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
std::unique_ptr<DmabufVideoFramePool> frame_pool,
std::unique_ptr<VideoFrameConverter> frame_converter,
- gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory);
+ std::unique_ptr<MediaLog> media_log);
};
} // namespace media
diff --git a/chromium/media/gpu/chromeos/fourcc.cc b/chromium/media/gpu/chromeos/fourcc.cc
index e8d514df9fa..fb179e65bb1 100644
--- a/chromium/media/gpu/chromeos/fourcc.cc
+++ b/chromium/media/gpu/chromeos/fourcc.cc
@@ -5,6 +5,7 @@
#include "media/gpu/chromeos/fourcc.h"
#include "base/logging.h"
+#include "base/notreached.h"
#include "base/strings/stringprintf.h"
#include "media/gpu/macros.h"
@@ -42,6 +43,7 @@ base::Optional<Fourcc> Fourcc::FromUint32(uint32_t fourcc) {
case YM16:
case MT21:
case MM21:
+ case P010:
return Fourcc(static_cast<Value>(fourcc));
}
DVLOGF(3) << "Unmapped fourcc: " << FourccToString(fourcc);
@@ -74,6 +76,8 @@ base::Optional<Fourcc> Fourcc::FromVideoPixelFormat(
return Fourcc(NV12);
case PIXEL_FORMAT_NV21:
return Fourcc(NV21);
+ case PIXEL_FORMAT_P016LE:
+ return Fourcc(P010);
case PIXEL_FORMAT_UYVY:
NOTREACHED();
FALLTHROUGH;
@@ -92,7 +96,6 @@ base::Optional<Fourcc> Fourcc::FromVideoPixelFormat(
case PIXEL_FORMAT_YUV422P12:
case PIXEL_FORMAT_YUV444P12:
case PIXEL_FORMAT_Y16:
- case PIXEL_FORMAT_P016LE:
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_UNKNOWN:
@@ -186,6 +189,8 @@ VideoPixelFormat Fourcc::ToVideoPixelFormat() const {
// be mapped to PIXEL_FORMAT_NV12.
case MM21:
return PIXEL_FORMAT_NV12;
+ case P010:
+ return PIXEL_FORMAT_P016LE;
}
NOTREACHED() << "Unmapped Fourcc: " << ToString();
return PIXEL_FORMAT_UNKNOWN;
@@ -230,6 +235,8 @@ base::Optional<Fourcc> Fourcc::FromVAFourCC(uint32_t va_fourcc) {
return Fourcc(XR24);
case VA_FOURCC_ARGB:
return Fourcc(RGB4);
+ case VA_FOURCC_P010:
+ return Fourcc(P010);
}
DVLOGF(3) << "Unmapped VAFourCC: " << FourccToString(va_fourcc);
return base::nullopt;
@@ -257,6 +264,8 @@ base::Optional<uint32_t> Fourcc::ToVAFourCC() const {
return VA_FOURCC_BGRX;
case RGB4:
return VA_FOURCC_ARGB;
+ case P010:
+ return VA_FOURCC_P010;
case YM12:
case YM21:
case NM12:
@@ -287,6 +296,7 @@ base::Optional<Fourcc> Fourcc::ToSinglePlanar() const {
case YUYV:
case NV12:
case NV21:
+ case P010:
return Fourcc(value_);
case YM12:
return Fourcc(YU12);
@@ -319,6 +329,7 @@ bool Fourcc::IsMultiPlanar() const {
case YUYV:
case NV12:
case NV21:
+ case P010:
return false;
case YM12:
case YM21:
diff --git a/chromium/media/gpu/chromeos/fourcc.h b/chromium/media/gpu/chromeos/fourcc.h
index 85172e16d52..652f203e02a 100644
--- a/chromium/media/gpu/chromeos/fourcc.h
+++ b/chromium/media/gpu/chromeos/fourcc.h
@@ -108,6 +108,10 @@ class MEDIA_GPU_EXPORT Fourcc {
// Maps to V4L2_PIX_FMT_MM21.
// It is used for MT8183 hardware video decoder.
MM21 = ComposeFourcc('M', 'M', '2', '1'),
+
+ // Two-plane 10-bit YUV 4:2:0. Each sample is a two-byte little-endian value
+ // with the bottom six bits ignored.
+ P010 = ComposeFourcc('P', '0', '1', '0'),
};
explicit Fourcc(Fourcc::Value fourcc);
diff --git a/chromium/media/gpu/chromeos/fourcc_unittests.cc b/chromium/media/gpu/chromeos/fourcc_unittests.cc
index ade4a4b663c..d59b317ee0b 100644
--- a/chromium/media/gpu/chromeos/fourcc_unittests.cc
+++ b/chromium/media/gpu/chromeos/fourcc_unittests.cc
@@ -32,11 +32,11 @@ TEST(FourccTest, V4L2PixFmtToV4L2PixFmt) {
CheckFromV4L2PixFmtAndBack(V4L2_PIX_FMT_ABGR32);
#ifdef V4L2_PIX_FMT_RGBA32
- V4L2PixFmtIsEqual(V4L2_PIX_FMT_RGBA32);
+ CheckFromV4L2PixFmtAndBack(V4L2_PIX_FMT_RGBA32);
#endif
CheckFromV4L2PixFmtAndBack(V4L2_PIX_FMT_XBGR32);
#ifdef V4L2_PIX_FMT_RGBX32
- V4L2PixFmtIsEqual(V4L2_PIX_FMT_RGBX32);
+ CheckFromV4L2PixFmtAndBack(V4L2_PIX_FMT_RGBX32);
#endif
CheckFromV4L2PixFmtAndBack(V4L2_PIX_FMT_RGB32);
CheckFromV4L2PixFmtAndBack(V4L2_PIX_FMT_YUV420);
@@ -133,6 +133,7 @@ TEST(FourccTest, FromVaFourCCAndBack) {
CheckFromVAFourCCAndBack(VA_FOURCC_BGRA);
CheckFromVAFourCCAndBack(VA_FOURCC_BGRX);
CheckFromVAFourCCAndBack(VA_FOURCC_ARGB);
+ CheckFromVAFourCCAndBack(VA_FOURCC_P010);
}
TEST(FourccTest, VAFourCCToVideoPixelFormat) {
@@ -154,6 +155,8 @@ TEST(FourccTest, VAFourCCToVideoPixelFormat) {
Fourcc::FromVAFourCC(VA_FOURCC_BGRA)->ToVideoPixelFormat());
EXPECT_EQ(PIXEL_FORMAT_XRGB,
Fourcc::FromVAFourCC(VA_FOURCC_BGRX)->ToVideoPixelFormat());
+ EXPECT_EQ(PIXEL_FORMAT_P016LE,
+ Fourcc::FromVAFourCC(VA_FOURCC_P010)->ToVideoPixelFormat());
}
TEST(FourccTest, VideoPixelFormatToVAFourCC) {
@@ -175,6 +178,8 @@ TEST(FourccTest, VideoPixelFormatToVAFourCC) {
*Fourcc::FromVideoPixelFormat(PIXEL_FORMAT_ARGB)->ToVAFourCC());
EXPECT_EQ(static_cast<uint32_t>(VA_FOURCC_BGRX),
*Fourcc::FromVideoPixelFormat(PIXEL_FORMAT_XRGB)->ToVAFourCC());
+ EXPECT_EQ(static_cast<uint32_t>(VA_FOURCC_P010),
+ *Fourcc::FromVideoPixelFormat(PIXEL_FORMAT_P016LE)->ToVAFourCC());
}
#endif // BUILDFLAG(USE_VAAPI)
@@ -189,6 +194,7 @@ TEST(FourccTest, FourccToSinglePlanar) {
EXPECT_EQ(Fourcc(Fourcc::YUYV).ToSinglePlanar(), Fourcc(Fourcc::YUYV));
EXPECT_EQ(Fourcc(Fourcc::NV12).ToSinglePlanar(), Fourcc(Fourcc::NV12));
EXPECT_EQ(Fourcc(Fourcc::NV21).ToSinglePlanar(), Fourcc(Fourcc::NV21));
+ EXPECT_EQ(Fourcc(Fourcc::P010).ToSinglePlanar(), Fourcc(Fourcc::P010));
EXPECT_EQ(Fourcc(Fourcc::YM12).ToSinglePlanar(),
Fourcc(Fourcc::YU12).ToSinglePlanar());
EXPECT_EQ(Fourcc(Fourcc::YM21).ToSinglePlanar(),
diff --git a/chromium/media/gpu/chromeos/image_processor.cc b/chromium/media/gpu/chromeos/image_processor.cc
index cde32f09a80..c3227c88154 100644
--- a/chromium/media/gpu/chromeos/image_processor.cc
+++ b/chromium/media/gpu/chromeos/image_processor.cc
@@ -70,6 +70,7 @@ std::unique_ptr<ImageProcessor> ImageProcessor::Create(
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> client_task_runner) {
scoped_refptr<base::SequencedTaskRunner> backend_task_runner =
@@ -77,9 +78,9 @@ std::unique_ptr<ImageProcessor> ImageProcessor::Create(
auto wrapped_error_cb = base::BindRepeating(
base::IgnoreResult(&base::SequencedTaskRunner::PostTask),
client_task_runner, FROM_HERE, std::move(error_cb));
- std::unique_ptr<ImageProcessorBackend> backend =
- create_backend_cb.Run(input_config, output_config, preferred_output_modes,
- std::move(wrapped_error_cb), backend_task_runner);
+ std::unique_ptr<ImageProcessorBackend> backend = create_backend_cb.Run(
+ input_config, output_config, preferred_output_modes, relative_rotation,
+ std::move(wrapped_error_cb), backend_task_runner);
if (!backend)
return nullptr;
diff --git a/chromium/media/gpu/chromeos/image_processor.h b/chromium/media/gpu/chromeos/image_processor.h
index d0ce7acc8e3..ac62dbaf8cd 100644
--- a/chromium/media/gpu/chromeos/image_processor.h
+++ b/chromium/media/gpu/chromeos/image_processor.h
@@ -42,6 +42,7 @@ class MEDIA_GPU_EXPORT ImageProcessor {
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner)>;
@@ -50,6 +51,7 @@ class MEDIA_GPU_EXPORT ImageProcessor {
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> client_task_runner);
diff --git a/chromium/media/gpu/chromeos/image_processor_backend.cc b/chromium/media/gpu/chromeos/image_processor_backend.cc
index 27c5a056e81..0d7924766ba 100644
--- a/chromium/media/gpu/chromeos/image_processor_backend.cc
+++ b/chromium/media/gpu/chromeos/image_processor_backend.cc
@@ -63,11 +63,13 @@ ImageProcessorBackend::ImageProcessorBackend(
const PortConfig& input_config,
const PortConfig& output_config,
OutputMode output_mode,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner)
: input_config_(input_config),
output_config_(output_config),
output_mode_(output_mode),
+ relative_rotation_(relative_rotation),
error_cb_(error_cb),
backend_task_runner_(std::move(backend_task_runner)) {
DETACH_FROM_SEQUENCE(backend_sequence_checker_);
diff --git a/chromium/media/gpu/chromeos/image_processor_backend.h b/chromium/media/gpu/chromeos/image_processor_backend.h
index 85fcdf76f59..6b0c86f5bc8 100644
--- a/chromium/media/gpu/chromeos/image_processor_backend.h
+++ b/chromium/media/gpu/chromeos/image_processor_backend.h
@@ -113,6 +113,7 @@ class MEDIA_GPU_EXPORT ImageProcessorBackend {
const PortConfig& input_config,
const PortConfig& output_config,
OutputMode output_mode,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
virtual ~ImageProcessorBackend();
@@ -125,6 +126,10 @@ class MEDIA_GPU_EXPORT ImageProcessorBackend {
// works as IMPORT mode for output.
const OutputMode output_mode_;
+ // ImageProcessor performs a rotation if the |relative_rotation_| is not equal
+ // to VIDEO_ROTATION_0.
+ const VideoRotation relative_rotation_;
+
// Call this callback when any error occurs.
const ErrorCB error_cb_;
diff --git a/chromium/media/gpu/chromeos/image_processor_factory.cc b/chromium/media/gpu/chromeos/image_processor_factory.cc
index 0daaab910f5..ccdfcf4c1ca 100644
--- a/chromium/media/gpu/chromeos/image_processor_factory.cc
+++ b/chromium/media/gpu/chromeos/image_processor_factory.cc
@@ -81,7 +81,8 @@ std::unique_ptr<ImageProcessor> CreateV4L2ImageProcessorWithInputCandidates(
return v4l2_vda_helpers::CreateImageProcessor(
input_fourcc, *output_fourcc, input_size, output_size, visible_size,
- num_buffers, V4L2Device::Create(), ImageProcessor::OutputMode::IMPORT,
+ VideoFrame::StorageType::STORAGE_GPU_MEMORY_BUFFER, num_buffers,
+ V4L2Device::Create(), ImageProcessor::OutputMode::IMPORT,
std::move(client_task_runner), std::move(error_cb));
}
return nullptr;
@@ -96,6 +97,7 @@ std::unique_ptr<ImageProcessor> ImageProcessorFactory::Create(
const ImageProcessor::PortConfig& output_config,
const std::vector<ImageProcessor::OutputMode>& preferred_output_modes,
size_t num_buffers,
+ VideoRotation relative_rotation,
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
ImageProcessor::ErrorCB error_cb) {
std::vector<ImageProcessor::CreateBackendCB> create_funcs;
@@ -112,9 +114,10 @@ std::unique_ptr<ImageProcessor> ImageProcessorFactory::Create(
std::unique_ptr<ImageProcessor> image_processor;
for (auto& create_func : create_funcs) {
- image_processor = ImageProcessor::Create(
- std::move(create_func), input_config, output_config,
- preferred_output_modes, error_cb, client_task_runner);
+ image_processor =
+ ImageProcessor::Create(std::move(create_func), input_config,
+ output_config, preferred_output_modes,
+ relative_rotation, error_cb, client_task_runner);
if (image_processor)
return image_processor;
}
diff --git a/chromium/media/gpu/chromeos/image_processor_factory.h b/chromium/media/gpu/chromeos/image_processor_factory.h
index 7ab5b4cf56b..a81eddde318 100644
--- a/chromium/media/gpu/chromeos/image_processor_factory.h
+++ b/chromium/media/gpu/chromeos/image_processor_factory.h
@@ -54,6 +54,7 @@ class MEDIA_GPU_EXPORT ImageProcessorFactory {
const ImageProcessor::PortConfig& output_config,
const std::vector<ImageProcessor::OutputMode>& preferred_output_modes,
size_t num_buffers,
+ VideoRotation relative_rotation,
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
ImageProcessor::ErrorCB error_cb);
diff --git a/chromium/media/gpu/chromeos/image_processor_test.cc b/chromium/media/gpu/chromeos/image_processor_test.cc
index a4192c90397..84c3535203b 100644
--- a/chromium/media/gpu/chromeos/image_processor_test.cc
+++ b/chromium/media/gpu/chromeos/image_processor_test.cc
@@ -76,6 +76,14 @@ const base::FilePath::CharType* kNV12Image180P =
const base::FilePath::CharType* kNV12Image360PIn480P =
FILE_PATH_LITERAL("puppets-640x360_in_640x480.nv12.yuv");
+// Files for rotation test.
+const base::FilePath::CharType* kNV12Image90 =
+ FILE_PATH_LITERAL("bear_192x320_90.nv12.yuv");
+const base::FilePath::CharType* kNV12Image180 =
+ FILE_PATH_LITERAL("bear_320x192_180.nv12.yuv");
+const base::FilePath::CharType* kNV12Image270 =
+ FILE_PATH_LITERAL("bear_192x320_270.nv12.yuv");
+
class ImageProcessorParamTest
: public ::testing::Test,
public ::testing::WithParamInterface<
@@ -115,6 +123,26 @@ class ImageProcessorParamTest
ImageProcessor::PortConfig output_config(
output_fourcc, output_image->Size(), output_layout->planes(),
output_image->VisibleRect(), output_storage_types);
+ int rotation =
+ ((output_image->Rotation() - input_image.Rotation() + 4) % 4) * 90;
+ VideoRotation relative_rotation = VIDEO_ROTATION_0;
+ switch (rotation) {
+ case 0:
+ relative_rotation = VIDEO_ROTATION_0;
+ break;
+ case 90:
+ relative_rotation = VIDEO_ROTATION_90;
+ break;
+ case 180:
+ relative_rotation = VIDEO_ROTATION_180;
+ break;
+ case 270:
+ relative_rotation = VIDEO_ROTATION_270;
+ break;
+ default:
+ NOTREACHED() << "Invalid rotation: " << rotation;
+ return nullptr;
+ }
// TODO(crbug.com/917951): Select more appropriate number of buffers.
constexpr size_t kNumBuffers = 1;
LOG_ASSERT(output_image->IsMetadataLoaded());
@@ -156,7 +184,8 @@ class ImageProcessorParamTest
}
auto ip_client = test::ImageProcessorClient::Create(
- input_config, output_config, kNumBuffers, std::move(frame_processors));
+ input_config, output_config, kNumBuffers, relative_rotation,
+ std::move(frame_processors));
return ip_client;
}
@@ -294,6 +323,17 @@ INSTANTIATE_TEST_SUITE_P(NV12CroppingAndScaling,
::testing::Values(std::make_tuple(kNV12Image360PIn480P,
kNV12Image270P)));
+// Rotate frame to specified rotation.
+// Currently only the VA-API image processor may support rotation.
+INSTANTIATE_TEST_SUITE_P(
+ NV12Rotation,
+ ImageProcessorParamTest,
+ ::testing::Values(std::make_tuple(kNV12Image, kNV12Image90),
+ std::make_tuple(kNV12Image, kNV12Image180),
+ std::make_tuple(kNV12Image, kNV12Image270),
+ std::make_tuple(kNV12Image180, kNV12Image90),
+ std::make_tuple(kNV12Image180, kNV12Image)));
+
#if defined(OS_CHROMEOS)
// TODO(hiroh): Add more tests.
// MEM->DMABUF (V4L2VideoEncodeAccelerator),
diff --git a/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc b/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
index ab55071a330..a9de6bc113d 100644
--- a/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
+++ b/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
@@ -12,6 +12,7 @@
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/convert_from_argb.h"
+#include "third_party/libyuv/include/libyuv/rotate.h"
#include "third_party/libyuv/include/libyuv/scale.h"
namespace media {
@@ -72,6 +73,57 @@ void NV12Scale(uint8_t* tmp_buffer,
dst_stride_uv, dst_chroma_width, dst_chroma_height);
}
+// TODO(https://bugs.chromium.org/p/libyuv/issues/detail?id=840): Remove
+// this once libyuv implements NV12Rotate() and use the libyuv::NV12Rotate().
+bool NV12Rotate(uint8_t* tmp_buffer,
+ const uint8_t* src_y,
+ int src_stride_y,
+ const uint8_t* src_uv,
+ int src_stride_uv,
+ int src_width,
+ int src_height,
+ uint8_t* dst_y,
+ int dst_stride_y,
+ uint8_t* dst_uv,
+ int dst_stride_uv,
+ int dst_width,
+ int dst_height,
+ VideoRotation relative_rotation) {
+ libyuv::RotationModeEnum rotation = libyuv::kRotate0;
+ switch (relative_rotation) {
+ case VIDEO_ROTATION_0:
+ NOTREACHED() << "Unexpected rotation: " << rotation;
+ return false;
+ case VIDEO_ROTATION_90:
+ rotation = libyuv::kRotate90;
+ break;
+ case VIDEO_ROTATION_180:
+ rotation = libyuv::kRotate180;
+ break;
+ case VIDEO_ROTATION_270:
+ rotation = libyuv::kRotate270;
+ break;
+ }
+
+ // Rotating.
+ const int tmp_uv_width = (dst_width + 1) / 2;
+ const int tmp_uv_height = (dst_height + 1) / 2;
+ uint8_t* const tmp_u = tmp_buffer;
+ uint8_t* const tmp_v = tmp_u + tmp_uv_width * tmp_uv_height;
+
+ // Rotate the NV12 planes to I420.
+ int ret = libyuv::NV12ToI420Rotate(
+ src_y, src_stride_y, src_uv, src_stride_uv, dst_y, dst_stride_y, tmp_u,
+ tmp_uv_width, tmp_v, tmp_uv_width, src_width, src_height, rotation);
+ if (ret != 0)
+ return false;
+
+ // Merge the UV planes into the destination.
+ libyuv::MergeUVPlane(tmp_u, tmp_uv_width, tmp_v, tmp_uv_width, dst_uv,
+ dst_stride_uv, tmp_uv_width, tmp_uv_height);
+ return true;
+}
+
enum class SupportResult {
Supported,
SupportedWithPivot,
@@ -90,7 +142,7 @@ SupportResult IsFormatSupported(Fourcc input_fourcc, Fourcc output_fourcc) {
{Fourcc::YV12, Fourcc::NV12, false},
{Fourcc::AB24, Fourcc::NV12, true},
{Fourcc::XB24, Fourcc::NV12, true},
- // Scaling.
+ // Scaling or Rotating.
{Fourcc::NV12, Fourcc::NV12, true},
};
@@ -128,6 +180,7 @@ std::unique_ptr<ImageProcessorBackend> LibYUVImageProcessorBackend::Create(
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner) {
VLOGF(2);
@@ -206,7 +259,8 @@ std::unique_ptr<ImageProcessorBackend> LibYUVImageProcessorBackend::Create(
// used as |tmp_buffer| in NV12Scale().
// TODO(hiroh): Remove this restriction once libyuv:NV12Scale() is arrived.
if (!gfx::Rect(input_config.visible_rect.size())
- .Contains(gfx::Rect(output_config.visible_rect.size()))) {
+ .Contains(gfx::Rect(output_config.visible_rect.size())) &&
+ relative_rotation == VIDEO_ROTATION_0) {
VLOGF(2) << "Down-scaling support only, input_config.visible_rect="
<< input_config.visible_rect.ToString()
<< ", output_config.visible_rect="
@@ -237,7 +291,7 @@ std::unique_ptr<ImageProcessorBackend> LibYUVImageProcessorBackend::Create(
PortConfig(output_config.fourcc, output_config.size,
output_config.planes, output_config.visible_rect,
{output_storage_type}),
- OutputMode::IMPORT, std::move(error_cb),
+ OutputMode::IMPORT, relative_rotation, std::move(error_cb),
std::move(backend_task_runner)));
VLOGF(2) << "LibYUVImageProcessorBackend created for converting from "
<< input_config.ToString() << " to " << output_config.ToString();
@@ -251,11 +305,13 @@ LibYUVImageProcessorBackend::LibYUVImageProcessorBackend(
const PortConfig& input_config,
const PortConfig& output_config,
OutputMode output_mode,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner)
: ImageProcessorBackend(input_config,
output_config,
output_mode,
+ relative_rotation,
std::move(error_cb),
std::move(backend_task_runner)),
input_frame_mapper_(std::move(input_frame_mapper)),
@@ -353,6 +409,26 @@ int LibYUVImageProcessorBackend::DoConversion(const VideoFrame* const input,
return LIBYUV_FUNC(I420ToNV12, Y_U_V_DATA(intermediate_frame_),
Y_UV_DATA(output));
case PIXEL_FORMAT_NV12:
+ // Rotation mode.
+ if (relative_rotation_ != VIDEO_ROTATION_0) {
+ // The size of |tmp_buffer| of NV12Rotate() should be
+ // output_visible_rect().GetArea() / 2, which used to store temporary
+ // U and V planes for I420 data. Although
+ // |intermediate_frame_->data(0)| is much larger than the required
+ // size, we use the frame to simplify the code.
+ NV12Rotate(intermediate_frame_->data(0),
+ input->visible_data(VideoFrame::kYPlane),
+ input->stride(VideoFrame::kYPlane),
+ input->visible_data(VideoFrame::kUPlane),
+ input->stride(VideoFrame::kUPlane),
+ input->visible_rect().width(),
+ input->visible_rect().height(), Y_UV_DATA(output),
+ output->visible_rect().width(),
+ output->visible_rect().height(), relative_rotation_);
+ return 0;
+ }
+
+ // Scaling mode.
// The size of |tmp_buffer| of NV12Scale() should be
// input_visible_rect().GetArea() / 2 +
// output_visible_rect().GetArea() / 2. Although |intermediate_frame_|
diff --git a/chromium/media/gpu/chromeos/libyuv_image_processor_backend.h b/chromium/media/gpu/chromeos/libyuv_image_processor_backend.h
index f8836096bdd..cd6562bbf82 100644
--- a/chromium/media/gpu/chromeos/libyuv_image_processor_backend.h
+++ b/chromium/media/gpu/chromeos/libyuv_image_processor_backend.h
@@ -32,6 +32,7 @@ class MEDIA_GPU_EXPORT LibYUVImageProcessorBackend
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
@@ -48,6 +49,7 @@ class MEDIA_GPU_EXPORT LibYUVImageProcessorBackend
const PortConfig& input_config,
const PortConfig& output_config,
OutputMode output_mode,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
~LibYUVImageProcessorBackend() override;
diff --git a/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc b/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
index 8bb25386dce..19c3829afa8 100644
--- a/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
+++ b/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
@@ -14,6 +14,7 @@
#include "base/trace_event/trace_event.h"
#include "gpu/command_buffer/common/shared_image_usage.h"
#include "gpu/command_buffer/service/scheduler.h"
+#include "gpu/ipc/common/gpu_client_ids.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "gpu/ipc/service/shared_image_stub.h"
#include "media/base/format_utils.h"
@@ -154,7 +155,7 @@ void MailboxVideoFrameConverter::ConvertFrame(scoped_refptr<VideoFrame> frame) {
DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
DVLOGF(4);
- if (!frame || !frame->HasDmaBufs())
+ if (!frame || frame->storage_type() != VideoFrame::STORAGE_GPU_MEMORY_BUFFER)
return OnError(FROM_HERE, "Invalid frame.");
VideoFrame* origin_frame = unwrap_frame_cb_.Run(*frame);
@@ -225,9 +226,9 @@ void MailboxVideoFrameConverter::WrapMailboxAndVideoFrameAndOutput(
frame->format(), mailbox_holders, std::move(release_mailbox_cb),
frame->coded_size(), frame->visible_rect(), frame->natural_size(),
frame->timestamp());
- mailbox_frame->metadata()->MergeMetadataFrom(frame->metadata());
- mailbox_frame->metadata()->SetBoolean(
- VideoFrameMetadata::READ_LOCK_FENCES_ENABLED, true);
+ mailbox_frame->set_color_space(frame->ColorSpace());
+ mailbox_frame->set_metadata(*(frame->metadata()));
+ mailbox_frame->metadata()->read_lock_fences_enabled = true;
output_cb_.Run(mailbox_frame);
}
@@ -336,7 +337,7 @@ bool MailboxVideoFrameConverter::GenerateSharedImageOnGPUThread(
const uint32_t shared_image_usage =
gpu::SHARED_IMAGE_USAGE_DISPLAY | gpu::SHARED_IMAGE_USAGE_SCANOUT;
const bool success = shared_image_stub->CreateSharedImage(
- mailbox, shared_image_stub->channel()->client_id(),
+ mailbox, gpu::kPlatformVideoFramePoolClientId,
std::move(gpu_memory_buffer_handle), *buffer_format,
gpu::kNullSurfaceHandle, destination_visible_rect.size(),
video_frame->ColorSpace(), shared_image_usage);
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool.cc b/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
index 90a7db5ae52..eebdcb4d5b8 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
@@ -25,9 +25,9 @@ scoped_refptr<VideoFrame> DefaultCreateFrame(
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
base::TimeDelta timestamp) {
- return CreatePlatformVideoFrame(gpu_memory_buffer_factory, format, coded_size,
- visible_rect, natural_size, timestamp,
- gfx::BufferUsage::SCANOUT_VDA_WRITE);
+ return CreateGpuMemoryBufferVideoFrame(
+ gpu_memory_buffer_factory, format, coded_size, visible_rect, natural_size,
+ timestamp, gfx::BufferUsage::SCANOUT_VDA_WRITE);
}
} // namespace
@@ -51,6 +51,15 @@ PlatformVideoFramePool::~PlatformVideoFramePool() {
weak_this_factory_.InvalidateWeakPtrs();
}
+// static
+gfx::GpuMemoryBufferId PlatformVideoFramePool::GetGpuMemoryBufferId(
+ const VideoFrame& frame) {
+ DCHECK_EQ(frame.storage_type(),
+ VideoFrame::StorageType::STORAGE_GPU_MEMORY_BUFFER);
+ DCHECK(frame.GetGpuMemoryBuffer());
+ return frame.GetGpuMemoryBuffer()->GetId();
+}
+
scoped_refptr<VideoFrame> PlatformVideoFramePool::GetFrame() {
DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
DVLOGF(4);
@@ -61,7 +70,7 @@ scoped_refptr<VideoFrame> PlatformVideoFramePool::GetFrame() {
return nullptr;
}
- VideoPixelFormat format = frame_layout_->fourcc().ToVideoPixelFormat();
+ const VideoPixelFormat format = frame_layout_->fourcc().ToVideoPixelFormat();
const gfx::Size& coded_size = frame_layout_->size();
if (free_frames_.empty()) {
if (GetTotalNumFrames_Locked() >= max_num_frames_)
@@ -88,14 +97,15 @@ scoped_refptr<VideoFrame> PlatformVideoFramePool::GetFrame() {
scoped_refptr<VideoFrame> wrapped_frame = VideoFrame::WrapVideoFrame(
origin_frame, format, visible_rect_, natural_size_);
DCHECK(wrapped_frame);
- frames_in_use_.emplace(GetDmabufId(*wrapped_frame), origin_frame.get());
+ frames_in_use_.emplace(GetGpuMemoryBufferId(*wrapped_frame),
+ origin_frame.get());
wrapped_frame->AddDestructionObserver(
base::BindOnce(&PlatformVideoFramePool::OnFrameReleasedThunk, weak_this_,
parent_task_runner_, std::move(origin_frame)));
// Clear all metadata before returning to client, in case origin frame has any
// unrelated metadata.
- wrapped_frame->metadata()->Clear();
+ wrapped_frame->clear_metadata();
return wrapped_frame;
}
@@ -134,7 +144,8 @@ base::Optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
create_frame_cb_.Run(gpu_memory_buffer_factory_, format, coded_size,
visible_rect_, natural_size_, base::TimeDelta());
if (!frame) {
- VLOGF(1) << "Failed to create video frame";
+ VLOGF(1) << "Failed to create video frame " << format << " (fourcc "
+ << fourcc.ToString() << ")";
return base::nullopt;
}
frame_layout_ = GpuBufferLayout::Create(fourcc, frame->coded_size(),
@@ -168,7 +179,7 @@ VideoFrame* PlatformVideoFramePool::UnwrapFrame(
DVLOGF(4);
base::AutoLock auto_lock(lock_);
- auto it = frames_in_use_.find(GetDmabufId(wrapped_frame));
+ auto it = frames_in_use_.find(GetGpuMemoryBufferId(wrapped_frame));
return (it == frames_in_use_.end()) ? nullptr : it->second;
}
@@ -203,7 +214,7 @@ void PlatformVideoFramePool::OnFrameReleased(
DVLOGF(4);
base::AutoLock auto_lock(lock_);
- DmabufId frame_id = GetDmabufId(*origin_frame);
+ gfx::GpuMemoryBufferId frame_id = GetGpuMemoryBufferId(*origin_frame);
auto it = frames_in_use_.find(frame_id);
DCHECK(it != frames_in_use_.end());
frames_in_use_.erase(it);
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool.h b/chromium/media/gpu/chromeos/platform_video_frame_pool.h
index b983f7c3393..b594d107c51 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool.h
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool.h
@@ -21,6 +21,7 @@
#include "media/base/video_types.h"
#include "media/gpu/chromeos/dmabuf_video_frame_pool.h"
#include "media/gpu/media_gpu_export.h"
+#include "ui/gfx/gpu_memory_buffer.h"
namespace gpu {
class GpuMemoryBufferFactory;
@@ -43,6 +44,9 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
gpu::GpuMemoryBufferFactory* gpu_memory_buffer_factory);
~PlatformVideoFramePool() override;
+ // Returns the ID of the GpuMemoryBuffer wrapped by |frame|.
+ static gfx::GpuMemoryBufferId GetGpuMemoryBufferId(const VideoFrame& frame);
+
// DmabufVideoFramePool implementation.
base::Optional<GpuBufferLayout> Initialize(const Fourcc& fourcc,
const gfx::Size& coded_size,
@@ -58,12 +62,12 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
// recycling, and bind destruction callback at original frames.
VideoFrame* UnwrapFrame(const VideoFrame& wrapped_frame);
- private:
- friend class PlatformVideoFramePoolTest;
-
// Returns the number of frames in the pool for testing purposes.
size_t GetPoolSizeForTesting();
+ private:
+ friend class PlatformVideoFramePoolTest;
+
// Thunk to post OnFrameReleased() to |task_runner|.
// Because this thunk may be called in any thread, We don't want to
// dereference WeakPtr. Therefore we wrap the WeakPtr by base::Optional to
@@ -116,8 +120,9 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
// should be the same as |format_| and |coded_size_|.
base::circular_deque<scoped_refptr<VideoFrame>> free_frames_
GUARDED_BY(lock_);
- // Mapping from the unique_id of the wrapped frame to the original frame.
- std::map<DmabufId, VideoFrame*> frames_in_use_ GUARDED_BY(lock_);
+ // Mapping from the frame's GpuMemoryBuffer's ID to the original frame.
+ std::map<gfx::GpuMemoryBufferId, VideoFrame*> frames_in_use_
+ GUARDED_BY(lock_);
// The maximum number of frames created by the pool.
size_t max_num_frames_ GUARDED_BY(lock_) = 0;
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc b/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
index 19b03688c81..ac7bb4ae5b1 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
@@ -9,44 +9,34 @@
#include <memory>
#include <vector>
-#include "base/files/file.h"
-#include "base/files/file_path.h"
-#include "base/files/file_util.h"
-#include "base/files/scoped_file.h"
+#include "base/bind_helpers.h"
#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "gpu/command_buffer/common/mailbox_holder.h"
+#include "media/base/format_utils.h"
#include "media/gpu/chromeos/fourcc.h"
+#include "media/video/fake_gpu_memory_buffer.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
namespace {
-base::ScopedFD CreateTmpHandle() {
- base::FilePath path;
- DCHECK(CreateTemporaryFile(&path));
- base::File file(path, base::File::FLAG_OPEN | base::File::FLAG_READ);
- DCHECK(file.IsValid());
- return base::ScopedFD(file.TakePlatformFile());
-}
-
-scoped_refptr<VideoFrame> CreateDmabufVideoFrame(
+scoped_refptr<VideoFrame> CreateGpuMemoryBufferVideoFrame(
gpu::GpuMemoryBufferFactory* factory,
VideoPixelFormat format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
base::TimeDelta timestamp) {
- base::Optional<VideoFrameLayout> layout =
- VideoFrameLayout::Create(format, coded_size);
- DCHECK(layout);
-
- std::vector<base::ScopedFD> dmabuf_fds;
- for (size_t i = 0; i < VideoFrame::NumPlanes(format); ++i)
- dmabuf_fds.push_back(CreateTmpHandle());
-
- return VideoFrame::WrapExternalDmabufs(*layout, visible_rect, natural_size,
- std::move(dmabuf_fds), timestamp);
+ base::Optional<gfx::BufferFormat> gfx_format =
+ VideoPixelFormatToGfxBufferFormat(format);
+ DCHECK(gfx_format);
+ const gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes] = {};
+ return VideoFrame::WrapExternalGpuMemoryBuffer(
+ visible_rect, natural_size,
+ std::make_unique<FakeGpuMemoryBuffer>(coded_size, *gfx_format),
+ mailbox_holders, base::NullCallback(), timestamp);
}
} // namespace
@@ -54,16 +44,14 @@ scoped_refptr<VideoFrame> CreateDmabufVideoFrame(
class PlatformVideoFramePoolTest
: public ::testing::TestWithParam<VideoPixelFormat> {
public:
- using DmabufId = DmabufVideoFramePool::DmabufId;
-
PlatformVideoFramePoolTest()
: task_environment_(base::test::TaskEnvironment::TimeSource::MOCK_TIME),
pool_(new PlatformVideoFramePool(nullptr)) {
- pool_->create_frame_cb_ = base::BindRepeating(&CreateDmabufVideoFrame);
+ SetCreateFrameCB(base::BindRepeating(&CreateGpuMemoryBufferVideoFrame));
pool_->set_parent_task_runner(base::ThreadTaskRunnerHandle::Get());
}
- void Initialize(const Fourcc& fourcc) {
+ bool Initialize(const Fourcc& fourcc) {
constexpr gfx::Size kCodedSize(320, 240);
constexpr size_t kNumFrames = 10;
@@ -72,7 +60,7 @@ class PlatformVideoFramePoolTest
layout_ = pool_->Initialize(fourcc, kCodedSize, visible_rect_,
natural_size_, kNumFrames);
- EXPECT_TRUE(layout_);
+ return !!layout_;
}
scoped_refptr<VideoFrame> GetFrame(int timestamp_ms) {
@@ -88,8 +76,8 @@ class PlatformVideoFramePoolTest
return frame;
}
- void CheckPoolSize(size_t size) const {
- EXPECT_EQ(size, pool_->GetPoolSizeForTesting());
+ void SetCreateFrameCB(PlatformVideoFramePool::CreateFrameCB cb) {
+ pool_->create_frame_cb_ = cb;
}
protected:
@@ -103,17 +91,18 @@ class PlatformVideoFramePoolTest
INSTANTIATE_TEST_SUITE_P(All,
PlatformVideoFramePoolTest,
- testing::Values(PIXEL_FORMAT_I420,
- PIXEL_FORMAT_YV12,
+ testing::Values(PIXEL_FORMAT_YV12,
PIXEL_FORMAT_NV12,
- PIXEL_FORMAT_ARGB));
+ PIXEL_FORMAT_ARGB,
+ PIXEL_FORMAT_P016LE));
TEST_P(PlatformVideoFramePoolTest, SingleFrameReuse) {
const auto fourcc = Fourcc::FromVideoPixelFormat(GetParam());
ASSERT_TRUE(fourcc.has_value());
- Initialize(fourcc.value());
+ ASSERT_TRUE(Initialize(fourcc.value()));
scoped_refptr<VideoFrame> frame = GetFrame(10);
- DmabufId id = DmabufVideoFramePool::GetDmabufId(*frame);
+ gfx::GpuMemoryBufferId id =
+ PlatformVideoFramePool::GetGpuMemoryBufferId(*frame);
// Clear frame reference to return the frame to the pool.
frame = nullptr;
@@ -121,38 +110,40 @@ TEST_P(PlatformVideoFramePoolTest, SingleFrameReuse) {
// Verify that the next frame from the pool uses the same memory.
scoped_refptr<VideoFrame> new_frame = GetFrame(20);
- EXPECT_EQ(id, DmabufVideoFramePool::GetDmabufId(*new_frame));
+ EXPECT_EQ(id, PlatformVideoFramePool::GetGpuMemoryBufferId(*new_frame));
}
TEST_P(PlatformVideoFramePoolTest, MultipleFrameReuse) {
const auto fourcc = Fourcc::FromVideoPixelFormat(GetParam());
ASSERT_TRUE(fourcc.has_value());
- Initialize(fourcc.value());
+ ASSERT_TRUE(Initialize(fourcc.value()));
scoped_refptr<VideoFrame> frame1 = GetFrame(10);
scoped_refptr<VideoFrame> frame2 = GetFrame(20);
- DmabufId id1 = DmabufVideoFramePool::GetDmabufId(*frame1);
- DmabufId id2 = DmabufVideoFramePool::GetDmabufId(*frame2);
+ gfx::GpuMemoryBufferId id1 =
+ PlatformVideoFramePool::GetGpuMemoryBufferId(*frame1);
+ gfx::GpuMemoryBufferId id2 =
+ PlatformVideoFramePool::GetGpuMemoryBufferId(*frame2);
frame1 = nullptr;
task_environment_.RunUntilIdle();
frame1 = GetFrame(30);
- EXPECT_EQ(id1, DmabufVideoFramePool::GetDmabufId(*frame1));
+ EXPECT_EQ(id1, PlatformVideoFramePool::GetGpuMemoryBufferId(*frame1));
frame2 = nullptr;
task_environment_.RunUntilIdle();
frame2 = GetFrame(40);
- EXPECT_EQ(id2, DmabufVideoFramePool::GetDmabufId(*frame2));
+ EXPECT_EQ(id2, PlatformVideoFramePool::GetGpuMemoryBufferId(*frame2));
frame1 = nullptr;
frame2 = nullptr;
task_environment_.RunUntilIdle();
- CheckPoolSize(2u);
+ EXPECT_EQ(2u, pool_->GetPoolSizeForTesting());
}
TEST_P(PlatformVideoFramePoolTest, InitializeWithDifferentFourcc) {
const auto fourcc = Fourcc::FromVideoPixelFormat(GetParam());
ASSERT_TRUE(fourcc.has_value());
- Initialize(fourcc.value());
+ ASSERT_TRUE(Initialize(fourcc.value()));
scoped_refptr<VideoFrame> frame_a = GetFrame(10);
scoped_refptr<VideoFrame> frame_b = GetFrame(10);
@@ -162,52 +153,68 @@ TEST_P(PlatformVideoFramePoolTest, InitializeWithDifferentFourcc) {
task_environment_.RunUntilIdle();
// Verify that both frames are in the pool.
- CheckPoolSize(2u);
+ EXPECT_EQ(2u, pool_->GetPoolSizeForTesting());
// Verify that requesting a frame with a different format causes the pool
// to get drained.
- const Fourcc different_fourcc(Fourcc::NV21);
+ const Fourcc different_fourcc(Fourcc::XR24);
ASSERT_NE(fourcc, different_fourcc);
- Initialize(different_fourcc);
+ ASSERT_TRUE(Initialize(different_fourcc));
scoped_refptr<VideoFrame> new_frame = GetFrame(10);
- CheckPoolSize(0u);
+ EXPECT_EQ(0u, pool_->GetPoolSizeForTesting());
}
TEST_P(PlatformVideoFramePoolTest, UnwrapVideoFrame) {
const auto fourcc = Fourcc::FromVideoPixelFormat(GetParam());
ASSERT_TRUE(fourcc.has_value());
- Initialize(fourcc.value());
+ ASSERT_TRUE(Initialize(fourcc.value()));
scoped_refptr<VideoFrame> frame_1 = GetFrame(10);
scoped_refptr<VideoFrame> frame_2 = VideoFrame::WrapVideoFrame(
frame_1, frame_1->format(), frame_1->visible_rect(),
frame_1->natural_size());
EXPECT_EQ(pool_->UnwrapFrame(*frame_1), pool_->UnwrapFrame(*frame_2));
- EXPECT_TRUE(frame_1->IsSameDmaBufsAs(*frame_2));
+ EXPECT_EQ(frame_1->GetGpuMemoryBuffer(), frame_2->GetGpuMemoryBuffer());
scoped_refptr<VideoFrame> frame_3 = GetFrame(20);
EXPECT_NE(pool_->UnwrapFrame(*frame_1), pool_->UnwrapFrame(*frame_3));
- EXPECT_FALSE(frame_1->IsSameDmaBufsAs(*frame_3));
+ EXPECT_NE(frame_1->GetGpuMemoryBuffer(), frame_3->GetGpuMemoryBuffer());
}
TEST_P(PlatformVideoFramePoolTest, InitializeWithSameFourcc) {
const auto fourcc = Fourcc::FromVideoPixelFormat(GetParam());
ASSERT_TRUE(fourcc.has_value());
- Initialize(fourcc.value());
+ ASSERT_TRUE(Initialize(fourcc.value()));
scoped_refptr<VideoFrame> frame1 = GetFrame(10);
- DmabufId id1 = DmabufVideoFramePool::GetDmabufId(*frame1);
+ gfx::GpuMemoryBufferId id1 =
+ PlatformVideoFramePool::GetGpuMemoryBufferId(*frame1);
// Clear frame references to return the frames to the pool.
frame1 = nullptr;
task_environment_.RunUntilIdle();
// Request frame with the same format. The pool should not request new frames.
- Initialize(fourcc.value());
+ ASSERT_TRUE(Initialize(fourcc.value()));
scoped_refptr<VideoFrame> frame2 = GetFrame(20);
- DmabufId id2 = DmabufVideoFramePool::GetDmabufId(*frame2);
+ gfx::GpuMemoryBufferId id2 =
+ PlatformVideoFramePool::GetGpuMemoryBufferId(*frame2);
EXPECT_EQ(id1, id2);
}
+TEST_P(PlatformVideoFramePoolTest, InitializeFail) {
+ const auto fourcc = Fourcc::FromVideoPixelFormat(GetParam());
+ ASSERT_TRUE(fourcc.has_value());
+ SetCreateFrameCB(base::BindRepeating(
+ [](gpu::GpuMemoryBufferFactory* factory, VideoPixelFormat format,
+ const gfx::Size& coded_size, const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size, base::TimeDelta timestamp) {
+ auto frame = scoped_refptr<VideoFrame>(nullptr);
+ return frame;
+ }));
+
+ EXPECT_FALSE(Initialize(fourcc.value()));
+}
+
// TODO(akahuang): Add a testcase to verify calling Initialize() only with
// different |max_num_frames|.
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_utils.cc b/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
index 9e7994040b8..ce559f9a0c0 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
@@ -142,8 +142,7 @@ scoped_refptr<VideoFrame> CreatePlatformVideoFrame(
dmabuf_fds.emplace_back(plane.fd.release());
auto frame = VideoFrame::WrapExternalDmabufs(
- *layout, visible_rect, visible_rect.size(), std::move(dmabuf_fds),
- timestamp);
+ *layout, visible_rect, natural_size, std::move(dmabuf_fds), timestamp);
if (!frame)
return nullptr;
@@ -174,6 +173,11 @@ gfx::GpuMemoryBufferHandle CreateGpuMemoryBufferHandle(
switch (video_frame->storage_type()) {
case VideoFrame::STORAGE_GPU_MEMORY_BUFFER:
handle = video_frame->GetGpuMemoryBuffer()->CloneHandle();
+ // TODO(crbug.com/1097956): handle a failure gracefully.
+ CHECK_EQ(handle.type, gfx::NATIVE_PIXMAP)
+ << "The cloned handle has an unexpected type: " << handle.type;
+ CHECK(!handle.native_pixmap_handle.planes.empty())
+ << "The cloned handle has no planes";
break;
case VideoFrame::STORAGE_DMABUFS: {
const size_t num_planes = VideoFrame::NumPlanes(video_frame->format());
@@ -185,10 +189,8 @@ gfx::GpuMemoryBufferHandle CreateGpuMemoryBufferHandle(
while (num_planes != duped_fds.size()) {
int duped_fd = -1;
duped_fd = HANDLE_EINTR(dup(duped_fds.back().get()));
- if (duped_fd == -1) {
- DLOG(ERROR) << "Failed duplicating dmabuf fd";
- return handle;
- }
+ // TODO(crbug.com/1097956): handle a failure gracefully.
+ PCHECK(duped_fd >= 0) << "Failed duplicating a dma-buf fd";
duped_fds.emplace_back(duped_fd);
}
diff --git a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
index 9373a3d26d4..5dd05705614 100644
--- a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
+++ b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
@@ -10,6 +10,7 @@
#include "base/bind.h"
#include "base/location.h"
#include "base/macros.h"
+#include "media/base/media_util.h"
#include "media/base/video_color_space.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
@@ -137,7 +138,7 @@ bool VdVideoDecodeAccelerator::Initialize(const Config& config,
std::make_unique<VdaVideoFramePool>(weak_this_, client_task_runner_);
vd_ = create_vd_cb_.Run(client_task_runner_, std::move(frame_pool),
std::make_unique<VideoFrameConverter>(),
- nullptr /* gpu_memory_buffer_factory */);
+ std::make_unique<NullMediaLog>());
if (!vd_)
return false;
@@ -385,9 +386,7 @@ base::Optional<Picture> VdVideoDecodeAccelerator::GetPicture(
}
int32_t picture_buffer_id = it->second;
int32_t bitstream_id = FakeTimestampToBitstreamId(frame.timestamp());
- bool allow_overlay = false;
- ignore_result(frame.metadata()->GetBoolean(VideoFrameMetadata::ALLOW_OVERLAY,
- &allow_overlay));
+ bool allow_overlay = frame.metadata()->allow_overlay;
return base::make_optional(Picture(picture_buffer_id, bitstream_id,
frame.visible_rect(), frame.ColorSpace(),
allow_overlay));
diff --git a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.h b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.h
index 9b9481ca60a..ffdb43c8eb8 100644
--- a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.h
+++ b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.h
@@ -24,12 +24,9 @@
#include "media/gpu/media_gpu_export.h"
#include "media/video/video_decode_accelerator.h"
-namespace gpu {
-class GpuMemoryBufferFactory;
-} // namespace gpu
-
namespace media {
+class MediaLog;
class VideoFrame;
// Implements the VideoDecodeAccelerator backed by a VideoDecoder.
@@ -52,7 +49,7 @@ class MEDIA_GPU_EXPORT VdVideoDecodeAccelerator
scoped_refptr<base::SequencedTaskRunner>,
std::unique_ptr<DmabufVideoFramePool>,
std::unique_ptr<VideoFrameConverter>,
- gpu::GpuMemoryBufferFactory* const)>;
+ std::unique_ptr<MediaLog>)>;
// Create VdVideoDecodeAccelerator instance, and call Initialize().
// Return nullptr if Initialize() failed.
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline.cc b/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
index 906861ba788..3759f9f994f 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
@@ -14,7 +14,9 @@
#include "base/task/task_traits.h"
#include "base/task/thread_pool.h"
#include "build/build_config.h"
+#include "media/base/async_destroy_video_decoder.h"
#include "media/base/limits.h"
+#include "media/base/media_log.h"
#include "media/gpu/chromeos/dmabuf_video_frame_pool.h"
#include "media/gpu/chromeos/image_processor.h"
#include "media/gpu/chromeos/image_processor_factory.h"
@@ -54,6 +56,14 @@ base::Optional<Fourcc> PickRenderableFourcc(
return base::nullopt;
}
+// Appends |new_status| to |parent_status| unless |parent_status| is kOk, in
+// that case we cannot append, just forward |new_status| then.
+Status AppendOrForwardStatus(Status parent_status, Status new_status) {
+ if (parent_status.is_ok())
+ return new_status;
+ return std::move(parent_status).AddCause(std::move(new_status));
+}
+
} // namespace
DecoderInterface::DecoderInterface(
@@ -68,38 +78,36 @@ std::unique_ptr<VideoDecoder> VideoDecoderPipeline::Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
std::unique_ptr<DmabufVideoFramePool> frame_pool,
std::unique_ptr<VideoFrameConverter> frame_converter,
- gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory,
- GetCreateVDFunctionsCB get_create_vd_functions_cb) {
+ std::unique_ptr<MediaLog> /*media_log*/,
+ GetCreateDecoderFunctionsCB get_create_decoder_functions_cb) {
if (!client_task_runner || !frame_pool || !frame_converter) {
VLOGF(1) << "One of arguments is nullptr.";
return nullptr;
}
- if (get_create_vd_functions_cb.Run(nullptr).empty()) {
+ if (get_create_decoder_functions_cb.Run().empty()) {
VLOGF(1) << "No available function to create video decoder.";
return nullptr;
}
- return base::WrapUnique<VideoDecoder>(new VideoDecoderPipeline(
+ auto* decoder = new VideoDecoderPipeline(
std::move(client_task_runner), std::move(frame_pool),
- std::move(frame_converter), gpu_memory_buffer_factory,
- std::move(get_create_vd_functions_cb)));
+ std::move(frame_converter), std::move(get_create_decoder_functions_cb));
+ return std::make_unique<AsyncDestroyVideoDecoder<VideoDecoderPipeline>>(
+ base::WrapUnique(decoder));
}
VideoDecoderPipeline::VideoDecoderPipeline(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
std::unique_ptr<DmabufVideoFramePool> frame_pool,
std::unique_ptr<VideoFrameConverter> frame_converter,
- gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory,
- GetCreateVDFunctionsCB get_create_vd_functions_cb)
+ GetCreateDecoderFunctionsCB get_create_decoder_functions_cb)
: client_task_runner_(std::move(client_task_runner)),
decoder_task_runner_(base::ThreadPool::CreateSingleThreadTaskRunner(
{base::WithBaseSyncPrimitives(), base::TaskPriority::USER_VISIBLE},
base::SingleThreadTaskRunnerThreadMode::DEDICATED)),
main_frame_pool_(std::move(frame_pool)),
- gpu_memory_buffer_factory_(gpu_memory_buffer_factory),
- frame_converter_(std::move(frame_converter)),
- get_create_vd_functions_cb_(std::move(get_create_vd_functions_cb)) {
+ frame_converter_(std::move(frame_converter)) {
DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
DETACH_FROM_SEQUENCE(decoder_sequence_checker_);
DCHECK(main_frame_pool_);
@@ -110,6 +118,8 @@ VideoDecoderPipeline::VideoDecoderPipeline(
client_weak_this_ = client_weak_this_factory_.GetWeakPtr();
decoder_weak_this_ = decoder_weak_this_factory_.GetWeakPtr();
+ remaining_create_decoder_functions_ = get_create_decoder_functions_cb.Run();
+
main_frame_pool_->set_parent_task_runner(decoder_task_runner_);
frame_converter_->Initialize(
decoder_task_runner_,
@@ -118,37 +128,30 @@ VideoDecoderPipeline::VideoDecoderPipeline(
}
VideoDecoderPipeline::~VideoDecoderPipeline() {
- // We have to destroy |main_frame_pool_| on |decoder_task_runner_|, so the
- // destructor is also called on |decoder_task_runner_|.
- DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
- DVLOGF(3);
-}
-
-void VideoDecoderPipeline::Destroy() {
- DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
- DVLOGF(2);
-
- client_weak_this_factory_.InvalidateWeakPtrs();
-
- decoder_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&VideoDecoderPipeline::DestroyTask, decoder_weak_this_));
-}
-
-void VideoDecoderPipeline::DestroyTask() {
+ // We have to destroy |main_frame_pool_| and |frame_converter_| on
+ // |decoder_task_runner_|, so the destructor must be called on
+ // |decoder_task_runner_|.
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
decoder_weak_this_factory_.InvalidateWeakPtrs();
- // The frame pool and converter should be destroyed on |decoder_task_runner_|.
main_frame_pool_.reset();
frame_converter_.reset();
decoder_.reset();
- used_create_vd_func_ = nullptr;
+ remaining_create_decoder_functions_.clear();
+}
+
+void VideoDecoderPipeline::DestroyAsync(
+ std::unique_ptr<VideoDecoderPipeline> decoder) {
+ DVLOGF(2);
+ DCHECK(decoder);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(decoder->client_sequence_checker_);
- delete this;
+ decoder->client_weak_this_factory_.InvalidateWeakPtrs();
+ auto* decoder_task_runner = decoder->decoder_task_runner_.get();
+ decoder_task_runner->DeleteSoon(FROM_HERE, std::move(decoder));
}
std::string VideoDecoderPipeline::GetDisplayName() const {
@@ -182,11 +185,11 @@ bool VideoDecoderPipeline::CanReadWithoutStalling() const {
}
void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
- bool low_delay,
+ bool /* low_delay */,
CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb,
- const WaitingCB& waiting_cb) {
+ const WaitingCB& /* waiting_cb */) {
DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
VLOGF(2) << "config: " << config.AsHumanReadableString();
@@ -217,79 +220,76 @@ void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
void VideoDecoderPipeline::InitializeTask(const VideoDecoderConfig& config,
InitCB init_cb,
const OutputCB& output_cb) {
+ DVLOGF(3);
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK(!init_cb_);
client_output_cb_ = std::move(output_cb);
init_cb_ = std::move(init_cb);
- base::queue<VideoDecoderPipeline::CreateVDFunc> create_vd_funcs =
- get_create_vd_functions_cb_.Run(used_create_vd_func_);
+ // Initialize() and correspondingly InitializeTask(), are called both on first
+ // initialization and on subsequent stream |config| changes, e.g. change of
+ // resolution. Subsequent initializations are marked by |decoder_| already
+ // existing.
if (!decoder_) {
- CreateAndInitializeVD(std::move(create_vd_funcs), config,
- StatusCode::kChromeOSVideoDecoderNoDecoders);
+ CreateAndInitializeVD(config, Status());
} else {
decoder_->Initialize(
config,
- // If it fails to re-initialize current |decoder_|, it will create
- // another decoder instance by trying available VD creation functions
- // again. See |OnInitializeDone| for detail.
base::BindOnce(&VideoDecoderPipeline::OnInitializeDone,
- decoder_weak_this_, std::move(create_vd_funcs), config,
- StatusCode::kChromeOSVideoDecoderNoDecoders),
+ decoder_weak_this_, config, Status()),
base::BindRepeating(&VideoDecoderPipeline::OnFrameDecoded,
decoder_weak_this_));
}
}
-void VideoDecoderPipeline::CreateAndInitializeVD(
- base::queue<VideoDecoderPipeline::CreateVDFunc> create_vd_funcs,
- VideoDecoderConfig config,
- ::media::Status parent_error) {
+void VideoDecoderPipeline::CreateAndInitializeVD(VideoDecoderConfig config,
+ Status parent_error) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK(init_cb_);
DCHECK(!decoder_);
- DCHECK(!used_create_vd_func_);
DVLOGF(3);
- if (create_vd_funcs.empty()) {
- DVLOGF(2) << "No available video decoder.";
+ if (remaining_create_decoder_functions_.empty()) {
+ DVLOGF(2) << "No remaining video decoder create functions to try";
client_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(std::move(init_cb_), parent_error));
+ FROM_HERE,
+ base::BindOnce(
+ std::move(init_cb_),
+ AppendOrForwardStatus(
+ parent_error, StatusCode::kChromeOSVideoDecoderNoDecoders)));
return;
}
- used_create_vd_func_ = create_vd_funcs.front();
- create_vd_funcs.pop();
- decoder_ = used_create_vd_func_(decoder_task_runner_, decoder_weak_this_);
+ decoder_ = remaining_create_decoder_functions_.front()(decoder_task_runner_,
+ decoder_weak_this_);
+ remaining_create_decoder_functions_.pop_front();
+
if (!decoder_) {
- DVLOGF(2) << "Failed to create VideoDecoder.";
- used_create_vd_func_ = nullptr;
+ DVLOGF(2) << "|decoder_| creation failed, trying again with the next "
+ "available create function.";
return CreateAndInitializeVD(
- std::move(create_vd_funcs), config,
- std::move(parent_error).AddCause(StatusCode::kDecoderFailedCreation));
+ config, AppendOrForwardStatus(parent_error,
+ StatusCode::kDecoderFailedCreation));
}
decoder_->Initialize(
config,
base::BindOnce(&VideoDecoderPipeline::OnInitializeDone,
- decoder_weak_this_, std::move(create_vd_funcs), config,
- std::move(parent_error)),
+ decoder_weak_this_, config, std::move(parent_error)),
base::BindRepeating(&VideoDecoderPipeline::OnFrameDecoded,
decoder_weak_this_));
}
-void VideoDecoderPipeline::OnInitializeDone(
- base::queue<VideoDecoderPipeline::CreateVDFunc> create_vd_funcs,
- VideoDecoderConfig config,
- ::media::Status parent_error,
- ::media::Status status) {
+void VideoDecoderPipeline::OnInitializeDone(VideoDecoderConfig config,
+ Status parent_error,
+ Status status) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK(init_cb_);
DVLOGF(4) << "Initialization status = " << status.code();
if (status.is_ok()) {
- DVLOGF(2) << "Initialize VD successfully.";
+ DVLOGF(2) << "|decoder_| successfully initialized.";
// TODO(tmathmeyer) consider logging the causes of |parent_error| as they
// might have infor about why other decoders failed.
client_task_runner_->PostTask(
@@ -297,11 +297,11 @@ void VideoDecoderPipeline::OnInitializeDone(
return;
}
- DVLOGF(3) << "Reset VD, try the next create function.";
+ DVLOGF(3) << "|decoder_| initialization failed, trying again with the next "
+ "available create function.";
decoder_ = nullptr;
- used_create_vd_func_ = nullptr;
- CreateAndInitializeVD(std::move(create_vd_funcs), config,
- std::move(parent_error).AddCause(std::move(status)));
+ CreateAndInitializeVD(config,
+ AppendOrForwardStatus(parent_error, std::move(status)));
}
void VideoDecoderPipeline::Reset(base::OnceClosure closure) {
@@ -417,9 +417,9 @@ void VideoDecoderPipeline::OnFrameConverted(scoped_refptr<VideoFrame> frame) {
}
// Flag that the video frame is capable of being put in an overlay.
- frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true);
+ frame->metadata()->allow_overlay = true;
// Flag that the video frame was decoded in a power efficient way.
- frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT, true);
+ frame->metadata()->power_efficient = true;
// MojoVideoDecoderService expects the |output_cb_| to be called on the client
// task runner, even though media::VideoDecoder states frames should be output
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline.h b/chromium/media/gpu/chromeos/video_decoder_pipeline.h
index 030ed9058e1..c0c6ac10c5f 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline.h
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline.h
@@ -8,7 +8,6 @@
#include <memory>
#include "base/callback_forward.h"
-#include "base/containers/queue.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "base/sequence_checker.h"
@@ -24,13 +23,10 @@ namespace base {
class SequencedTaskRunner;
}
-namespace gpu {
-class GpuMemoryBufferFactory;
-} // namespace gpu
-
namespace media {
class DmabufVideoFramePool;
+class MediaLog;
// An interface that defines methods to operate on video decoder components
// inside the VideoDecoderPipeline. The interface is similar to
@@ -42,7 +38,7 @@ class DmabufVideoFramePool;
// Note: All methods and callbacks should be called on the same sequence.
class MEDIA_GPU_EXPORT DecoderInterface {
public:
- using InitCB = base::OnceCallback<void(::media::Status status)>;
+ using InitCB = base::OnceCallback<void(Status status)>;
// TODO(crbug.com/998413): Replace VideoFrame to GpuMemoryBuffer-based
// instance.
using OutputCB = base::RepeatingCallback<void(scoped_refptr<VideoFrame>)>;
@@ -130,21 +126,22 @@ class MEDIA_GPU_EXPORT DecoderInterface {
class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
public DecoderInterface::Client {
public:
- // Function signature for creating VideoDecoder.
- using CreateVDFunc = std::unique_ptr<DecoderInterface> (*)(
+ using CreateDecoderFunction = std::unique_ptr<DecoderInterface> (*)(
scoped_refptr<base::SequencedTaskRunner>,
base::WeakPtr<DecoderInterface::Client>);
- using GetCreateVDFunctionsCB =
- base::RepeatingCallback<base::queue<CreateVDFunc>(CreateVDFunc)>;
+ using CreateDecoderFunctions = std::list<CreateDecoderFunction>;
+ using GetCreateDecoderFunctionsCB =
+ base::RepeatingCallback<CreateDecoderFunctions()>;
static std::unique_ptr<VideoDecoder> Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
std::unique_ptr<DmabufVideoFramePool> frame_pool,
std::unique_ptr<VideoFrameConverter> frame_converter,
- gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory,
- GetCreateVDFunctionsCB get_create_vd_functions_cb);
+ std::unique_ptr<MediaLog> media_log,
+ GetCreateDecoderFunctionsCB get_create_decoder_functions_cb);
~VideoDecoderPipeline() override;
+ static void DestroyAsync(std::unique_ptr<VideoDecoderPipeline>);
// VideoDecoder implementation
std::string GetDisplayName() const override;
@@ -152,7 +149,6 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
int GetMaxDecodeRequests() const override;
bool NeedsBitstreamConversion() const override;
bool CanReadWithoutStalling() const override;
-
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
@@ -173,19 +169,13 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
const gfx::Rect& visible_rect) override;
private:
- // Get a list of the available functions for creating VideoDeocoder except
- // |current_func| one.
- static base::queue<CreateVDFunc> GetCreateVDFunctions(
- CreateVDFunc current_func);
+ friend class VideoDecoderPipelineTest;
VideoDecoderPipeline(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
std::unique_ptr<DmabufVideoFramePool> frame_pool,
std::unique_ptr<VideoFrameConverter> frame_converter,
- gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory,
- GetCreateVDFunctionsCB get_create_vd_functions_cb);
- void Destroy() override;
- void DestroyTask();
+ GetCreateDecoderFunctionsCB get_create_decoder_functions_cb);
void InitializeTask(const VideoDecoderConfig& config,
InitCB init_cb,
@@ -193,13 +183,10 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
void ResetTask(base::OnceClosure closure);
void DecodeTask(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb);
- void CreateAndInitializeVD(base::queue<CreateVDFunc> create_vd_funcs,
- VideoDecoderConfig config,
- ::media::Status parent_error);
- void OnInitializeDone(base::queue<CreateVDFunc> create_vd_funcs,
- VideoDecoderConfig config,
- ::media::Status parent_error,
- ::media::Status success);
+ void CreateAndInitializeVD(VideoDecoderConfig config, Status parent_error);
+ void OnInitializeDone(VideoDecoderConfig config,
+ Status parent_error,
+ Status status);
void OnDecodeDone(bool eos_buffer, DecodeCB decode_cb, DecodeStatus status);
void OnResetDone();
@@ -241,10 +228,6 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
// the client should be created using this pool.
// Used on |decoder_task_runner_|.
std::unique_ptr<DmabufVideoFramePool> main_frame_pool_;
- // Used to generate additional frame pools for intermediate results if
- // required. The instance is indirectly owned by GpuChildThread, therefore
- // alive as long as the GPU process is.
- gpu::GpuMemoryBufferFactory* const gpu_memory_buffer_factory_;
// The image processor is only created when the decoder cannot output frames
// with renderable format.
@@ -254,14 +237,14 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
// |client_task_runner_|.
std::unique_ptr<VideoFrameConverter> frame_converter_;
- // The callback to get a list of function for creating DecoderInterface.
- GetCreateVDFunctionsCB get_create_vd_functions_cb_;
-
// The current video decoder implementation. Valid after initialization is
// successfully done.
std::unique_ptr<DecoderInterface> decoder_;
- // The create function of |decoder_|. nullptr iff |decoder_| is nullptr.
- CreateVDFunc used_create_vd_func_ = nullptr;
+
+ // |remaining_create_decoder_functions_| holds all the potential video decoder
+ // creation functions. We try them all in the given order until one succeeds.
+ // Only used after initialization on |decoder_sequence_checker_|.
+ CreateDecoderFunctions remaining_create_decoder_functions_;
// Callback from the client. These callback are called on
// |client_task_runner_|.
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc b/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
new file mode 100644
index 00000000000..b95a52d7e64
--- /dev/null
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
@@ -0,0 +1,229 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/chromeos/video_decoder_pipeline.h"
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/test/gmock_callback_support.h"
+#include "base/test/task_environment.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/media_util.h"
+#include "media/base/status.h"
+#include "media/base/video_decoder_config.h"
+#include "media/gpu/chromeos/dmabuf_video_frame_pool.h"
+#include "media/gpu/chromeos/mailbox_video_frame_converter.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using base::test::RunClosure;
+using ::testing::_;
+using ::testing::TestWithParam;
+
+namespace media {
+
+MATCHER_P(MatchesStatusCode, status_code, "") {
+ // media::Status doesn't provide an operator==(...), we add here a simple one.
+ return arg.code() == status_code;
+}
+
+class MockVideoFramePool : public DmabufVideoFramePool {
+ public:
+ MockVideoFramePool() = default;
+ ~MockVideoFramePool() override = default;
+
+ // DmabufVideoFramePool implementation.
+ MOCK_METHOD5(Initialize,
+ base::Optional<GpuBufferLayout>(const Fourcc&,
+ const gfx::Size&,
+ const gfx::Rect&,
+ const gfx::Size&,
+ size_t));
+ MOCK_METHOD0(GetFrame, scoped_refptr<VideoFrame>());
+ MOCK_METHOD0(IsExhausted, bool());
+ MOCK_METHOD1(NotifyWhenFrameAvailable, void(base::OnceClosure));
+};
+
+constexpr gfx::Size kCodedSize(48, 36);
+
+class MockDecoder : public DecoderInterface {
+ public:
+ MockDecoder()
+ : DecoderInterface(base::ThreadTaskRunnerHandle::Get(),
+ base::WeakPtr<DecoderInterface::Client>(nullptr)) {}
+ ~MockDecoder() override = default;
+
+ MOCK_METHOD3(Initialize,
+ void(const VideoDecoderConfig&, InitCB, const OutputCB&));
+ MOCK_METHOD2(Decode, void(scoped_refptr<DecoderBuffer>, DecodeCB));
+ MOCK_METHOD1(Reset, void(base::OnceClosure));
+ MOCK_METHOD0(ApplyResolutionChange, void());
+};
+
+struct DecoderPipelineTestParams {
+ VideoDecoderPipeline::CreateDecoderFunctions create_decoder_functions;
+ StatusCode status_code;
+};
+
+class VideoDecoderPipelineTest
+ : public testing::TestWithParam<DecoderPipelineTestParams> {
+ public:
+ VideoDecoderPipelineTest()
+ : config_(kCodecVP8,
+ VP8PROFILE_ANY,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(),
+ kNoTransformation,
+ kCodedSize,
+ gfx::Rect(kCodedSize),
+ kCodedSize,
+ EmptyExtraData(),
+ EncryptionScheme::kUnencrypted),
+ pool_(new MockVideoFramePool),
+ converter_(new VideoFrameConverter),
+ decoder_(new VideoDecoderPipeline(
+ base::ThreadTaskRunnerHandle::Get(),
+ std::move(pool_),
+ std::move(converter_),
+ base::BindRepeating([]() {
+ // This callback needs to be configured in the individual tests.
+ return VideoDecoderPipeline::CreateDecoderFunctions();
+ }))) {}
+ ~VideoDecoderPipelineTest() override = default;
+
+ void TearDown() override {
+ VideoDecoderPipeline::DestroyAsync(std::move(decoder_));
+ task_environment_.RunUntilIdle();
+ }
+ MOCK_METHOD1(OnInit, void(Status));
+ MOCK_METHOD1(OnOutput, void(scoped_refptr<VideoFrame>));
+
+ void SetCreateDecoderFunctions(
+ VideoDecoderPipeline::CreateDecoderFunctions functions) {
+ decoder_->remaining_create_decoder_functions_ = functions;
+ }
+
+ void InitializeDecoder() {
+ decoder_->Initialize(
+ config_, false /* low_delay */, nullptr /* cdm_context */,
+ base::BindOnce(&VideoDecoderPipelineTest::OnInit,
+ base::Unretained(this)),
+ base::BindRepeating(&VideoDecoderPipelineTest::OnOutput,
+ base::Unretained(this)),
+ base::DoNothing());
+ }
+
+ static std::unique_ptr<DecoderInterface> CreateNullMockDecoder(
+ scoped_refptr<base::SequencedTaskRunner> /* decoder_task_runner */,
+ base::WeakPtr<DecoderInterface::Client> /* client */) {
+ return nullptr;
+ }
+
+ // Creates a MockDecoder with an EXPECT_CALL on Initialize that returns ok.
+ static std::unique_ptr<DecoderInterface> CreateGoodMockDecoder(
+ scoped_refptr<base::SequencedTaskRunner> /* decoder_task_runner */,
+ base::WeakPtr<DecoderInterface::Client> /* client */) {
+ std::unique_ptr<MockDecoder> decoder(new MockDecoder());
+ EXPECT_CALL(*decoder, Initialize(_, _, _))
+ .WillOnce(::testing::WithArgs<1>([](VideoDecoder::InitCB init_cb) {
+ std::move(init_cb).Run(OkStatus());
+ }));
+ return std::move(decoder);
+ }
+
+ // Creates a MockDecoder with an EXPECT_CALL on Initialize that returns error.
+ static std::unique_ptr<DecoderInterface> CreateBadMockDecoder(
+ scoped_refptr<base::SequencedTaskRunner> /* decoder_task_runner */,
+ base::WeakPtr<DecoderInterface::Client> /* client */) {
+ std::unique_ptr<MockDecoder> decoder(new MockDecoder());
+ EXPECT_CALL(*decoder, Initialize(_, _, _))
+ .WillOnce(::testing::WithArgs<1>([](VideoDecoder::InitCB init_cb) {
+ std::move(init_cb).Run(StatusCode::kDecoderFailedInitialization);
+ }));
+ return std::move(decoder);
+ }
+
+ DecoderInterface* GetUnderlyingDecoder() { return decoder_->decoder_.get(); }
+
+ base::test::TaskEnvironment task_environment_;
+ const VideoDecoderConfig config_;
+ DecoderInterface* underlying_decoder_ptr_ = nullptr;
+
+ std::unique_ptr<MockVideoFramePool> pool_;
+ std::unique_ptr<VideoFrameConverter> converter_;
+ std::unique_ptr<VideoDecoderPipeline> decoder_;
+};
+
+// Verifies the status code for several typical CreateDecoderFunctions cases.
+TEST_P(VideoDecoderPipelineTest, Initialize) {
+ SetCreateDecoderFunctions(GetParam().create_decoder_functions);
+
+ base::RunLoop run_loop;
+ base::Closure quit_closure = run_loop.QuitClosure();
+ EXPECT_CALL(*this, OnInit(MatchesStatusCode(GetParam().status_code)))
+ .WillOnce(RunClosure(quit_closure));
+
+ InitializeDecoder();
+ run_loop.Run();
+
+ EXPECT_EQ(GetParam().status_code == StatusCode::kOk,
+ !!GetUnderlyingDecoder());
+}
+
+const struct DecoderPipelineTestParams kDecoderPipelineTestParams[] = {
+ // An empty set of CreateDecoderFunctions.
+ {{}, StatusCode::kChromeOSVideoDecoderNoDecoders},
+
+ // Just one CreateDecoderFunctions that fails to Create() (i.e. returns a
+ // null Decoder)
+ {{&VideoDecoderPipelineTest::CreateNullMockDecoder},
+ StatusCode::kDecoderFailedCreation},
+
+ // Just one CreateDecoderFunctions that works fine, i.e. Create()s and
+ // Initialize()s correctly.
+ {{&VideoDecoderPipelineTest::CreateGoodMockDecoder}, StatusCode::kOk},
+
+ // One CreateDecoderFunctions that Create()s ok but fails to Initialize()
+ // correctly
+ {{&VideoDecoderPipelineTest::CreateBadMockDecoder},
+ StatusCode::kDecoderFailedInitialization},
+
+ // Two CreateDecoderFunctions, one that fails to Create() (i.e. returns a
+ // null Decoder), and one that works. The first error StatusCode is lost
+ // because VideoDecoderPipeline::OnInitializeDone() throws it away.
+ {{&VideoDecoderPipelineTest::CreateNullMockDecoder,
+ &VideoDecoderPipelineTest::CreateGoodMockDecoder},
+ StatusCode::kOk},
+
+ // Two CreateDecoderFunctions, one that Create()s ok but fails to
+ // Initialize(), and one that works. The first error StatusCode is lost
+ // because VideoDecoderPipeline::OnInitializeDone() throws it away.
+ {{&VideoDecoderPipelineTest::CreateBadMockDecoder,
+ &VideoDecoderPipelineTest::CreateGoodMockDecoder},
+ StatusCode::kOk},
+
+ // Two CreateDecoderFunctions, one that fails to Create() (i.e. returns a
+ // null Decoder), and one that fails to Initialize(). The first error
+ // StatusCode is the only one we can check here: a Status object is created
+ // with a "primary" StatusCode, archiving subsequent ones in a private
+ // member.
+ {{&VideoDecoderPipelineTest::CreateNullMockDecoder,
+ &VideoDecoderPipelineTest::CreateBadMockDecoder},
+ StatusCode::kDecoderFailedCreation},
+ // Previous one in reverse order.
+ {{&VideoDecoderPipelineTest::CreateBadMockDecoder,
+ &VideoDecoderPipelineTest::CreateNullMockDecoder},
+ StatusCode::kDecoderFailedInitialization},
+
+ {{&VideoDecoderPipelineTest::CreateBadMockDecoder,
+ &VideoDecoderPipelineTest::CreateBadMockDecoder,
+ &VideoDecoderPipelineTest::CreateGoodMockDecoder},
+ StatusCode::kOk},
+};
+
+INSTANTIATE_TEST_SUITE_P(All,
+ VideoDecoderPipelineTest,
+ testing::ValuesIn(kDecoderPipelineTestParams));
+
+} // namespace media
diff --git a/chromium/media/gpu/h264_decoder.cc b/chromium/media/gpu/h264_decoder.cc
index 59ab81d16ba..93abea8c715 100644
--- a/chromium/media/gpu/h264_decoder.cc
+++ b/chromium/media/gpu/h264_decoder.cc
@@ -8,6 +8,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
+#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "base/optional.h"
#include "base/stl_util.h"
@@ -567,6 +568,13 @@ bool H264Decoder::ModifyReferencePicList(const H264SliceHeader* slice_hdr,
DVLOG(1) << "Malformed stream, no pic num " << pic_num_lx;
return false;
}
+
+ if (ref_idx_lx > num_ref_idx_lX_active_minus1) {
+ DVLOG(1) << "Bounds mismatch: expected " << ref_idx_lx
+ << " <= " << num_ref_idx_lX_active_minus1;
+ return false;
+ }
+
ShiftRightAndInsert(ref_pic_listx, ref_idx_lx,
num_ref_idx_lX_active_minus1, pic);
ref_idx_lx++;
diff --git a/chromium/media/gpu/h264_dpb.cc b/chromium/media/gpu/h264_dpb.cc
index 02031457883..8ef3bafb255 100644
--- a/chromium/media/gpu/h264_dpb.cc
+++ b/chromium/media/gpu/h264_dpb.cc
@@ -7,6 +7,7 @@
#include <algorithm>
#include "base/logging.h"
+#include "base/notreached.h"
#include "base/stl_util.h"
#include "media/gpu/h264_dpb.h"
diff --git a/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc b/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
index 464c10626ea..bd818a3e178 100644
--- a/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
+++ b/chromium/media/gpu/ipc/client/gpu_video_decode_accelerator_host.cc
@@ -292,8 +292,8 @@ void GpuVideoDecodeAcceleratorHost::OnNotifyError(uint32_t error) {
// Client::NotifyError() may Destroy() |this|, so calling it needs to be the
// last thing done on this stack!
- VideoDecodeAccelerator::Client* client = nullptr;
- std::swap(client, client_);
+ VideoDecodeAccelerator::Client* client = client_;
+ client_ = nullptr;
client->NotifyError(static_cast<VideoDecodeAccelerator::Error>(error));
}
diff --git a/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc b/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
index efe9f6031cf..77a7881304d 100644
--- a/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
+++ b/chromium/media/gpu/ipc/service/gpu_video_decode_accelerator.cc
@@ -121,9 +121,9 @@ class GpuVideoDecodeAccelerator::MessageFilter : public IPC::MessageFilter {
MessageFilter(GpuVideoDecodeAccelerator* owner, int32_t host_route_id)
: owner_(owner), host_route_id_(host_route_id) {}
- void OnChannelError() override { sender_ = NULL; }
+ void OnChannelError() override { sender_ = nullptr; }
- void OnChannelClosing() override { sender_ = NULL; }
+ void OnChannelClosing() override { sender_ = nullptr; }
void OnFilterAdded(IPC::Channel* channel) override { sender_ = channel; }
diff --git a/chromium/media/gpu/ipc/service/picture_buffer_manager.cc b/chromium/media/gpu/ipc/service/picture_buffer_manager.cc
index c9963a4ff87..d6164bb3820 100644
--- a/chromium/media/gpu/ipc/service/picture_buffer_manager.cc
+++ b/chromium/media/gpu/ipc/service/picture_buffer_manager.cc
@@ -218,15 +218,12 @@ class PictureBufferManagerImpl : public PictureBufferManager {
frame->set_color_space(picture.color_space());
- if (picture.allow_overlay())
- frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true);
- if (picture.read_lock_fences_enabled()) {
- frame->metadata()->SetBoolean(
- VideoFrameMetadata::READ_LOCK_FENCES_ENABLED, true);
- }
+ frame->metadata()->allow_overlay = picture.allow_overlay();
+ frame->metadata()->read_lock_fences_enabled =
+ picture.read_lock_fences_enabled();
// TODO(sandersd): Provide an API for VDAs to control this.
- frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT, true);
+ frame->metadata()->power_efficient = true;
return frame;
}
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.cc b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
index 5dbc5214002..f925028013a 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
@@ -16,6 +16,7 @@
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_info.h"
#include "gpu/config/gpu_preferences.h"
+#include "media/base/async_destroy_video_decoder.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/decoder_buffer.h"
#include "media/base/media_log.h"
@@ -102,8 +103,7 @@ bool IsProfileSupported(
} // namespace
// static
-std::unique_ptr<VdaVideoDecoder, std::default_delete<VideoDecoder>>
-VdaVideoDecoder::Create(
+std::unique_ptr<VideoDecoder> VdaVideoDecoder::Create(
scoped_refptr<base::SingleThreadTaskRunner> parent_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
std::unique_ptr<MediaLog> media_log,
@@ -111,21 +111,19 @@ VdaVideoDecoder::Create(
const gpu::GpuPreferences& gpu_preferences,
const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
GetStubCB get_stub_cb) {
- // Constructed in a variable to avoid _CheckUniquePtr() PRESUBMIT.py regular
- // expressions, which do not understand custom deleters.
- // TODO(sandersd): Extend base::WrapUnique() to handle this.
- std::unique_ptr<VdaVideoDecoder, std::default_delete<VideoDecoder>> ptr(
- new VdaVideoDecoder(
- std::move(parent_task_runner), std::move(gpu_task_runner),
- std::move(media_log), target_color_space,
- base::BindOnce(&PictureBufferManager::Create),
- base::BindOnce(&CreateCommandBufferHelper, std::move(get_stub_cb)),
- base::BindRepeating(&CreateAndInitializeVda, gpu_preferences,
- gpu_workarounds),
- GpuVideoAcceleratorUtil::ConvertGpuToMediaDecodeCapabilities(
- GpuVideoDecodeAcceleratorFactory::GetDecoderCapabilities(
- gpu_preferences, gpu_workarounds))));
- return ptr;
+ auto* decoder = new VdaVideoDecoder(
+ std::move(parent_task_runner), std::move(gpu_task_runner),
+ std::move(media_log), target_color_space,
+ base::BindOnce(&PictureBufferManager::Create),
+ base::BindOnce(&CreateCommandBufferHelper, std::move(get_stub_cb)),
+ base::BindRepeating(&CreateAndInitializeVda, gpu_preferences,
+ gpu_workarounds),
+ GpuVideoAcceleratorUtil::ConvertGpuToMediaDecodeCapabilities(
+ GpuVideoDecodeAcceleratorFactory::GetDecoderCapabilities(
+ gpu_preferences, gpu_workarounds)));
+
+ return std::make_unique<AsyncDestroyVideoDecoder<VdaVideoDecoder>>(
+ base::WrapUnique(decoder));
}
VdaVideoDecoder::VdaVideoDecoder(
@@ -160,38 +158,40 @@ VdaVideoDecoder::VdaVideoDecoder(
gpu_weak_this_));
}
-void VdaVideoDecoder::Destroy() {
+void VdaVideoDecoder::DestroyAsync(std::unique_ptr<VdaVideoDecoder> decoder) {
DVLOG(1) << __func__;
- DCHECK(parent_task_runner_->BelongsToCurrentThread());
+ DCHECK(decoder);
+ DCHECK(decoder->parent_task_runner_->BelongsToCurrentThread());
- // TODO(sandersd): The documentation says that Destroy() fires any pending
- // callbacks.
+ // TODO(sandersd): The documentation says that DestroyAsync() fires any
+ // pending callbacks.
// Prevent any more callbacks to this thread.
- parent_weak_this_factory_.InvalidateWeakPtrs();
+ decoder->parent_weak_this_factory_.InvalidateWeakPtrs();
// Pass ownership of the destruction process over to the GPU thread.
- gpu_task_runner_->PostTask(
+ auto* gpu_task_runner = decoder->gpu_task_runner_.get();
+ gpu_task_runner->PostTask(
FROM_HERE,
- base::BindOnce(&VdaVideoDecoder::DestroyOnGpuThread, gpu_weak_this_));
+ base::BindOnce(&VdaVideoDecoder::CleanupOnGpuThread, std::move(decoder)));
}
-void VdaVideoDecoder::DestroyOnGpuThread() {
+void VdaVideoDecoder::CleanupOnGpuThread(
+ std::unique_ptr<VdaVideoDecoder> decoder) {
DVLOG(2) << __func__;
- DCHECK(gpu_task_runner_->BelongsToCurrentThread());
+ DCHECK(decoder);
+ DCHECK(decoder->gpu_task_runner_->BelongsToCurrentThread());
// VDA destruction is likely to result in reentrant calls to
// NotifyEndOfBitstreamBuffer(). Invalidating |gpu_weak_vda_| ensures that we
// don't call back into |vda_| during its destruction.
- gpu_weak_vda_factory_ = nullptr;
- vda_ = nullptr;
- media_log_ = nullptr;
+ decoder->gpu_weak_vda_factory_ = nullptr;
+ decoder->vda_ = nullptr;
+ decoder->media_log_ = nullptr;
// Because |parent_weak_this_| was invalidated in Destroy(), picture buffer
// dismissals since then have been dropped on the floor.
- picture_buffer_manager_->DismissAllPictureBuffers();
-
- delete this;
+ decoder->picture_buffer_manager_->DismissAllPictureBuffers();
}
VdaVideoDecoder::~VdaVideoDecoder() {
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.h b/chromium/media/gpu/ipc/service/vda_video_decoder.h
index 07f475b7d43..72f5cf73ebb 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder.h
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.h
@@ -64,14 +64,34 @@ class VdaVideoDecoder : public VideoDecoder,
// called on the GPU thread.
//
// See VdaVideoDecoder() for other arguments.
- static std::unique_ptr<VdaVideoDecoder, std::default_delete<VideoDecoder>>
- Create(scoped_refptr<base::SingleThreadTaskRunner> parent_task_runner,
- scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
- std::unique_ptr<MediaLog> media_log,
- const gfx::ColorSpace& target_color_space,
- const gpu::GpuPreferences& gpu_preferences,
- const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
- GetStubCB get_stub_cb);
+ static std::unique_ptr<VideoDecoder> Create(
+ scoped_refptr<base::SingleThreadTaskRunner> parent_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ std::unique_ptr<MediaLog> media_log,
+ const gfx::ColorSpace& target_color_space,
+ const gpu::GpuPreferences& gpu_preferences,
+ const gpu::GpuDriverBugWorkarounds& gpu_workarounds,
+ GetStubCB get_stub_cb);
+
+ ~VdaVideoDecoder() override;
+ static void DestroyAsync(std::unique_ptr<VdaVideoDecoder>);
+
+ // media::VideoDecoder implementation.
+ std::string GetDisplayName() const override;
+ void Initialize(const VideoDecoderConfig& config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ InitCB init_cb,
+ const OutputCB& output_cb,
+ const WaitingCB& waiting_cb) override;
+ void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) override;
+ void Reset(base::OnceClosure reset_cb) override;
+ bool NeedsBitstreamConversion() const override;
+ bool CanReadWithoutStalling() const override;
+ int GetMaxDecodeRequests() const override;
+
+ private:
+ friend class VdaVideoDecoderTest;
// |parent_task_runner|: Task runner that |this| should operate on. All
// methods must be called on |parent_task_runner| (should be the Mojo
@@ -95,30 +115,6 @@ class VdaVideoDecoder : public VideoDecoder,
CreateAndInitializeVdaCB create_and_initialize_vda_cb,
const VideoDecodeAccelerator::Capabilities& vda_capabilities);
- // media::VideoDecoder implementation.
- std::string GetDisplayName() const override;
- void Initialize(const VideoDecoderConfig& config,
- bool low_delay,
- CdmContext* cdm_context,
- InitCB init_cb,
- const OutputCB& output_cb,
- const WaitingCB& waiting_cb) override;
- void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) override;
- void Reset(base::OnceClosure reset_cb) override;
- bool NeedsBitstreamConversion() const override;
- bool CanReadWithoutStalling() const override;
- int GetMaxDecodeRequests() const override;
-
- private:
- void Destroy() override;
-
- protected:
- // Owners should call Destroy(). This is automatic via
- // std::default_delete<media::VideoDecoder> when held by a
- // std::unique_ptr<media::VideoDecoder>.
- ~VdaVideoDecoder() override;
-
- private:
// media::VideoDecodeAccelerator::Client implementation.
void NotifyInitializationComplete(Status status) override;
void ProvidePictureBuffers(uint32_t requested_num_of_buffers,
@@ -134,7 +130,7 @@ class VdaVideoDecoder : public VideoDecoder,
void NotifyError(VideoDecodeAccelerator::Error error) override;
// Tasks and thread hopping.
- void DestroyOnGpuThread();
+ static void CleanupOnGpuThread(std::unique_ptr<VdaVideoDecoder>);
void InitializeOnGpuThread();
void ReinitializeOnGpuThread();
void InitializeDone(Status status);
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
index 0bbf1b38a24..6bff33cdc50 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
@@ -16,6 +16,7 @@
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "gpu/command_buffer/common/sync_token.h"
+#include "media/base/async_destroy_video_decoder.h"
#include "media/base/decode_status.h"
#include "media/base/decoder_buffer.h"
#include "media/base/media_util.h"
@@ -97,7 +98,7 @@ class VdaVideoDecoderTest : public testing::TestWithParam<bool> {
// In either case, vda_->Destroy() should be called once.
EXPECT_CALL(*vda_, Destroy());
- vdavd_.reset(new VdaVideoDecoder(
+ auto* vdavd = new VdaVideoDecoder(
parent_task_runner, gpu_task_runner, media_log_.Clone(),
gfx::ColorSpace(),
base::BindOnce(&VdaVideoDecoderTest::CreatePictureBufferManager,
@@ -106,8 +107,10 @@ class VdaVideoDecoderTest : public testing::TestWithParam<bool> {
base::Unretained(this)),
base::BindRepeating(&VdaVideoDecoderTest::CreateAndInitializeVda,
base::Unretained(this)),
- GetCapabilities()));
- client_ = vdavd_.get();
+ GetCapabilities());
+ vdavd_ = std::make_unique<AsyncDestroyVideoDecoder<VdaVideoDecoder>>(
+ base::WrapUnique(vdavd));
+ client_ = vdavd;
}
~VdaVideoDecoderTest() override {
@@ -137,7 +140,7 @@ class VdaVideoDecoderTest : public testing::TestWithParam<bool> {
}
void Initialize() {
- EXPECT_CALL(*vda_, Initialize(_, vdavd_.get())).WillOnce(Return(true));
+ EXPECT_CALL(*vda_, Initialize(_, client_)).WillOnce(Return(true));
EXPECT_CALL(*vda_, TryToSetupDecodeOnSeparateThread(_, _))
.WillOnce(Return(GetParam()));
EXPECT_CALL(init_cb_, Run(IsOkStatus()));
@@ -304,7 +307,7 @@ class VdaVideoDecoderTest : public testing::TestWithParam<bool> {
testing::StrictMock<MockVideoDecodeAccelerator>* vda_;
std::unique_ptr<VideoDecodeAccelerator> owned_vda_;
scoped_refptr<PictureBufferManager> pbm_;
- std::unique_ptr<VdaVideoDecoder, std::default_delete<VideoDecoder>> vdavd_;
+ std::unique_ptr<AsyncDestroyVideoDecoder<VdaVideoDecoder>> vdavd_;
VideoDecodeAccelerator::Client* client_;
uint64_t next_release_count_ = 1;
@@ -341,7 +344,7 @@ TEST_P(VdaVideoDecoderTest, Initialize_UnsupportedCodec) {
}
TEST_P(VdaVideoDecoderTest, Initialize_RejectedByVda) {
- EXPECT_CALL(*vda_, Initialize(_, vdavd_.get())).WillOnce(Return(false));
+ EXPECT_CALL(*vda_, Initialize(_, client_)).WillOnce(Return(false));
InitializeWithConfig(VideoDecoderConfig(
kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace::REC709(), kNoTransformation, gfx::Size(1920, 1088),
@@ -423,7 +426,7 @@ TEST_P(VdaVideoDecoderTest, Decode_OutputAndDismiss) {
TEST_P(VdaVideoDecoderTest, Decode_Output_MaintainsAspect) {
// Initialize with a config that has a 2:1 pixel aspect ratio.
- EXPECT_CALL(*vda_, Initialize(_, vdavd_.get())).WillOnce(Return(true));
+ EXPECT_CALL(*vda_, Initialize(_, client_)).WillOnce(Return(true));
EXPECT_CALL(*vda_, TryToSetupDecodeOnSeparateThread(_, _))
.WillOnce(Return(GetParam()));
InitializeWithConfig(VideoDecoderConfig(
diff --git a/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc b/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
index 84ba11e6f85..2fa16cecbe4 100644
--- a/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
+++ b/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
@@ -6,6 +6,7 @@
#include <memory>
+#include "base/logging.h"
#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -267,11 +268,8 @@ void VTVideoEncodeAccelerator::EncodeTask(scoped_refptr<VideoFrame> frame,
kVTEncodeFrameOptionKey_ForceKeyFrame,
force_keyframe ? kCFBooleanTrue : kCFBooleanFalse);
- base::TimeTicks ref_time;
- if (!frame->metadata()->GetTimeTicks(VideoFrameMetadata::REFERENCE_TIME,
- &ref_time)) {
- ref_time = base::TimeTicks::Now();
- }
+ base::TimeTicks ref_time =
+ frame->metadata()->reference_time.value_or(base::TimeTicks::Now());
auto timestamp_cm =
CMTimeMake(frame->timestamp().InMicroseconds(), USEC_PER_SEC);
// Wrap information we'll need after the frame is encoded in a heap object.
diff --git a/chromium/media/gpu/test/BUILD.gn b/chromium/media/gpu/test/BUILD.gn
index d1465599fa5..dde96cc47fe 100644
--- a/chromium/media/gpu/test/BUILD.gn
+++ b/chromium/media/gpu/test/BUILD.gn
@@ -69,10 +69,14 @@ source_set("test_helpers") {
"video_test_helpers.cc",
"video_test_helpers.h",
]
- public_deps = [ ":helpers" ]
+ public_deps = [
+ ":helpers",
+ "//media:test_support",
+ ]
deps = [
"//media/gpu",
"//testing/gtest",
+ "//third_party/libyuv",
]
if (use_ozone) {
deps += [ "//ui/ozone" ]
@@ -134,7 +138,14 @@ static_library("video_player_test_environment") {
static_library("video_encoder") {
testonly = true
sources = [
+ "bitstream_helpers.cc",
"bitstream_helpers.h",
+ "video_encoder/bitstream_file_writer.cc",
+ "video_encoder/bitstream_file_writer.h",
+ "video_encoder/bitstream_validator.cc",
+ "video_encoder/bitstream_validator.h",
+ "video_encoder/decoder_buffer_validator.cc",
+ "video_encoder/decoder_buffer_validator.h",
"video_encoder/video_encoder.cc",
"video_encoder/video_encoder.h",
"video_encoder/video_encoder_client.cc",
@@ -143,6 +154,7 @@ static_library("video_encoder") {
deps = [
":test_helpers",
"//media/gpu",
+ "//media/parsers",
"//testing/gtest:gtest",
]
}
diff --git a/chromium/media/gpu/v4l2/BUILD.gn b/chromium/media/gpu/v4l2/BUILD.gn
index 88b72e36308..38d0bb24ef5 100644
--- a/chromium/media/gpu/v4l2/BUILD.gn
+++ b/chromium/media/gpu/v4l2/BUILD.gn
@@ -41,16 +41,18 @@ source_set("v4l2") {
"v4l2_image_processor_backend.h",
"v4l2_slice_video_decode_accelerator.cc",
"v4l2_slice_video_decode_accelerator.h",
- "v4l2_slice_video_decoder.cc",
- "v4l2_slice_video_decoder.h",
"v4l2_stateful_workaround.cc",
"v4l2_stateful_workaround.h",
"v4l2_vda_helpers.cc",
"v4l2_vda_helpers.h",
"v4l2_video_decode_accelerator.cc",
"v4l2_video_decode_accelerator.h",
+ "v4l2_video_decoder.cc",
+ "v4l2_video_decoder.h",
"v4l2_video_decoder_backend.cc",
"v4l2_video_decoder_backend.h",
+ "v4l2_video_decoder_backend_stateful.cc",
+ "v4l2_video_decoder_backend_stateful.h",
"v4l2_video_decoder_backend_stateless.cc",
"v4l2_video_decoder_backend_stateless.h",
"v4l2_video_encode_accelerator.cc",
diff --git a/chromium/media/gpu/v4l2/v4l2_decode_surface.cc b/chromium/media/gpu/v4l2/v4l2_decode_surface.cc
index d4593868a6c..77206ba81c1 100644
--- a/chromium/media/gpu/v4l2/v4l2_decode_surface.cc
+++ b/chromium/media/gpu/v4l2/v4l2_decode_surface.cc
@@ -127,7 +127,7 @@ bool V4L2ConfigStoreDecodeSurface::Submit() {
case V4L2_MEMORY_MMAP:
return std::move(output_buffer()).QueueMMap();
case V4L2_MEMORY_DMABUF:
- return std::move(output_buffer()).QueueDMABuf(video_frame()->DmabufFds());
+ return std::move(output_buffer()).QueueDMABuf(video_frame());
default:
NOTREACHED() << "We should only use MMAP or DMABUF.";
}
@@ -174,8 +174,7 @@ bool V4L2RequestDecodeSurface::Submit() {
result = std::move(output_buffer()).QueueMMap();
break;
case V4L2_MEMORY_DMABUF:
- result = std::move(output_buffer())
- .QueueDMABuf(video_frame()->DmabufFds());
+ result = std::move(output_buffer()).QueueDMABuf(video_frame());
break;
default:
NOTREACHED() << "We should only use MMAP or DMABUF.";
diff --git a/chromium/media/gpu/v4l2/v4l2_device.cc b/chromium/media/gpu/v4l2/v4l2_device.cc
index 9b81f8046f2..ba9b5184914 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.cc
+++ b/chromium/media/gpu/v4l2/v4l2_device.cc
@@ -27,6 +27,7 @@
#include "media/base/color_plane_layout.h"
#include "media/base/video_types.h"
#include "media/gpu/chromeos/fourcc.h"
+#include "media/gpu/chromeos/platform_video_frame_utils.h"
#include "media/gpu/macros.h"
#include "media/gpu/v4l2/generic_v4l2_device.h"
#include "ui/gfx/native_pixmap_handle.h"
@@ -313,7 +314,7 @@ class V4L2BufferRefBase {
base::WeakPtr<V4L2Queue> queue);
~V4L2BufferRefBase();
- bool QueueBuffer();
+ bool QueueBuffer(scoped_refptr<VideoFrame> video_frame);
void* GetPlaneMapping(const size_t plane);
scoped_refptr<VideoFrame> GetVideoFrame();
@@ -368,13 +369,13 @@ V4L2BufferRefBase::~V4L2BufferRefBase() {
return_to_->ReturnBuffer(BufferId());
}
-bool V4L2BufferRefBase::QueueBuffer() {
+bool V4L2BufferRefBase::QueueBuffer(scoped_refptr<VideoFrame> video_frame) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!queue_)
return false;
- queued = queue_->QueueBuffer(&v4l2_buffer_);
+ queued = queue_->QueueBuffer(&v4l2_buffer_, std::move(video_frame));
return queued;
}
@@ -484,14 +485,15 @@ enum v4l2_memory V4L2WritableBufferRef::Memory() const {
return static_cast<enum v4l2_memory>(buffer_data_->v4l2_buffer_.memory);
}
-bool V4L2WritableBufferRef::DoQueue(V4L2RequestRef* request_ref) && {
+bool V4L2WritableBufferRef::DoQueue(V4L2RequestRef* request_ref,
+ scoped_refptr<VideoFrame> video_frame) && {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer_data_);
if (request_ref && buffer_data_->queue_->SupportsRequests())
request_ref->ApplyQueueBuffer(&(buffer_data_->v4l2_buffer_));
- bool queued = buffer_data_->QueueBuffer();
+ bool queued = buffer_data_->QueueBuffer(std::move(video_frame));
// Clear our own reference.
buffer_data_.reset();
@@ -512,7 +514,7 @@ bool V4L2WritableBufferRef::QueueMMap(
return false;
}
- return std::move(self).DoQueue(request_ref);
+ return std::move(self).DoQueue(request_ref, nullptr);
}
bool V4L2WritableBufferRef::QueueUserPtr(
@@ -539,7 +541,7 @@ bool V4L2WritableBufferRef::QueueUserPtr(
self.buffer_data_->v4l2_buffer_.m.planes[i].m.userptr =
reinterpret_cast<unsigned long>(ptrs[i]);
- return std::move(self).DoQueue(request_ref);
+ return std::move(self).DoQueue(request_ref, nullptr);
}
bool V4L2WritableBufferRef::QueueDMABuf(
@@ -563,7 +565,52 @@ bool V4L2WritableBufferRef::QueueDMABuf(
for (size_t i = 0; i < num_planes; i++)
self.buffer_data_->v4l2_buffer_.m.planes[i].m.fd = fds[i].get();
- return std::move(self).DoQueue(request_ref);
+ return std::move(self).DoQueue(request_ref, nullptr);
+}
+
+bool V4L2WritableBufferRef::QueueDMABuf(scoped_refptr<VideoFrame> video_frame,
+ V4L2RequestRef* request_ref) && {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(buffer_data_);
+
+ // Move ourselves so our data gets freed no matter when we return
+ V4L2WritableBufferRef self(std::move(*this));
+
+ if (self.Memory() != V4L2_MEMORY_DMABUF) {
+ VLOGF(1) << "Called on invalid buffer type!";
+ return false;
+ }
+
+ // TODO(andrescj): consider replacing this by a DCHECK.
+ if (video_frame->storage_type() != VideoFrame::STORAGE_GPU_MEMORY_BUFFER &&
+ video_frame->storage_type() != VideoFrame::STORAGE_DMABUFS) {
+ VLOGF(1) << "Only GpuMemoryBuffer and dma-buf VideoFrames are supported";
+ return false;
+ }
+
+ // The FDs duped by CreateGpuMemoryBufferHandle() will be closed after the
+ // call to DoQueue() which uses the VIDIOC_QBUF ioctl and so ends up
+ // increasing the reference count of the dma-buf. Thus, closing the FDs is
+ // safe.
+ // TODO(andrescj): for dma-buf VideoFrames, duping the FDs is unnecessary.
+ // Consider handling that path separately.
+ gfx::GpuMemoryBufferHandle gmb_handle =
+ CreateGpuMemoryBufferHandle(video_frame.get());
+ if (gmb_handle.type != gfx::GpuMemoryBufferType::NATIVE_PIXMAP) {
+ VLOGF(1) << "Failed to create GpuMemoryBufferHandle for frame!";
+ return false;
+ }
+ const std::vector<gfx::NativePixmapPlane>& planes =
+ gmb_handle.native_pixmap_handle.planes;
+
+ if (!self.buffer_data_->CheckNumFDsForFormat(planes.size()))
+ return false;
+
+ size_t num_planes = self.PlanesCount();
+ for (size_t i = 0; i < num_planes; i++)
+ self.buffer_data_->v4l2_buffer_.m.planes[i].m.fd = planes[i].fd.get();
+
+ return std::move(self).DoQueue(request_ref, std::move(video_frame));
}
bool V4L2WritableBufferRef::QueueDMABuf(
@@ -587,7 +634,7 @@ bool V4L2WritableBufferRef::QueueDMABuf(
for (size_t i = 0; i < num_planes; i++)
self.buffer_data_->v4l2_buffer_.m.planes[i].m.fd = planes[i].fd.get();
- return std::move(self).DoQueue(request_ref);
+ return std::move(self).DoQueue(request_ref, nullptr);
}
size_t V4L2WritableBufferRef::PlanesCount() const {
@@ -709,14 +756,20 @@ void V4L2WritableBufferRef::SetConfigStore(uint32_t config_store) {
}
V4L2ReadableBuffer::V4L2ReadableBuffer(const struct v4l2_buffer& v4l2_buffer,
- base::WeakPtr<V4L2Queue> queue)
+ base::WeakPtr<V4L2Queue> queue,
+ scoped_refptr<VideoFrame> video_frame)
: buffer_data_(
- std::make_unique<V4L2BufferRefBase>(v4l2_buffer, std::move(queue))) {
+ std::make_unique<V4L2BufferRefBase>(v4l2_buffer, std::move(queue))),
+ video_frame_(std::move(video_frame)) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
scoped_refptr<VideoFrame> V4L2ReadableBuffer::GetVideoFrame() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(buffer_data_);
+
+ if (buffer_data_->v4l2_buffer_.memory == V4L2_MEMORY_DMABUF && video_frame_)
+ return video_frame_;
return buffer_data_->GetVideoFrame();
}
@@ -806,8 +859,10 @@ class V4L2BufferRefFactory {
static V4L2ReadableBufferRef CreateReadableRef(
const struct v4l2_buffer& v4l2_buffer,
- base::WeakPtr<V4L2Queue> queue) {
- return new V4L2ReadableBuffer(v4l2_buffer, std::move(queue));
+ base::WeakPtr<V4L2Queue> queue,
+ scoped_refptr<VideoFrame> video_frame) {
+ return new V4L2ReadableBuffer(v4l2_buffer, std::move(queue),
+ std::move(video_frame));
}
};
@@ -1070,7 +1125,8 @@ base::Optional<V4L2WritableBufferRef> V4L2Queue::GetFreeBuffer() {
weak_this_factory_.GetWeakPtr());
}
-bool V4L2Queue::QueueBuffer(struct v4l2_buffer* v4l2_buffer) {
+bool V4L2Queue::QueueBuffer(struct v4l2_buffer* v4l2_buffer,
+ scoped_refptr<VideoFrame> video_frame) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
int ret = device_->Ioctl(VIDIOC_QBUF, v4l2_buffer);
@@ -1079,7 +1135,8 @@ bool V4L2Queue::QueueBuffer(struct v4l2_buffer* v4l2_buffer) {
return false;
}
- auto inserted = queued_buffers_.emplace(v4l2_buffer->index);
+ auto inserted =
+ queued_buffers_.emplace(v4l2_buffer->index, std::move(video_frame));
DCHECK_EQ(inserted.second, true);
device_->SchedulePoll();
@@ -1127,15 +1184,16 @@ std::pair<bool, V4L2ReadableBufferRef> V4L2Queue::DequeueBuffer() {
auto it = queued_buffers_.find(v4l2_buffer.index);
DCHECK(it != queued_buffers_.end());
- queued_buffers_.erase(*it);
+ scoped_refptr<VideoFrame> queued_frame = std::move(it->second);
+ queued_buffers_.erase(it);
if (QueuedBuffersCount() > 0)
device_->SchedulePoll();
DCHECK(free_buffers_);
- return std::make_pair(true,
- V4L2BufferRefFactory::CreateReadableRef(
- v4l2_buffer, weak_this_factory_.GetWeakPtr()));
+ return std::make_pair(true, V4L2BufferRefFactory::CreateReadableRef(
+ v4l2_buffer, weak_this_factory_.GetWeakPtr(),
+ std::move(queued_frame)));
}
bool V4L2Queue::IsStreaming() const {
@@ -1176,9 +1234,9 @@ bool V4L2Queue::Streamoff() {
return false;
}
- for (const auto& buffer_id : queued_buffers_) {
+ for (const auto& it : queued_buffers_) {
DCHECK(free_buffers_);
- free_buffers_->ReturnBuffer(buffer_id);
+ free_buffers_->ReturnBuffer(it.first);
}
queued_buffers_.clear();
@@ -1332,6 +1390,10 @@ VideoCodecProfile V4L2Device::V4L2ProfileToVideoCodecProfile(VideoCodec codec,
return H264PROFILE_EXTENDED;
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH:
return H264PROFILE_HIGH;
+ case V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH:
+ return H264PROFILE_STEREOHIGH;
+ case V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH:
+ return H264PROFILE_MULTIVIEWHIGH;
}
break;
case kCodecVP8:
diff --git a/chromium/media/gpu/v4l2/v4l2_device.h b/chromium/media/gpu/v4l2/v4l2_device.h
index 310d4a4a1a5..bdd8585aacd 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.h
+++ b/chromium/media/gpu/v4l2/v4l2_device.h
@@ -129,6 +129,15 @@ class MEDIA_GPU_EXPORT V4L2WritableBufferRef {
// list.
bool QueueDMABuf(const std::vector<gfx::NativePixmapPlane>& planes,
V4L2RequestRef* request_ref = nullptr) &&;
+ // Queue a |video_frame| using its file descriptors as DMABUFs. The VideoFrame
+ // must have been constructed from its file descriptors.
+ // The particularity of this method is that a reference to |video_frame| is
+ // kept and made available again when the buffer is dequeued through
+ // |V4L2ReadableBufferRef::GetVideoFrame()|. |video_frame| is thus guaranteed
+ // to be alive until either all the |V4L2ReadableBufferRef| from the dequeued
+ // buffer get out of scope, or |V4L2Queue::Streamoff()| is called.
+ bool QueueDMABuf(scoped_refptr<VideoFrame> video_frame,
+ V4L2RequestRef* request_ref = nullptr) &&;
// Returns the number of planes in this buffer.
size_t PlanesCount() const;
@@ -180,7 +189,8 @@ class MEDIA_GPU_EXPORT V4L2WritableBufferRef {
// filled.
// When requests are supported, a |request_ref| can be passed along this
// the buffer to be submitted.
- bool DoQueue(V4L2RequestRef* request_ref) &&;
+ bool DoQueue(V4L2RequestRef* request_ref,
+ scoped_refptr<VideoFrame> video_frame) &&;
V4L2WritableBufferRef(const struct v4l2_buffer& v4l2_buffer,
base::WeakPtr<V4L2Queue> queue);
@@ -245,9 +255,14 @@ class MEDIA_GPU_EXPORT V4L2ReadableBuffer
~V4L2ReadableBuffer();
V4L2ReadableBuffer(const struct v4l2_buffer& v4l2_buffer,
- base::WeakPtr<V4L2Queue> queue);
+ base::WeakPtr<V4L2Queue> queue,
+ scoped_refptr<VideoFrame> video_frame);
std::unique_ptr<V4L2BufferRefBase> buffer_data_;
+ // If this buffer was a DMABUF buffer queued with
+ // QueueDMABuf(scoped_refptr<VideoFrame>), then this will hold the VideoFrame
+ // that has been passed at the time of queueing.
+ scoped_refptr<VideoFrame> video_frame_;
SEQUENCE_CHECKER(sequence_checker_);
DISALLOW_COPY_AND_ASSIGN(V4L2ReadableBuffer);
@@ -386,7 +401,8 @@ class MEDIA_GPU_EXPORT V4L2Queue
~V4L2Queue();
// Called when clients request a buffer to be queued.
- bool QueueBuffer(struct v4l2_buffer* v4l2_buffer);
+ bool QueueBuffer(struct v4l2_buffer* v4l2_buffer,
+ scoped_refptr<VideoFrame> video_frame);
const enum v4l2_buf_type type_;
enum v4l2_memory memory_ = V4L2_MEMORY_MMAP;
@@ -402,8 +418,10 @@ class MEDIA_GPU_EXPORT V4L2Queue
// Buffers that are available for client to get and submit.
// Buffers in this list are not referenced by anyone else than ourselves.
scoped_refptr<V4L2BuffersList> free_buffers_;
- // Buffers that have been queued by the client, and not dequeued yet.
- std::set<size_t> queued_buffers_;
+ // Buffers that have been queued by the client, and not dequeued yet. The
+ // value will be set to the VideoFrame that has been passed when we queued
+ // the buffer, if any.
+ std::map<size_t, scoped_refptr<VideoFrame>> queued_buffers_;
scoped_refptr<V4L2Device> device_;
// Callback to call in this queue's destructor.
diff --git a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
index 6498537e426..2a062f8b1d4 100644
--- a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
+++ b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
@@ -126,11 +126,13 @@ V4L2ImageProcessorBackend::V4L2ImageProcessorBackend(
v4l2_memory input_memory_type,
v4l2_memory output_memory_type,
OutputMode output_mode,
+ VideoRotation relative_rotation,
size_t num_buffers,
ErrorCB error_cb)
: ImageProcessorBackend(input_config,
output_config,
output_mode,
+ relative_rotation,
std::move(error_cb),
std::move(backend_task_runner)),
input_memory_type_(input_memory_type),
@@ -228,12 +230,13 @@ std::unique_ptr<ImageProcessorBackend> V4L2ImageProcessorBackend::Create(
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner) {
for (const auto& output_mode : preferred_output_modes) {
auto image_processor = V4L2ImageProcessorBackend::CreateWithOutputMode(
- device, num_buffers, input_config, output_config, output_mode, error_cb,
- backend_task_runner);
+ device, num_buffers, input_config, output_config, output_mode,
+ relative_rotation, error_cb, backend_task_runner);
if (image_processor)
return image_processor;
}
@@ -249,6 +252,7 @@ V4L2ImageProcessorBackend::CreateWithOutputMode(
const PortConfig& input_config,
const PortConfig& output_config,
const OutputMode& output_mode,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner) {
VLOGF(2);
@@ -308,6 +312,12 @@ V4L2ImageProcessorBackend::CreateWithOutputMode(
return nullptr;
}
+ // V4L2IP currently doesn't support rotation, so return nullptr.
+ if (relative_rotation != VIDEO_ROTATION_0) {
+ VLOGF(1) << "Currently V4L2IP doesn't support rotation";
+ return nullptr;
+ }
+
if (!device->Open(V4L2Device::Type::kImageProcessor,
input_config.fourcc.ToV4L2PixFmt())) {
VLOGF(1) << "Failed to open device with input fourcc: "
@@ -390,8 +400,8 @@ V4L2ImageProcessorBackend::CreateWithOutputMode(
PortConfig(output_config.fourcc, negotiated_output_size,
output_planes, output_config.visible_rect,
{output_storage_type}),
- input_memory_type, output_memory_type, output_mode, num_buffers,
- std::move(error_cb)));
+ input_memory_type, output_memory_type, output_mode, relative_rotation,
+ num_buffers, std::move(error_cb)));
// Initialize at |backend_task_runner_|.
bool success = false;
diff --git a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.h b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.h
index bd1c78ac4e9..4652bda62b7 100644
--- a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.h
+++ b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.h
@@ -49,6 +49,7 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessorBackend
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
@@ -104,6 +105,7 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessorBackend
const PortConfig& input_config,
const PortConfig& output_config,
const OutputMode& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
@@ -115,6 +117,7 @@ class MEDIA_GPU_EXPORT V4L2ImageProcessorBackend
v4l2_memory input_memory_type,
v4l2_memory output_memory_type,
OutputMode output_mode,
+ VideoRotation relative_rotation,
size_t num_buffers,
ErrorCB error_cb);
~V4L2ImageProcessorBackend() override;
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
index dd2c2e853eb..594081c44f8 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
@@ -592,8 +592,8 @@ bool V4L2SliceVideoDecodeAccelerator::CreateImageProcessor() {
image_processor_ = v4l2_vda_helpers::CreateImageProcessor(
*output_format_fourcc_, *gl_image_format_fourcc_, coded_size_,
gl_image_size_, GetRectSizeFromOrigin(decoder_->GetVisibleRect()),
- output_buffer_map_.size(), image_processor_device_,
- image_processor_output_mode,
+ VideoFrame::StorageType::STORAGE_DMABUFS, output_buffer_map_.size(),
+ image_processor_device_, image_processor_output_mode,
// Unretained(this) is safe for ErrorCB because |decoder_thread_| is owned
// by this V4L2VideoDecodeAccelerator and |this| must be valid when
// ErrorCB is executed.
diff --git a/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc b/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
index f520d93be0f..558b694af86 100644
--- a/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
+++ b/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
@@ -71,6 +71,7 @@ std::unique_ptr<ImageProcessor> CreateImageProcessor(
const gfx::Size& vda_output_coded_size,
const gfx::Size& ip_output_coded_size,
const gfx::Size& visible_size,
+ VideoFrame::StorageType output_storage_type,
size_t nb_buffers,
scoped_refptr<V4L2Device> image_processor_device,
ImageProcessor::OutputMode image_processor_output_mode,
@@ -86,8 +87,8 @@ std::unique_ptr<ImageProcessor> CreateImageProcessor(
{VideoFrame::STORAGE_DMABUFS}),
ImageProcessor::PortConfig(ip_output_format, ip_output_coded_size, {},
gfx::Rect(visible_size),
- {VideoFrame::STORAGE_DMABUFS}),
- {image_processor_output_mode}, std::move(error_cb),
+ {output_storage_type}),
+ {image_processor_output_mode}, VIDEO_ROTATION_0, std::move(error_cb),
std::move(client_task_runner));
if (!image_processor)
return nullptr;
@@ -174,6 +175,8 @@ bool InputBufferFragmentSplitter::IsPartialFramePending() const {
H264InputBufferFragmentSplitter::H264InputBufferFragmentSplitter()
: h264_parser_(new H264Parser()) {}
+H264InputBufferFragmentSplitter::~H264InputBufferFragmentSplitter() = default;
+
bool H264InputBufferFragmentSplitter::AdvanceFrameFragment(const uint8_t* data,
size_t size,
size_t* endpos) {
diff --git a/chromium/media/gpu/v4l2/v4l2_vda_helpers.h b/chromium/media/gpu/v4l2/v4l2_vda_helpers.h
index b0c780cd734..05b74a3205d 100644
--- a/chromium/media/gpu/v4l2/v4l2_vda_helpers.h
+++ b/chromium/media/gpu/v4l2/v4l2_vda_helpers.h
@@ -41,6 +41,7 @@ base::Optional<Fourcc> FindImageProcessorOutputFormat(V4L2Device* ip_device);
// |ip_output_coded_size| is the coded size of the output buffers that the IP
// must produce.
// |visible_size| is the visible size of both the input and output buffers.
+// |output_storage_type| indicates what type of VideoFrame is used for output.
// |nb_buffers| is the exact number of output buffers that the IP must create.
// |image_processor_output_mode| specifies whether the IP must allocate its
// own buffers or rely on imported ones.
@@ -53,6 +54,7 @@ std::unique_ptr<ImageProcessor> CreateImageProcessor(
const gfx::Size& vda_output_coded_size,
const gfx::Size& ip_output_coded_size,
const gfx::Size& visible_size,
+ VideoFrame::StorageType output_storage_type,
size_t nb_buffers,
scoped_refptr<V4L2Device> image_processor_device,
ImageProcessor::OutputMode image_processor_output_mode,
@@ -97,6 +99,7 @@ class InputBufferFragmentSplitter {
class H264InputBufferFragmentSplitter : public InputBufferFragmentSplitter {
public:
explicit H264InputBufferFragmentSplitter();
+ ~H264InputBufferFragmentSplitter() override;
bool AdvanceFrameFragment(const uint8_t* data,
size_t size,
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
index 4a581cab841..e844687937b 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
@@ -1523,8 +1523,7 @@ bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord(
ret = std::move(buffer).QueueMMap();
break;
case V4L2_MEMORY_DMABUF:
- ret = std::move(buffer).QueueDMABuf(
- output_record.output_frame->DmabufFds());
+ ret = std::move(buffer).QueueDMABuf(output_record.output_frame);
break;
default:
NOTREACHED();
@@ -1880,6 +1879,10 @@ bool V4L2VideoDecodeAccelerator::StartDevicePoll() {
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
+ cancelable_service_device_task_.Reset(base::BindRepeating(
+ &V4L2VideoDecodeAccelerator::ServiceDeviceTask, base::Unretained(this)));
+ cancelable_service_device_task_callback_ =
+ cancelable_service_device_task_.callback();
device_poll_thread_.task_runner()->PostTask(
FROM_HERE, base::BindOnce(&V4L2VideoDecodeAccelerator::DevicePollTask,
base::Unretained(this), 0));
@@ -1901,6 +1904,10 @@ bool V4L2VideoDecodeAccelerator::StopDevicePoll() {
return false;
}
device_poll_thread_.Stop();
+ // Must be done after the Stop() above to ensure
+ // |cancelable_service_device_task_callback_| is no longer being copied.
+ cancelable_service_device_task_.Cancel();
+ cancelable_service_device_task_callback_ = {};
// Clear the interrupt now, to be sure.
if (!device_->ClearDevicePollInterrupt()) {
PLOG(ERROR) << "ClearDevicePollInterrupt: failed";
@@ -2027,8 +2034,8 @@ void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
// All processing should happen on ServiceDeviceTask(), since we shouldn't
// touch decoder state from this thread.
decoder_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&V4L2VideoDecodeAccelerator::ServiceDeviceTask,
- base::Unretained(this), event_pending));
+ FROM_HERE,
+ base::BindOnce(cancelable_service_device_task_callback_, event_pending));
}
bool V4L2VideoDecodeAccelerator::IsDestroyPending() {
@@ -2314,9 +2321,9 @@ bool V4L2VideoDecodeAccelerator::CreateImageProcessor() {
image_processor_ = v4l2_vda_helpers::CreateImageProcessor(
*output_format_fourcc_, *egl_image_format_fourcc_, coded_size_,
- egl_image_size_, visible_size_, output_buffer_map_.size(),
- image_processor_device_, image_processor_output_mode,
- decoder_thread_.task_runner(),
+ egl_image_size_, visible_size_, VideoFrame::StorageType::STORAGE_DMABUFS,
+ output_buffer_map_.size(), image_processor_device_,
+ image_processor_output_mode, decoder_thread_.task_runner(),
// Unretained(this) is safe for ErrorCB because |decoder_thread_| is owned
// by this V4L2VideoDecodeAccelerator and |this| must be valid when
// ErrorCB is executed.
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
index e4d27c1284b..96a23510f18 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
@@ -20,6 +20,7 @@
#include <vector>
#include "base/callback_forward.h"
+#include "base/cancelable_callback.h"
#include "base/containers/queue.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
@@ -465,6 +466,15 @@ class MEDIA_GPU_EXPORT V4L2VideoDecodeAccelerator
// Decoder state machine state.
State decoder_state_;
+ // Cancelable callback for running ServiceDeviceTask(). Must only be accessed
+ // on |decoder_thread_|.
+ base::CancelableRepeatingCallback<void(bool)> cancelable_service_device_task_;
+ // Concrete callback from |cancelable_service_device_task_| that can be copied
+ // on |device_poll_thread_|. This exists because
+ // CancelableRepeatingCallback::callback() creates a WeakPtr internally, which
+ // must be created/destroyed from the same thread.
+ base::RepeatingCallback<void(bool)> cancelable_service_device_task_callback_;
+
// Waitable event signaled when the decoder is destroying.
base::WaitableEvent destroy_pending_;
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
index 28e1b3b7e4a..4c747eb86f3 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/gpu/v4l2/v4l2_slice_video_decoder.h"
+#include "media/gpu/v4l2/v4l2_video_decoder.h"
#include <algorithm>
@@ -17,6 +17,7 @@
#include "media/gpu/chromeos/fourcc.h"
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
#include "media/gpu/macros.h"
+#include "media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h"
#include "media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h"
namespace media {
@@ -33,15 +34,14 @@ constexpr size_t kNumInputBuffers = 16;
// Input format V4L2 fourccs this class supports.
constexpr uint32_t kSupportedInputFourccs[] = {
- V4L2_PIX_FMT_H264_SLICE,
- V4L2_PIX_FMT_VP8_FRAME,
- V4L2_PIX_FMT_VP9_FRAME,
+ V4L2_PIX_FMT_H264_SLICE, V4L2_PIX_FMT_VP8_FRAME, V4L2_PIX_FMT_VP9_FRAME,
+ V4L2_PIX_FMT_H264, V4L2_PIX_FMT_VP8, V4L2_PIX_FMT_VP9,
};
} // namespace
// static
-std::unique_ptr<DecoderInterface> V4L2SliceVideoDecoder::Create(
+std::unique_ptr<DecoderInterface> V4L2VideoDecoder::Create(
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<DecoderInterface::Client> client) {
DCHECK(decoder_task_runner->RunsTasksInCurrentSequence());
@@ -53,12 +53,12 @@ std::unique_ptr<DecoderInterface> V4L2SliceVideoDecoder::Create(
return nullptr;
}
- return base::WrapUnique<DecoderInterface>(new V4L2SliceVideoDecoder(
+ return base::WrapUnique<DecoderInterface>(new V4L2VideoDecoder(
std::move(decoder_task_runner), std::move(client), std::move(device)));
}
// static
-SupportedVideoDecoderConfigs V4L2SliceVideoDecoder::GetSupportedConfigs() {
+SupportedVideoDecoderConfigs V4L2VideoDecoder::GetSupportedConfigs() {
scoped_refptr<V4L2Device> device = V4L2Device::Create();
if (!device)
return SupportedVideoDecoderConfigs();
@@ -69,7 +69,7 @@ SupportedVideoDecoderConfigs V4L2SliceVideoDecoder::GetSupportedConfigs() {
false);
}
-V4L2SliceVideoDecoder::V4L2SliceVideoDecoder(
+V4L2VideoDecoder::V4L2VideoDecoder(
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<DecoderInterface::Client> client,
scoped_refptr<V4L2Device> device)
@@ -82,7 +82,7 @@ V4L2SliceVideoDecoder::V4L2SliceVideoDecoder(
weak_this_ = weak_this_factory_.GetWeakPtr();
}
-V4L2SliceVideoDecoder::~V4L2SliceVideoDecoder() {
+V4L2VideoDecoder::~V4L2VideoDecoder() {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(2);
@@ -93,7 +93,7 @@ V4L2SliceVideoDecoder::~V4L2SliceVideoDecoder() {
}
// Stop and Destroy device.
- StopStreamV4L2Queue();
+ StopStreamV4L2Queue(true);
if (input_queue_) {
input_queue_->DeallocateBuffers();
input_queue_ = nullptr;
@@ -106,9 +106,9 @@ V4L2SliceVideoDecoder::~V4L2SliceVideoDecoder() {
weak_this_factory_.InvalidateWeakPtrs();
}
-void V4L2SliceVideoDecoder::Initialize(const VideoDecoderConfig& config,
- InitCB init_cb,
- const OutputCB& output_cb) {
+void V4L2VideoDecoder::Initialize(const VideoDecoderConfig& config,
+ InitCB init_cb,
+ const OutputCB& output_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK(config.IsValidConfig());
DCHECK(state_ == State::kUninitialized || state_ == State::kDecoding);
@@ -116,7 +116,7 @@ void V4L2SliceVideoDecoder::Initialize(const VideoDecoderConfig& config,
// Reset V4L2 device and queue if reinitializing decoder.
if (state_ != State::kUninitialized) {
- if (!StopStreamV4L2Queue()) {
+ if (!StopStreamV4L2Queue(true)) {
std::move(init_cb).Run(StatusCode::kV4l2FailedToStopStreamQueue);
return;
}
@@ -141,12 +141,33 @@ void V4L2SliceVideoDecoder::Initialize(const VideoDecoderConfig& config,
// Open V4L2 device.
VideoCodecProfile profile = config.profile();
- uint32_t input_format_fourcc =
+ uint32_t input_format_fourcc_stateless =
V4L2Device::VideoCodecProfileToV4L2PixFmt(profile, true);
- if (!input_format_fourcc ||
- !device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc)) {
+ if (!input_format_fourcc_stateless ||
+ !device_->Open(V4L2Device::Type::kDecoder,
+ input_format_fourcc_stateless)) {
VLOGF(1) << "Failed to open device for profile: " << profile
- << " fourcc: " << FourccToString(input_format_fourcc);
+ << " fourcc: " << FourccToString(input_format_fourcc_stateless);
+ input_format_fourcc_stateless = 0;
+ } else {
+ VLOGF(1) << "Found V4L2 device capable of stateless decoding for "
+ << FourccToString(input_format_fourcc_stateless);
+ }
+
+ uint32_t input_format_fourcc_stateful =
+ V4L2Device::VideoCodecProfileToV4L2PixFmt(profile, false);
+ if (!input_format_fourcc_stateful ||
+ !device_->Open(V4L2Device::Type::kDecoder,
+ input_format_fourcc_stateful)) {
+ VLOGF(1) << "Failed to open device for profile: " << profile
+ << " fourcc: " << FourccToString(input_format_fourcc_stateful);
+ input_format_fourcc_stateful = 0;
+ } else {
+ VLOGF(1) << "Found V4L2 device capable of stateful decoding for "
+ << FourccToString(input_format_fourcc_stateful);
+ }
+
+ if (!input_format_fourcc_stateless && !input_format_fourcc_stateful) {
std::move(init_cb).Run(StatusCode::kV4l2NoDecoder);
return;
}
@@ -172,10 +193,23 @@ void V4L2SliceVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
- // Create the backend (only stateless API supported as of now).
- backend_ = std::make_unique<V4L2StatelessVideoDecoderBackend>(
- this, device_, profile, decoder_task_runner_);
+ uint32_t input_format_fourcc;
+ if (input_format_fourcc_stateful) {
+ backend_ = std::make_unique<V4L2StatefulVideoDecoderBackend>(
+ this, device_, profile, decoder_task_runner_);
+ input_format_fourcc = input_format_fourcc_stateful;
+ } else if (input_format_fourcc_stateless) {
+ backend_ = std::make_unique<V4L2StatelessVideoDecoderBackend>(
+ this, device_, profile, decoder_task_runner_);
+ input_format_fourcc = input_format_fourcc_stateless;
+ } else {
+ VLOGF(1) << "No backend capable of taking this profile.";
+ std::move(init_cb).Run(StatusCode::kV4l2FailedResourceAllocation);
+ return;
+ }
+
if (!backend_->Initialize()) {
+ VLOGF(1) << "Failed to initialize backend.";
std::move(init_cb).Run(StatusCode::kV4l2FailedResourceAllocation);
return;
}
@@ -193,13 +227,21 @@ void V4L2SliceVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
+ // Start streaming input queue and polling. This is required for the stateful
+ // decoder, and doesn't hurt for the stateless one.
+ if (!StartStreamV4L2Queue(false)) {
+ VLOGF(1) << "Failed to start streaming.";
+ std::move(init_cb).Run(StatusCode::kV4L2FailedToStartStreamQueue);
+ return;
+ }
+
// Call init_cb
output_cb_ = output_cb;
SetState(State::kDecoding);
std::move(init_cb).Run(::media::OkStatus());
}
-bool V4L2SliceVideoDecoder::SetupInputFormat(uint32_t input_format_fourcc) {
+bool V4L2VideoDecoder::SetupInputFormat(uint32_t input_format_fourcc) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK_EQ(state_, State::kUninitialized);
@@ -232,8 +274,8 @@ bool V4L2SliceVideoDecoder::SetupInputFormat(uint32_t input_format_fourcc) {
return true;
}
-bool V4L2SliceVideoDecoder::SetupOutputFormat(const gfx::Size& size,
- const gfx::Rect& visible_rect) {
+bool V4L2VideoDecoder::SetupOutputFormat(const gfx::Size& size,
+ const gfx::Rect& visible_rect) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3) << "size: " << size.ToString()
<< ", visible_rect: " << visible_rect.ToString();
@@ -307,7 +349,7 @@ bool V4L2SliceVideoDecoder::SetupOutputFormat(const gfx::Size& size,
return true;
}
-void V4L2SliceVideoDecoder::Reset(base::OnceClosure closure) {
+void V4L2VideoDecoder::Reset(base::OnceClosure closure) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
@@ -321,12 +363,13 @@ void V4L2SliceVideoDecoder::Reset(base::OnceClosure closure) {
// Streamoff V4L2 queues to drop input and output buffers.
// If the queues are streaming before reset, then we need to start streaming
// them after stopping.
- bool is_streaming = input_queue_->IsStreaming();
- if (!StopStreamV4L2Queue())
+ const bool is_input_streaming = input_queue_->IsStreaming();
+ const bool is_output_streaming = output_queue_->IsStreaming();
+ if (!StopStreamV4L2Queue(true))
return;
- if (is_streaming) {
- if (!StartStreamV4L2Queue())
+ if (is_input_streaming) {
+ if (!StartStreamV4L2Queue(is_output_streaming))
return;
}
@@ -337,8 +380,8 @@ void V4L2SliceVideoDecoder::Reset(base::OnceClosure closure) {
std::move(closure).Run();
}
-void V4L2SliceVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
- DecodeCB decode_cb) {
+void V4L2VideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
+ DecodeCB decode_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK_NE(state_, State::kUninitialized);
@@ -352,20 +395,20 @@ void V4L2SliceVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
bitstream_id);
}
-bool V4L2SliceVideoDecoder::StartStreamV4L2Queue() {
+bool V4L2VideoDecoder::StartStreamV4L2Queue(bool start_output_queue) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
- if (!input_queue_->Streamon() || !output_queue_->Streamon()) {
+ if (!input_queue_->Streamon() ||
+ (start_output_queue && !output_queue_->Streamon())) {
VLOGF(1) << "Failed to streamon V4L2 queue.";
SetState(State::kError);
return false;
}
if (!device_->StartPolling(
- base::BindRepeating(&V4L2SliceVideoDecoder::ServiceDeviceTask,
- weak_this_),
- base::BindRepeating(&V4L2SliceVideoDecoder::SetState, weak_this_,
+ base::BindRepeating(&V4L2VideoDecoder::ServiceDeviceTask, weak_this_),
+ base::BindRepeating(&V4L2VideoDecoder::SetState, weak_this_,
State::kError))) {
SetState(State::kError);
return false;
@@ -374,7 +417,7 @@ bool V4L2SliceVideoDecoder::StartStreamV4L2Queue() {
return true;
}
-bool V4L2SliceVideoDecoder::StopStreamV4L2Queue() {
+bool V4L2VideoDecoder::StopStreamV4L2Queue(bool stop_input_queue) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
@@ -384,48 +427,48 @@ bool V4L2SliceVideoDecoder::StopStreamV4L2Queue() {
}
// Streamoff input and output queue.
- if (input_queue_)
+ if (input_queue_ && stop_input_queue)
input_queue_->Streamoff();
if (output_queue_)
output_queue_->Streamoff();
if (backend_)
- backend_->OnStreamStopped();
+ backend_->OnStreamStopped(stop_input_queue);
return true;
}
-void V4L2SliceVideoDecoder::InitiateFlush() {
+void V4L2VideoDecoder::InitiateFlush() {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
SetState(State::kFlushing);
}
-void V4L2SliceVideoDecoder::CompleteFlush() {
+void V4L2VideoDecoder::CompleteFlush() {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
SetState(State::kDecoding);
}
-void V4L2SliceVideoDecoder::ChangeResolution(gfx::Size pic_size,
- gfx::Rect visible_rect,
- size_t num_output_frames) {
+void V4L2VideoDecoder::ChangeResolution(gfx::Size pic_size,
+ gfx::Rect visible_rect,
+ size_t num_output_frames) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
DCHECK(!continue_change_resolution_cb_);
// After the pipeline flushes all frames, we can start changing resolution.
continue_change_resolution_cb_ =
- base::BindOnce(&V4L2SliceVideoDecoder::ContinueChangeResolution,
- weak_this_, pic_size, visible_rect, num_output_frames);
+ base::BindOnce(&V4L2VideoDecoder::ContinueChangeResolution, weak_this_,
+ pic_size, visible_rect, num_output_frames);
DCHECK(client_);
client_->PrepareChangeResolution();
}
-void V4L2SliceVideoDecoder::ApplyResolutionChange() {
+void V4L2VideoDecoder::ApplyResolutionChange() {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
DCHECK(continue_change_resolution_cb_);
@@ -433,13 +476,12 @@ void V4L2SliceVideoDecoder::ApplyResolutionChange() {
std::move(continue_change_resolution_cb_).Run();
}
-void V4L2SliceVideoDecoder::ContinueChangeResolution(
+void V4L2VideoDecoder::ContinueChangeResolution(
const gfx::Size& pic_size,
const gfx::Rect& visible_rect,
const size_t num_output_frames) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
- DCHECK_EQ(input_queue_->QueuedBuffersCount(), 0u);
DCHECK_EQ(output_queue_->QueuedBuffersCount(), 0u);
// If we already reset, then skip it.
@@ -455,7 +497,9 @@ void V4L2SliceVideoDecoder::ContinueChangeResolution(
num_output_frames_ = num_output_frames;
- if (!StopStreamV4L2Queue())
+ // Stateful decoders require the input queue to keep running during resolution
+ // changes, but stateless ones require it to be stopped.
+ if (!StopStreamV4L2Queue(backend_->StopInputQueueOnResChange()))
return;
if (!output_queue_->DeallocateBuffers()) {
@@ -488,7 +532,7 @@ void V4L2SliceVideoDecoder::ContinueChangeResolution(
return;
}
- if (!StartStreamV4L2Queue()) {
+ if (!StartStreamV4L2Queue(true)) {
SetState(State::kError);
return;
}
@@ -500,7 +544,7 @@ void V4L2SliceVideoDecoder::ContinueChangeResolution(
base::Unretained(backend_.get()), true));
}
-void V4L2SliceVideoDecoder::ServiceDeviceTask(bool /* event */) {
+void V4L2VideoDecoder::ServiceDeviceTask(bool event) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3) << "Number of queued input buffers: "
<< input_queue_->QueuedBuffersCount()
@@ -509,8 +553,9 @@ void V4L2SliceVideoDecoder::ServiceDeviceTask(bool /* event */) {
// Dequeue V4L2 output buffer first to reduce output latency.
bool success;
- V4L2ReadableBufferRef dequeued_buffer;
while (output_queue_->QueuedBuffersCount() > 0) {
+ V4L2ReadableBufferRef dequeued_buffer;
+
std::tie(success, dequeued_buffer) = output_queue_->DequeueBuffer();
if (!success) {
SetState(State::kError);
@@ -524,6 +569,8 @@ void V4L2SliceVideoDecoder::ServiceDeviceTask(bool /* event */) {
// Dequeue V4L2 input buffer.
while (input_queue_->QueuedBuffersCount() > 0) {
+ V4L2ReadableBufferRef dequeued_buffer;
+
std::tie(success, dequeued_buffer) = input_queue_->DequeueBuffer();
if (!success) {
SetState(State::kError);
@@ -532,13 +579,15 @@ void V4L2SliceVideoDecoder::ServiceDeviceTask(bool /* event */) {
if (!dequeued_buffer)
break;
}
+
+ backend_->OnServiceDeviceTask(event);
}
-void V4L2SliceVideoDecoder::OutputFrame(scoped_refptr<VideoFrame> frame,
- const gfx::Rect& visible_rect,
- base::TimeDelta timestamp) {
+void V4L2VideoDecoder::OutputFrame(scoped_refptr<VideoFrame> frame,
+ const gfx::Rect& visible_rect,
+ base::TimeDelta timestamp) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
- DVLOGF(4) << "timestamp: " << timestamp;
+ DVLOGF(4) << "timestamp: " << timestamp.InMilliseconds() << " msec";
// Set the timestamp at which the decode operation started on the
// |frame|. If the frame has been outputted before (e.g. because of VP9
@@ -561,14 +610,14 @@ void V4L2SliceVideoDecoder::OutputFrame(scoped_refptr<VideoFrame> frame,
output_cb_.Run(std::move(frame));
}
-DmabufVideoFramePool* V4L2SliceVideoDecoder::GetVideoFramePool() const {
+DmabufVideoFramePool* V4L2VideoDecoder::GetVideoFramePool() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(4);
return client_->GetVideoFramePool();
}
-void V4L2SliceVideoDecoder::SetState(State new_state) {
+void V4L2VideoDecoder::SetState(State new_state) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3) << "Change state from " << static_cast<int>(state_) << " to "
<< static_cast<int>(new_state);
@@ -613,14 +662,14 @@ void V4L2SliceVideoDecoder::SetState(State new_state) {
return;
}
-void V4L2SliceVideoDecoder::OnBackendError() {
+void V4L2VideoDecoder::OnBackendError() {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(2);
SetState(State::kError);
}
-bool V4L2SliceVideoDecoder::IsDecoding() const {
+bool V4L2VideoDecoder::IsDecoding() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DVLOGF(3);
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.h b/chromium/media/gpu/v4l2/v4l2_video_decoder.h
index d5b82bbf824..b046b17dbd7 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_GPU_V4L2_V4L2_SLICE_VIDEO_DECODER_H_
-#define MEDIA_GPU_V4L2_V4L2_SLICE_VIDEO_DECODER_H_
+#ifndef MEDIA_GPU_V4L2_V4L2_VIDEO_DECODER_H_
+#define MEDIA_GPU_V4L2_V4L2_VIDEO_DECODER_H_
#include <linux/videodev2.h>
@@ -36,12 +36,12 @@ namespace media {
class DmabufVideoFramePool;
-class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder
+class MEDIA_GPU_EXPORT V4L2VideoDecoder
: public DecoderInterface,
public V4L2VideoDecoderBackend::Client {
public:
- // Create V4L2SliceVideoDecoder instance. The success of the creation doesn't
- // ensure V4L2SliceVideoDecoder is available on the device. It will be
+ // Create V4L2VideoDecoder instance. The success of the creation doesn't
+ // ensure V4L2VideoDecoder is available on the device. It will be
// determined in Initialize().
static std::unique_ptr<DecoderInterface> Create(
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
@@ -71,13 +71,12 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder
DmabufVideoFramePool* GetVideoFramePool() const override;
private:
- friend class V4L2SliceVideoDecoderTest;
+ friend class V4L2VideoDecoderTest;
- V4L2SliceVideoDecoder(
- scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
- base::WeakPtr<DecoderInterface::Client> client,
- scoped_refptr<V4L2Device> device);
- ~V4L2SliceVideoDecoder() override;
+ V4L2VideoDecoder(scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ base::WeakPtr<DecoderInterface::Client> client,
+ scoped_refptr<V4L2Device> device);
+ ~V4L2VideoDecoder() override;
enum class State {
// Initial state. Transitions to |kDecoding| if Initialize() is successful,
@@ -116,12 +115,12 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder
// in VideoFramePool. Return true if the setup is successful.
bool SetupOutputFormat(const gfx::Size& size, const gfx::Rect& visible_rect);
- // Start streaming V4L2 input and output queues. Attempt to start
- // |device_poll_thread_| before starting streaming.
- bool StartStreamV4L2Queue();
- // Stop streaming V4L2 input and output queues. Stop |device_poll_thread_|
- // before stopping streaming.
- bool StopStreamV4L2Queue();
+ // Start streaming V4L2 input and (if |start_output_queue| is true) output
+ // queues. Attempt to start |device_poll_thread_| after streaming starts.
+ bool StartStreamV4L2Queue(bool start_output_queue);
+ // Stop streaming V4L2 output and (if |stop_input_queue| is true) input
+ // queues. Stop |device_poll_thread_| before stopping streaming.
+ bool StopStreamV4L2Queue(bool stop_input_queue);
// Try to dequeue input and output buffers from device.
void ServiceDeviceTask(bool event);
@@ -167,10 +166,10 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder
// |weak_this_| must be dereferenced and invalidated on
// |decoder_task_runner_|.
- base::WeakPtr<V4L2SliceVideoDecoder> weak_this_;
- base::WeakPtrFactory<V4L2SliceVideoDecoder> weak_this_factory_;
+ base::WeakPtr<V4L2VideoDecoder> weak_this_;
+ base::WeakPtrFactory<V4L2VideoDecoder> weak_this_factory_;
};
} // namespace media
-#endif // MEDIA_GPU_V4L2_V4L2_SLICE_VIDEO_DECODER_H_
+#endif // MEDIA_GPU_V4L2_V4L2_VIDEO_DECODER_H_
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h
index 093df178bb5..3c49de8f8dd 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend.h
@@ -72,9 +72,13 @@ class V4L2VideoDecoderBackend {
int32_t bitstream_id) = 0;
// Called by the decoder when it has dequeued a buffer from the CAPTURE queue.
virtual void OnOutputBufferDequeued(V4L2ReadableBufferRef buf) = 0;
- // Called whenever the V4L2 stream is stopped (|Streamoff| called on both
- // |V4L2Queue|s).
- virtual void OnStreamStopped() = 0;
+ // Backend can overload this method if it needs to do specific work when
+ // the device task is called.
+ virtual void OnServiceDeviceTask(bool event) {}
+ // Called whenever the V4L2 stream is stopped (|Streamoff| called on either
+ // the CAPTURE queue alone or on both queues). |input_queue_stopped| is
+ // true if the input queue has been requested to stop.
+ virtual void OnStreamStopped(bool input_queue_stopped) = 0;
// Called when the resolution has been decided, in case the backend needs
// to do something specific beyond applying these parameters to the CAPTURE
// queue.
@@ -88,6 +92,12 @@ class V4L2VideoDecoderBackend {
// with |status| as argument.
virtual void ClearPendingRequests(DecodeStatus status) = 0;
+ // Whether we should stop the input queue when changing resolution. Stateless
+ // decoders require this, but stateful ones need the input queue to keep
+ // running. Although not super elegant, this is required to express that
+ // difference.
+ virtual bool StopInputQueueOnResChange() const = 0;
+
protected:
V4L2VideoDecoderBackend(Client* const client,
scoped_refptr<V4L2Device> device);
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
new file mode 100644
index 00000000000..417598f893c
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
@@ -0,0 +1,608 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h"
+#include <cstddef>
+
+#include <memory>
+#include <tuple>
+#include <utility>
+
+#include "base/bind.h"
+#include "base/callback_forward.h"
+#include "base/logging.h"
+#include "base/optional.h"
+#include "base/sequence_checker.h"
+#include "base/sequenced_task_runner.h"
+#include "media/base/video_codecs.h"
+#include "media/gpu/chromeos/dmabuf_video_frame_pool.h"
+#include "media/gpu/macros.h"
+#include "media/gpu/v4l2/v4l2_device.h"
+#include "media/gpu/v4l2/v4l2_vda_helpers.h"
+#include "media/gpu/v4l2/v4l2_video_decoder_backend.h"
+
+namespace media {
+
+V4L2StatefulVideoDecoderBackend::DecodeRequest::DecodeRequest(
+ scoped_refptr<DecoderBuffer> buf,
+ VideoDecoder::DecodeCB cb,
+ int32_t id)
+ : buffer(std::move(buf)), decode_cb(std::move(cb)), bitstream_id(id) {}
+
+V4L2StatefulVideoDecoderBackend::DecodeRequest::DecodeRequest(DecodeRequest&&) =
+ default;
+V4L2StatefulVideoDecoderBackend::DecodeRequest&
+V4L2StatefulVideoDecoderBackend::DecodeRequest::operator=(DecodeRequest&&) =
+ default;
+
+V4L2StatefulVideoDecoderBackend::DecodeRequest::~DecodeRequest() = default;
+
+bool V4L2StatefulVideoDecoderBackend::DecodeRequest::IsCompleted() const {
+ return bytes_used == buffer->data_size();
+}
+
+V4L2StatefulVideoDecoderBackend::V4L2StatefulVideoDecoderBackend(
+ Client* const client,
+ scoped_refptr<V4L2Device> device,
+ VideoCodecProfile profile,
+ scoped_refptr<base::SequencedTaskRunner> task_runner)
+ : V4L2VideoDecoderBackend(client, std::move(device)),
+ profile_(profile),
+ task_runner_(task_runner) {
+ DVLOGF(3);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ weak_this_ = weak_this_factory_.GetWeakPtr();
+}
+
+V4L2StatefulVideoDecoderBackend::~V4L2StatefulVideoDecoderBackend() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ if (flush_cb_ || current_decode_request_ || !decode_request_queue_.empty()) {
+ VLOGF(1) << "Should not destroy backend during pending decode!";
+ }
+
+ struct v4l2_event_subscription sub;
+ memset(&sub, 0, sizeof(sub));
+ sub.type = V4L2_EVENT_SOURCE_CHANGE;
+ if (device_->Ioctl(VIDIOC_UNSUBSCRIBE_EVENT, &sub) != 0) {
+ VLOGF(1) << "Cannot unsubscribe to event";
+ }
+}
+
+bool V4L2StatefulVideoDecoderBackend::Initialize() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ if (!IsSupportedProfile(profile_)) {
+ VLOGF(1) << "Unsupported profile " << GetProfileName(profile_);
+ return false;
+ }
+
+ frame_splitter_ =
+ v4l2_vda_helpers::InputBufferFragmentSplitter::CreateFromProfile(
+ profile_);
+ if (!frame_splitter_) {
+ VLOGF(1) << "Failed to create frame splitter";
+ return false;
+ }
+
+ struct v4l2_event_subscription sub;
+ memset(&sub, 0, sizeof(sub));
+ sub.type = V4L2_EVENT_SOURCE_CHANGE;
+ if (device_->Ioctl(VIDIOC_SUBSCRIBE_EVENT, &sub) != 0) {
+ VLOGF(1) << "Cannot subscribe to event";
+ return false;
+ }
+
+ return true;
+}
+
+void V4L2StatefulVideoDecoderBackend::EnqueueDecodeTask(
+ scoped_refptr<DecoderBuffer> buffer,
+ VideoDecoder::DecodeCB decode_cb,
+ int32_t bitstream_id) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ decode_request_queue_.push(
+ DecodeRequest(std::move(buffer), std::move(decode_cb), bitstream_id));
+
+ DoDecodeWork();
+}
+
+void V4L2StatefulVideoDecoderBackend::DoDecodeWork() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ // Do not decode if a flush is in progress.
+ // This may actually be ok to do if we are changing resolution?
+ if (flush_cb_)
+ return;
+
+ // Get a new decode request if none is in progress.
+ if (!current_decode_request_) {
+ // No more decode request, nothing to do for now.
+ if (decode_request_queue_.empty())
+ return;
+
+ auto decode_request = std::move(decode_request_queue_.front());
+ decode_request_queue_.pop();
+
+ // Need to flush?
+ if (decode_request.buffer->end_of_stream()) {
+ InitiateFlush(std::move(decode_request.decode_cb));
+ return;
+ }
+
+ // This is our new decode request.
+ current_decode_request_ = std::move(decode_request);
+ DCHECK_EQ(current_decode_request_->bytes_used, 0u);
+ }
+
+ // Get a V4L2 buffer to copy the encoded data into.
+ if (!current_input_buffer_) {
+ current_input_buffer_ = input_queue_->GetFreeBuffer();
+ // We will be called again once an input buffer becomes available.
+ if (!current_input_buffer_)
+ return;
+
+ // Record timestamp of the input buffer so it propagates to the decoded
+ // frames.
+ const struct timespec timespec =
+ current_decode_request_->buffer->timestamp().ToTimeSpec();
+ struct timeval timestamp = {
+ .tv_sec = timespec.tv_sec,
+ .tv_usec = timespec.tv_nsec / 1000,
+ };
+ current_input_buffer_->SetTimeStamp(timestamp);
+ }
+
+ // From here on we have both a decode request and input buffer, so we can
+ // progress with decoding.
+ DCHECK(current_decode_request_.has_value());
+ DCHECK(current_input_buffer_.has_value());
+
+ const DecoderBuffer* current_buffer = current_decode_request_->buffer.get();
+ DCHECK_LT(current_decode_request_->bytes_used, current_buffer->data_size());
+ const uint8_t* const data =
+ current_buffer->data() + current_decode_request_->bytes_used;
+ const size_t data_size =
+ current_buffer->data_size() - current_decode_request_->bytes_used;
+ size_t bytes_to_copy = 0;
+
+ if (!frame_splitter_->AdvanceFrameFragment(data, data_size, &bytes_to_copy)) {
+ VLOGF(1) << "Invalid H.264 stream detected.";
+ std::move(current_decode_request_->decode_cb)
+ .Run(DecodeStatus::DECODE_ERROR);
+ current_decode_request_.reset();
+ current_input_buffer_.reset();
+ client_->OnBackendError();
+ return;
+ }
+
+ const size_t bytes_used = current_input_buffer_->GetPlaneBytesUsed(0);
+ if (bytes_used + bytes_to_copy > current_input_buffer_->GetPlaneSize(0)) {
+ VLOGF(1) << "V4L2 buffer size is too small to contain a whole frame.";
+ std::move(current_decode_request_->decode_cb)
+ .Run(DecodeStatus::DECODE_ERROR);
+ current_decode_request_.reset();
+ current_input_buffer_.reset();
+ client_->OnBackendError();
+ return;
+ }
+
+ uint8_t* dst =
+ static_cast<uint8_t*>(current_input_buffer_->GetPlaneMapping(0)) +
+ bytes_used;
+ memcpy(dst, data, bytes_to_copy);
+ current_input_buffer_->SetPlaneBytesUsed(0, bytes_used + bytes_to_copy);
+ current_decode_request_->bytes_used += bytes_to_copy;
+
+ // Release current_input_request_ if we reached its end.
+ if (current_decode_request_->IsCompleted()) {
+ std::move(current_decode_request_->decode_cb).Run(DecodeStatus::OK);
+ current_decode_request_.reset();
+ }
+
+ // If we have a partial frame, wait before submitting it.
+ if (frame_splitter_->IsPartialFramePending()) {
+ VLOGF(4) << "Partial frame pending, not queueing any buffer now.";
+ return;
+ }
+
+ // The V4L2 input buffer contains a decodable entity, queue it.
+ std::move(*current_input_buffer_).QueueMMap();
+ current_input_buffer_.reset();
+
+ // If we can still progress on a decode request, do it.
+ if (current_decode_request_ || !decode_request_queue_.empty())
+ ScheduleDecodeWork();
+}
+
+void V4L2StatefulVideoDecoderBackend::ScheduleDecodeWork() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&V4L2StatefulVideoDecoderBackend::DoDecodeWork,
+ weak_this_));
+}
+
+void V4L2StatefulVideoDecoderBackend::OnServiceDeviceTask(bool event) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ if (event) {
+ while (base::Optional<struct v4l2_event> ev = device_->DequeueEvent()) {
+ if (ev->type == V4L2_EVENT_SOURCE_CHANGE &&
+ (ev->u.src_change.changes & V4L2_EVENT_SRC_CH_RESOLUTION)) {
+ ChangeResolution();
+ }
+ }
+ }
+
+ // We can enqueue dequeued output buffers immediately.
+ EnqueueOutputBuffers();
+
+ // Try to progress on our work since we may have dequeued input buffers.
+ DoDecodeWork();
+}
+
+void V4L2StatefulVideoDecoderBackend::EnqueueOutputBuffers() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+ const v4l2_memory mem_type = output_queue_->GetMemoryType();
+
+ while (base::Optional<V4L2WritableBufferRef> buffer =
+ output_queue_->GetFreeBuffer()) {
+ bool ret = false;
+
+ switch (mem_type) {
+ case V4L2_MEMORY_MMAP:
+ ret = std::move(*buffer).QueueMMap();
+ break;
+ case V4L2_MEMORY_DMABUF: {
+ scoped_refptr<VideoFrame> video_frame = GetPoolVideoFrame();
+ // Running out of frame is not an error, we will be called again
+ // once frames are available.
+ if (!video_frame)
+ return;
+ ret = std::move(*buffer).QueueDMABuf(std::move(video_frame));
+ break;
+ }
+ default:
+ NOTREACHED();
+ }
+
+ if (!ret)
+ client_->OnBackendError();
+ }
+
+ DVLOGF(3) << output_queue_->QueuedBuffersCount() << "/"
+ << output_queue_->AllocatedBuffersCount()
+ << " output buffers queued";
+}
+
+scoped_refptr<VideoFrame> V4L2StatefulVideoDecoderBackend::GetPoolVideoFrame() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+ DmabufVideoFramePool* pool = client_->GetVideoFramePool();
+ DCHECK_EQ(output_queue_->GetMemoryType(), V4L2_MEMORY_DMABUF);
+ DCHECK_NE(pool, nullptr);
+
+ scoped_refptr<VideoFrame> frame = pool->GetFrame();
+ if (!frame) {
+ DVLOGF(3) << "No available videoframe for now";
+ // We will try again once a frame becomes available.
+ pool->NotifyWhenFrameAvailable(base::BindOnce(
+ base::IgnoreResult(&base::SequencedTaskRunner::PostTask), task_runner_,
+ FROM_HERE,
+ base::BindOnce(
+ base::IgnoreResult(
+ &V4L2StatefulVideoDecoderBackend::EnqueueOutputBuffers),
+ weak_this_)));
+ }
+
+ return frame;
+}
+
+// static
+void V4L2StatefulVideoDecoderBackend::ReuseOutputBufferThunk(
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ base::Optional<base::WeakPtr<V4L2StatefulVideoDecoderBackend>> weak_this,
+ V4L2ReadableBufferRef buffer) {
+ DVLOGF(3);
+ DCHECK(weak_this);
+
+ if (task_runner->RunsTasksInCurrentSequence()) {
+ if (*weak_this)
+ (*weak_this)->ReuseOutputBuffer(std::move(buffer));
+ } else {
+ task_runner->PostTask(
+ FROM_HERE,
+ base::BindOnce(&V4L2StatefulVideoDecoderBackend::ReuseOutputBuffer,
+ *weak_this, std::move(buffer)));
+ }
+}
+
+void V4L2StatefulVideoDecoderBackend::ReuseOutputBuffer(
+ V4L2ReadableBufferRef buffer) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3) << "Reuse output buffer #" << buffer->BufferId();
+
+ // Lose reference to the buffer so it goes back to the free list.
+ buffer.reset();
+
+ // Enqueue the newly available buffer.
+ EnqueueOutputBuffers();
+}
+
+void V4L2StatefulVideoDecoderBackend::OnOutputBufferDequeued(
+ V4L2ReadableBufferRef buffer) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ // Zero-bytes buffers are returned as part of a flush and can be dismissed.
+ if (buffer->GetPlaneBytesUsed(0) > 0) {
+ const struct timeval timeval = buffer->GetTimeStamp();
+ const struct timespec timespec = {
+ .tv_sec = timeval.tv_sec,
+ .tv_nsec = timeval.tv_usec * 1000,
+ };
+ const base::TimeDelta timestamp = base::TimeDelta::FromTimeSpec(timespec);
+
+ scoped_refptr<VideoFrame> frame;
+
+ switch (output_queue_->GetMemoryType()) {
+ case V4L2_MEMORY_MMAP: {
+ // Wrap the videoframe into another one so we can be signaled when the
+ // consumer is done with it and reuse the V4L2 buffer.
+ scoped_refptr<VideoFrame> origin_frame = buffer->GetVideoFrame();
+ frame = VideoFrame::WrapVideoFrame(origin_frame, origin_frame->format(),
+ origin_frame->visible_rect(),
+ origin_frame->natural_size());
+ frame->AddDestructionObserver(base::BindOnce(
+ &V4L2StatefulVideoDecoderBackend::ReuseOutputBufferThunk,
+ task_runner_, weak_this_, buffer));
+ break;
+ }
+ case V4L2_MEMORY_DMABUF:
+ // The pool VideoFrame we passed to QueueDMABuf() has been decoded into,
+ // pass it as-is.
+ frame = buffer->GetVideoFrame();
+ break;
+ default:
+ NOTREACHED();
+ }
+
+ client_->OutputFrame(std::move(frame), *visible_rect_, timestamp);
+ }
+
+ // We were waiting for the last buffer before a resolution change
+ // The order here is important! A flush event may come after a resolution
+ // change event (but not the opposite), so we must make sure both events
+ // are processed in the correct order.
+ if (buffer->IsLast() && resolution_change_cb_) {
+ std::move(resolution_change_cb_).Run();
+ } else if (buffer->IsLast() && flush_cb_) {
+ // We were waiting for a flush to complete, and received the last buffer.
+ CompleteFlush();
+ }
+
+ EnqueueOutputBuffers();
+}
+
+bool V4L2StatefulVideoDecoderBackend::InitiateFlush(
+ VideoDecoder::DecodeCB flush_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+ DCHECK(!flush_cb_);
+
+ // Submit any pending input buffer at the time of flush.
+ if (current_input_buffer_) {
+ std::move(*current_input_buffer_).QueueMMap();
+ current_input_buffer_.reset();
+ }
+
+ client_->InitiateFlush();
+ flush_cb_ = std::move(flush_cb);
+
+ // Special case: if our CAPTURE queue is not streaming, we cannot receive
+ // the CAPTURE buffer with the LAST flag set that signals the end of flush.
+ // In this case, we should complete the flush immediately.
+ if (!output_queue_->IsStreaming())
+ return CompleteFlush();
+
+ // Send the STOP command to the V4L2 device. The device will let us know
+ // that the flush is completed by sending us a CAPTURE buffer with the LAST
+ // flag set.
+ struct v4l2_decoder_cmd cmd;
+ memset(&cmd, 0, sizeof(cmd));
+ cmd.cmd = V4L2_DEC_CMD_STOP;
+ if (device_->Ioctl(VIDIOC_DECODER_CMD, &cmd) != 0) {
+ LOG(ERROR) << "Failed to issue STOP command";
+ client_->OnBackendError();
+ return false;
+ }
+
+ return true;
+}
+
+bool V4L2StatefulVideoDecoderBackend::CompleteFlush() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+ DCHECK(flush_cb_);
+
+ // Signal that flush has properly been completed.
+ std::move(flush_cb_).Run(DecodeStatus::OK);
+
+ // If CAPTURE queue is streaming, send the START command to the V4L2 device
+ // to signal that we are resuming decoding with the same state.
+ if (output_queue_->IsStreaming()) {
+ struct v4l2_decoder_cmd cmd;
+ memset(&cmd, 0, sizeof(cmd));
+ cmd.cmd = V4L2_DEC_CMD_START;
+ if (device_->Ioctl(VIDIOC_DECODER_CMD, &cmd) != 0) {
+ LOG(ERROR) << "Failed to issue START command";
+ std::move(flush_cb_).Run(DecodeStatus::DECODE_ERROR);
+ client_->OnBackendError();
+ return false;
+ }
+ }
+
+ client_->CompleteFlush();
+
+ // Resume decoding if data is available.
+ ScheduleDecodeWork();
+
+ return true;
+}
+
+void V4L2StatefulVideoDecoderBackend::OnStreamStopped(bool stop_input_queue) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ // If we are resetting, also reset the splitter.
+ if (stop_input_queue)
+ frame_splitter_->Reset();
+}
+
+void V4L2StatefulVideoDecoderBackend::ChangeResolution() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ // Here we just query the new resolution, visible rect, and number of output
+ // buffers before asking the client to update the resolution.
+
+ auto format = output_queue_->GetFormat().first;
+ if (!format) {
+ client_->OnBackendError();
+ return;
+ }
+ const gfx::Size pic_size(format->fmt.pix_mp.width, format->fmt.pix_mp.height);
+
+ auto visible_rect = output_queue_->GetVisibleRect();
+ if (!visible_rect) {
+ client_->OnBackendError();
+ return;
+ }
+
+ auto ctrl = device_->GetCtrl(V4L2_CID_MIN_BUFFERS_FOR_CAPTURE);
+ constexpr size_t DEFAULT_NUM_OUTPUT_BUFFERS = 12;
+ const size_t num_output_buffers =
+ ctrl ? ctrl->value : DEFAULT_NUM_OUTPUT_BUFFERS;
+ if (!ctrl)
+ VLOGF(1) << "Using default minimum number of CAPTURE buffers";
+
+ // Signal that we are flushing and initiate the resolution change.
+ // Our flush will be done when we receive a buffer with the LAST flag on the
+ // CAPTURE queue.
+ client_->InitiateFlush();
+ DCHECK(!resolution_change_cb_);
+ resolution_change_cb_ =
+ base::BindOnce(&V4L2StatefulVideoDecoderBackend::ContinueChangeResolution,
+ weak_this_, pic_size, *visible_rect, num_output_buffers);
+
+ // ...that is, unless we are not streaming yet, in which case the resolution
+ // change can take place immediately.
+ if (!output_queue_->IsStreaming())
+ std::move(resolution_change_cb_).Run();
+}
+
+void V4L2StatefulVideoDecoderBackend::ContinueChangeResolution(
+ const gfx::Size& pic_size,
+ const gfx::Rect& visible_rect,
+ const size_t num_output_buffers) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ // Flush is done, but stay in flushing state and ask our client to set the new
+ // resolution.
+ client_->ChangeResolution(pic_size, visible_rect, num_output_buffers);
+}
+
+bool V4L2StatefulVideoDecoderBackend::ApplyResolution(
+ const gfx::Size& pic_size,
+ const gfx::Rect& visible_rect,
+ const size_t num_output_frames) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ // Use the visible rect for all new frames.
+ visible_rect_ = visible_rect;
+
+ return true;
+}
+
+void V4L2StatefulVideoDecoderBackend::OnChangeResolutionDone(bool success) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ if (!success) {
+ client_->OnBackendError();
+ return;
+ }
+
+ // Flush can be considered completed on the client side.
+ client_->CompleteFlush();
+
+ // Enqueue all available output buffers now that they are allocated.
+ EnqueueOutputBuffers();
+
+ // Also try to progress on our work.
+ DoDecodeWork();
+}
+
+void V4L2StatefulVideoDecoderBackend::ClearPendingRequests(
+ DecodeStatus status) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOGF(3);
+
+ resolution_change_cb_.Reset();
+
+ if (flush_cb_) {
+ std::move(flush_cb_).Run(status);
+ }
+
+ current_input_buffer_.reset();
+
+ if (current_decode_request_) {
+ std::move(current_decode_request_->decode_cb).Run(status);
+ current_decode_request_.reset();
+ }
+
+ while (!decode_request_queue_.empty()) {
+ std::move(decode_request_queue_.front().decode_cb).Run(status);
+ decode_request_queue_.pop();
+ }
+}
+
+// TODO(b:149663704) move into helper function shared between both backends?
+bool V4L2StatefulVideoDecoderBackend::IsSupportedProfile(
+ VideoCodecProfile profile) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(device_);
+ if (supported_profiles_.empty()) {
+ constexpr uint32_t kSupportedInputFourccs[] = {
+ V4L2_PIX_FMT_H264,
+ V4L2_PIX_FMT_VP8,
+ V4L2_PIX_FMT_VP9,
+ };
+ scoped_refptr<V4L2Device> device = V4L2Device::Create();
+ VideoDecodeAccelerator::SupportedProfiles profiles =
+ device->GetSupportedDecodeProfiles(base::size(kSupportedInputFourccs),
+ kSupportedInputFourccs);
+ for (const auto& profile : profiles)
+ supported_profiles_.push_back(profile.profile);
+ }
+ return std::find(supported_profiles_.begin(), supported_profiles_.end(),
+ profile) != supported_profiles_.end();
+}
+
+bool V4L2StatefulVideoDecoderBackend::StopInputQueueOnResChange() const {
+ return false;
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h
new file mode 100644
index 00000000000..62d6d715f4b
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h
@@ -0,0 +1,151 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_V4L2_V4L2_VIDEO_DECODER_BACKEND_STATEFUL_H_
+#define MEDIA_GPU_V4L2_V4L2_VIDEO_DECODER_BACKEND_STATEFUL_H_
+
+#include <memory>
+#include <vector>
+
+#include "base/containers/queue.h"
+#include "base/macros.h"
+#include "base/optional.h"
+#include "base/sequenced_task_runner.h"
+#include "media/base/video_codecs.h"
+#include "media/gpu/v4l2/v4l2_device.h"
+#include "media/gpu/v4l2/v4l2_video_decoder_backend.h"
+
+namespace media {
+
+namespace v4l2_vda_helpers {
+class InputBufferFragmentSplitter;
+}
+
+class V4L2StatefulVideoDecoderBackend : public V4L2VideoDecoderBackend {
+ public:
+ V4L2StatefulVideoDecoderBackend(
+ Client* const client,
+ scoped_refptr<V4L2Device> device,
+ VideoCodecProfile profile,
+ scoped_refptr<base::SequencedTaskRunner> task_runner);
+ ~V4L2StatefulVideoDecoderBackend() override;
+
+ // We don't ever want to copy or move this.
+ V4L2StatefulVideoDecoderBackend(const V4L2StatefulVideoDecoderBackend&) =
+ delete;
+ V4L2StatefulVideoDecoderBackend& operator=(
+ const V4L2StatefulVideoDecoderBackend&) = delete;
+
+ // V4L2VideoDecoderBackend implementation
+ bool Initialize() override;
+ void EnqueueDecodeTask(scoped_refptr<DecoderBuffer> buffer,
+ VideoDecoder::DecodeCB decode_cb,
+ int32_t bitstream_id) override;
+ void OnOutputBufferDequeued(V4L2ReadableBufferRef buffer) override;
+ void OnServiceDeviceTask(bool event) override;
+ void OnStreamStopped(bool stop_input_queue) override;
+ bool ApplyResolution(const gfx::Size& pic_size,
+ const gfx::Rect& visible_rect,
+ const size_t num_output_frames) override;
+ void OnChangeResolutionDone(bool success) override;
+ void ClearPendingRequests(DecodeStatus status) override;
+ bool StopInputQueueOnResChange() const override;
+
+ private:
+ // TODO(b:149663704): merge with stateless?
+ // Request for decoding buffer. Every EnqueueDecodeTask() call generates 1
+ // DecodeRequest.
+ struct DecodeRequest {
+ // The decode buffer passed to EnqueueDecodeTask().
+ scoped_refptr<DecoderBuffer> buffer;
+ // Number of bytes used so far from |buffer|.
+ size_t bytes_used = 0;
+ // The callback function passed to EnqueueDecodeTask().
+ VideoDecoder::DecodeCB decode_cb;
+ // Identifier for the decoder buffer.
+ int32_t bitstream_id;
+
+ DecodeRequest(scoped_refptr<DecoderBuffer> buf,
+ VideoDecoder::DecodeCB cb,
+ int32_t id);
+
+ // Allow move, but not copy
+ DecodeRequest(DecodeRequest&&);
+ DecodeRequest& operator=(DecodeRequest&&);
+
+ ~DecodeRequest();
+
+ bool IsCompleted() const;
+
+ DISALLOW_COPY_AND_ASSIGN(DecodeRequest);
+ };
+
+ bool IsSupportedProfile(VideoCodecProfile profile);
+
+ void DoDecodeWork();
+
+ static void ReuseOutputBufferThunk(
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ base::Optional<base::WeakPtr<V4L2StatefulVideoDecoderBackend>> weak_this,
+ V4L2ReadableBufferRef buffer);
+ void ReuseOutputBuffer(V4L2ReadableBufferRef buffer);
+
+ // Called when the format has changed, in order to reallocate the output
+ // buffers according to the new format.
+ void ChangeResolution();
+ // Called when the flush triggered by a resolution change has completed,
+ // to actually apply the resolution.
+ void ContinueChangeResolution(const gfx::Size& pic_size,
+ const gfx::Rect& visible_rect,
+ const size_t num_output_buffers);
+
+ // Enqueue all output buffers that are available.
+ void EnqueueOutputBuffers();
+ // When a video frame pool is in use, obtain a frame from the pool or, if
+ // none is available, schedule |EnqueueOutputBuffers()| to be called when one
+ // becomes available.
+ scoped_refptr<VideoFrame> GetPoolVideoFrame();
+
+ bool InitiateFlush(VideoDecoder::DecodeCB flush_cb);
+ bool CompleteFlush();
+
+ void ScheduleDecodeWork();
+
+ // Video profile we are decoding.
+ VideoCodecProfile profile_;
+
+ // The task runner we are running on, for convenience.
+ const scoped_refptr<base::SequencedTaskRunner> task_runner_;
+
+ // VideoCodecProfiles supported by a v4l2 stateless decoder driver.
+ std::vector<VideoCodecProfile> supported_profiles_;
+
+ // Queue of pending decode request.
+ base::queue<DecodeRequest> decode_request_queue_;
+
+ // The decode request which is currently processed.
+ base::Optional<DecodeRequest> current_decode_request_;
+ // V4L2 input buffer currently being prepared.
+ base::Optional<V4L2WritableBufferRef> current_input_buffer_;
+
+ std::unique_ptr<v4l2_vda_helpers::InputBufferFragmentSplitter>
+ frame_splitter_;
+
+ base::Optional<gfx::Rect> visible_rect_;
+
+ // Callback of the buffer that triggered a flush, to be called when the
+ // flush completes.
+ VideoDecoder::DecodeCB flush_cb_;
+ // Closure that will be called once the flush triggered by a resolution change
+ // event completes.
+ base::OnceClosure resolution_change_cb_;
+
+ base::WeakPtr<V4L2StatefulVideoDecoderBackend> weak_this_;
+ base::WeakPtrFactory<V4L2StatefulVideoDecoderBackend> weak_this_factory_{
+ this};
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_V4L2_V4L2_VIDEO_DECODER_BACKEND_STATEFUL_H_
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
index b8c3400a990..b03846c0784 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
@@ -559,7 +559,7 @@ void V4L2StatelessVideoDecoderBackend::OnChangeResolutionDone(bool success) {
weak_this_));
}
-void V4L2StatelessVideoDecoderBackend::OnStreamStopped() {
+void V4L2StatelessVideoDecoderBackend::OnStreamStopped(bool stop_input_queue) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DVLOGF(3);
@@ -603,6 +603,10 @@ void V4L2StatelessVideoDecoderBackend::ClearPendingRequests(
}
}
+bool V4L2StatelessVideoDecoderBackend::StopInputQueueOnResChange() const {
+ return true;
+}
+
bool V4L2StatelessVideoDecoderBackend::IsSupportedProfile(
VideoCodecProfile profile) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h
index 0dfa817309d..704d6171f7f 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.h
@@ -43,12 +43,13 @@ class V4L2StatelessVideoDecoderBackend : public V4L2VideoDecoderBackend,
VideoDecoder::DecodeCB decode_cb,
int32_t bitstream_id) override;
void OnOutputBufferDequeued(V4L2ReadableBufferRef buffer) override;
- void OnStreamStopped() override;
+ void OnStreamStopped(bool stop_input_queue) override;
bool ApplyResolution(const gfx::Size& pic_size,
const gfx::Rect& visible_rect,
const size_t num_output_frames) override;
void OnChangeResolutionDone(bool success) override;
void ClearPendingRequests(DecodeStatus status) override;
+ bool StopInputQueueOnResChange() const override;
// V4L2DecodeSurfaceHandler implementation.
scoped_refptr<V4L2DecodeSurface> CreateSurface() override;
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
index 8c7ea443927..97ef7e2a648 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
@@ -381,7 +381,6 @@ bool V4L2VideoEncodeAccelerator::CreateImageProcessor(
const gfx::Rect& output_visible_rect) {
VLOGF(2);
DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
- DCHECK_NE(input_layout.format(), output_format);
auto ip_input_layout = AsMultiPlanarLayout(input_layout);
if (!ip_input_layout) {
@@ -432,7 +431,7 @@ bool V4L2VideoEncodeAccelerator::CreateImageProcessor(
image_processor_ = ImageProcessorFactory::Create(
*input_config, *output_config, {ImageProcessor::OutputMode::IMPORT},
- kImageProcBufferCount, encoder_task_runner_,
+ kImageProcBufferCount, VIDEO_ROTATION_0, encoder_task_runner_,
base::BindRepeating(&V4L2VideoEncodeAccelerator::ImageProcessorError,
weak_this_));
if (!image_processor_) {
@@ -750,6 +749,16 @@ void V4L2VideoEncodeAccelerator::EncodeTask(scoped_refptr<VideoFrame> frame,
return;
if (image_processor_) {
+ if (!frame) {
+ DCHECK(!flush_callback_.is_null());
+ NOTREACHED()
+ << "Flushing is not supported when using an image processor and this "
+ "situation should not happen for well behaved clients.";
+ NOTIFY_ERROR(kIllegalStateError);
+ child_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(std::move(flush_callback_), false));
+ return;
+ }
image_processor_input_queue_.emplace(std::move(frame), force_keyframe);
InputImageProcessorTask();
} else {
@@ -779,7 +788,7 @@ bool V4L2VideoEncodeAccelerator::ReconfigureFormatIfNeeded(
VLOGF(1) << "Encoder resolution is changed during encoding"
<< ", frame.natural_size()=" << frame.natural_size().ToString()
<< ", encoder_input_visible_rect_="
- << input_frame_size_.ToString();
+ << encoder_input_visible_rect_.ToString();
return false;
}
if (frame.coded_size() == input_frame_size_) {
diff --git a/chromium/media/gpu/vaapi/BUILD.gn b/chromium/media/gpu/vaapi/BUILD.gn
index 2524a1c31b5..98dbf6acad4 100644
--- a/chromium/media/gpu/vaapi/BUILD.gn
+++ b/chromium/media/gpu/vaapi/BUILD.gn
@@ -66,11 +66,16 @@ source_set("vaapi") {
"vp8_vaapi_video_decoder_delegate.h",
"vp9_encoder.cc",
"vp9_encoder.h",
+ "vp9_rate_control.cc",
+ "vp9_rate_control.h",
"vp9_vaapi_video_decoder_delegate.cc",
"vp9_vaapi_video_decoder_delegate.h",
]
- configs += [ "//build/config/linux/libva" ]
+ configs += [
+ "//build/config/linux/libva",
+ "//third_party/libvpx:libvpx_config",
+ ]
deps = [
":common",
@@ -83,6 +88,7 @@ source_set("vaapi") {
"//media/gpu/chromeos:common",
"//media/parsers",
"//mojo/public/cpp/bindings",
+ "//third_party/libvpx:libvp9rc",
"//third_party/libyuv",
"//ui/gfx",
"//ui/gfx/geometry",
@@ -116,12 +122,12 @@ source_set("vaapi") {
]
}
- if (ozone_platform_gbm || use_egl) {
+ if (use_ozone || use_egl) {
sources += [
"vaapi_picture_native_pixmap.cc",
"vaapi_picture_native_pixmap.h",
]
- if (ozone_platform_gbm) {
+ if (use_ozone) {
sources += [
"vaapi_picture_native_pixmap_ozone.cc",
"vaapi_picture_native_pixmap_ozone.h",
@@ -194,7 +200,9 @@ source_set("unit_test") {
"vaapi_image_decode_accelerator_worker_unittest.cc",
"vaapi_video_decode_accelerator_unittest.cc",
"vaapi_video_encode_accelerator_unittest.cc",
+ "vp9_encoder_unittest.cc",
]
+ configs += [ "//third_party/libvpx:libvpx_config" ]
deps = [
":common",
":vaapi",
@@ -206,6 +214,7 @@ source_set("unit_test") {
"//mojo/core/embedder",
"//testing/gmock",
"//testing/gtest",
+ "//third_party/libvpx:libvp9rc",
"//ui/gfx:test_support",
"//ui/gfx/geometry",
]
diff --git a/chromium/media/gpu/vaapi/accelerated_video_encoder.cc b/chromium/media/gpu/vaapi/accelerated_video_encoder.cc
index 71acd16c8aa..4bfdb0dc06c 100644
--- a/chromium/media/gpu/vaapi/accelerated_video_encoder.cc
+++ b/chromium/media/gpu/vaapi/accelerated_video_encoder.cc
@@ -40,6 +40,12 @@ void AcceleratedVideoEncoder::EncodeJob::AddSetupCallback(
setup_callbacks_.push(std::move(cb));
}
+void AcceleratedVideoEncoder::EncodeJob::AddPostExecuteCallback(
+ base::OnceClosure cb) {
+ DCHECK(!cb.is_null());
+ post_execute_callbacks_.push(std::move(cb));
+}
+
void AcceleratedVideoEncoder::EncodeJob::AddReferencePicture(
scoped_refptr<CodecPicture> ref_pic) {
DCHECK(ref_pic);
@@ -53,10 +59,21 @@ void AcceleratedVideoEncoder::EncodeJob::Execute() {
}
std::move(execute_callback_).Run();
+
+ while (!post_execute_callbacks_.empty()) {
+ std::move(post_execute_callbacks_.front()).Run();
+ post_execute_callbacks_.pop();
+ }
}
size_t AcceleratedVideoEncoder::GetBitstreamBufferSize() const {
return GetEncodeBitstreamBufferSize(GetCodedSize());
}
+void AcceleratedVideoEncoder::BitrateControlUpdate(
+ uint64_t encoded_chunk_size_bytes) {
+ NOTREACHED() << __func__ << "() is called to on an"
+ << "AcceleratedVideoEncoder that doesn't support BitrateControl"
+ << "::kConstantQuantizationParameter";
+}
} // namespace media
diff --git a/chromium/media/gpu/vaapi/accelerated_video_encoder.h b/chromium/media/gpu/vaapi/accelerated_video_encoder.h
index f214831dd62..e5a51bd96ac 100644
--- a/chromium/media/gpu/vaapi/accelerated_video_encoder.h
+++ b/chromium/media/gpu/vaapi/accelerated_video_encoder.h
@@ -40,12 +40,23 @@ class AcceleratedVideoEncoder {
AcceleratedVideoEncoder() = default;
virtual ~AcceleratedVideoEncoder() = default;
+ enum class BitrateControl {
+ kConstantBitrate, // Constant Bitrate mode. This class relies on other
+ // parts (e.g. driver) to achieve the specified bitrate.
+ kConstantQuantizationParameter // Constant Quantization Parameter mode.
+ // This class needs to compute a proper
+ // quantization parameter and give other
+ // parts (e.g. the driver) the value.
+ };
+
struct Config {
// Maxium number of reference frames.
// For H.264 encoding, the value represents the maximum number of reference
// frames for both the reference picture list 0 (bottom 16 bits) and the
// reference picture list 1 (top 16 bits).
size_t max_num_ref_frames;
+
+ BitrateControl bitrate_control = BitrateControl::kConstantBitrate;
};
// An abstraction of an encode job for one frame. Parameters required for an
@@ -71,6 +82,12 @@ class AcceleratedVideoEncoder {
// is executed.
void AddSetupCallback(base::OnceClosure cb);
+ // Schedules a callback to be run immediately after this job is executed.
+ // Can be called multiple times to schedule multiple callbacks, and all
+ // of them will be run, in order added. Callbacks can be used to e.g. get
+ // the encoded buffer linear size.
+ void AddPostExecuteCallback(base::OnceClosure cb);
+
// Adds |ref_pic| to the list of pictures to be used as reference pictures
// for this frame, to ensure they remain valid until the job is executed
// (or discarded).
@@ -114,6 +131,10 @@ class AcceleratedVideoEncoder {
// calls) to set up the job.
base::queue<base::OnceClosure> setup_callbacks_;
+ // Callbacks to be run (in the same order as the order of
+ // AddPostExecuteCallback() calls) to do post processing after execute.
+ base::queue<base::OnceClosure> post_execute_callbacks_;
+
// Callback to be run to execute this job.
base::OnceClosure execute_callback_;
@@ -153,6 +174,12 @@ class AcceleratedVideoEncoder {
// Prepares a new |encode_job| to be executed in Accelerator and returns true
// on success. The caller may then call Execute() on the job to run it.
virtual bool PrepareEncodeJob(EncodeJob* encode_job) = 0;
+
+ // Notifies the encoded chunk size in bytes to update a bitrate controller in
+ // AcceleratedVideoEncoder. This should be called only if
+ // AcceleratedVideoEncoder is configured with
+ // BitrateControl::kConstantQuantizationParameter.
+ virtual void BitrateControlUpdate(uint64_t encoded_chunk_size_bytes);
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/test_utils.cc b/chromium/media/gpu/vaapi/test_utils.cc
index b534b297bd5..f578bae1071 100644
--- a/chromium/media/gpu/vaapi/test_utils.cc
+++ b/chromium/media/gpu/vaapi/test_utils.cc
@@ -54,9 +54,9 @@ bool CompareImages(const DecodedImage& reference_image,
// Uses the reference image's size as the ground truth.
const gfx::Size image_size = reference_image.size;
if (image_size != hw_decoded_image.size) {
- DLOG(ERROR) << "Wrong expected software decoded image size, "
- << image_size.ToString() << " versus VaAPI provided "
- << hw_decoded_image.size.ToString();
+ LOG(ERROR) << "Wrong expected software decoded image size, "
+ << image_size.ToString() << " versus VaAPI provided "
+ << hw_decoded_image.size.ToString();
return false;
}
@@ -100,7 +100,7 @@ bool CompareImages(const DecodedImage& reference_image,
image_size.width(), image_size.height());
}
if (conversion_result != 0) {
- DLOG(ERROR) << "libyuv conversion error";
+ LOG(ERROR) << "libyuv conversion error";
return false;
}
@@ -112,12 +112,12 @@ bool CompareImages(const DecodedImage& reference_image,
temp_v.get(), half_image_size.width(), image_size.width(),
image_size.height());
} else {
- DLOG(ERROR) << "HW FourCC not supported: " << FourccToString(hw_fourcc);
+ LOG(ERROR) << "HW FourCC not supported: " << FourccToString(hw_fourcc);
return false;
}
if (ssim < min_ssim) {
- DLOG(ERROR) << "SSIM too low: " << ssim << " < " << min_ssim;
+ LOG(ERROR) << "SSIM too low: " << ssim << " < " << min_ssim;
return false;
}
diff --git a/chromium/media/gpu/vaapi/va.sigs b/chromium/media/gpu/vaapi/va.sigs
index f333cb33a7b..c24aad2c3e5 100644
--- a/chromium/media/gpu/vaapi/va.sigs
+++ b/chromium/media/gpu/vaapi/va.sigs
@@ -19,6 +19,7 @@ VAStatus vaDestroyImage(VADisplay dpy, VAImageID image);
VAStatus vaDestroySurfaces(VADisplay dpy, VASurfaceID *surfaces, int num_surfaces);
int vaDisplayIsValid(VADisplay dpy);
VAStatus vaEndPicture(VADisplay dpy, VAContextID context);
+const char *vaEntrypointStr(VAEntrypoint entrypoint);
const char *vaErrorStr(VAStatus error_status);
VAStatus vaExportSurfaceHandle(VADisplay dpy, VASurfaceID surface_id, uint32_t mem_type, uint32_t flags, void *descriptor);
VAStatus vaGetConfigAttributes(VADisplay dpy, VAProfile profile, VAEntrypoint entrypoint, VAConfigAttrib *attrib_list, int num_attribs);
@@ -29,6 +30,7 @@ int vaMaxNumConfigAttributes(VADisplay dpy);
int vaMaxNumEntrypoints(VADisplay dpy);
int vaMaxNumImageFormats(VADisplay dpy);
int vaMaxNumProfiles(VADisplay dpy);
+const char *vaProfileStr(VAProfile profile);
VAStatus vaPutImage (VADisplay dpy, VASurfaceID surface, VAImageID image, int src_x, int src_y, unsigned int src_width, unsigned int src_height, int dest_x, int dest_y, unsigned int dest_width, unsigned int dest_height);
VAStatus vaQueryConfigAttributes(VADisplay dpy, VAConfigID config_id, VAProfile *profile, VAEntrypoint *entrypoint, VAConfigAttrib *attrib_list, int *num_attribs);
VAStatus vaQueryConfigEntrypoints(VADisplay dpy, VAProfile profile, VAEntrypoint *entrypoint_list, int *num_entrypoints);
@@ -37,7 +39,6 @@ VAStatus vaQueryImageFormats(VADisplay dpy, VAImageFormat *format_list, int *num
VAStatus vaQuerySurfaceAttributes(VADisplay dpy, VAConfigID config, VASurfaceAttrib *attrib_list, unsigned int *num_attribs);
const char* vaQueryVendorString(VADisplay dpy);
VAStatus vaRenderPicture(VADisplay dpy, VAContextID context, VABufferID *buffers, int num_buffers);
-VAStatus vaSetDisplayAttributes(VADisplay dpy, VADisplayAttribute *attr_list, int num_attributes);
VAStatus vaSyncSurface(VADisplay dpy, VASurfaceID render_target);
VAStatus vaTerminate(VADisplay dpy);
VAStatus vaUnmapBuffer(VADisplay dpy, VABufferID buf_id);
diff --git a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc
index 4cb6bceda56..3c72d13786b 100644
--- a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc
+++ b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.cc
@@ -75,6 +75,7 @@ std::unique_ptr<ImageProcessorBackend> VaapiImageProcessorBackend::Create(
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner) {
// VaapiImageProcessorBackend supports ChromeOS only.
@@ -136,6 +137,13 @@ std::unique_ptr<ImageProcessorBackend> VaapiImageProcessorBackend::Create(
return nullptr;
}
+ // Checks if VA-API driver supports rotation.
+ if (relative_rotation != VIDEO_ROTATION_0 &&
+ !vaapi_wrapper->IsRotationSupported()) {
+ VLOGF(1) << "VaapiIP doesn't support rotation";
+ return nullptr;
+ }
+
// We should restrict the acceptable PortConfig for input and output both to
// the one returned by GetPlatformVideoFrameLayout(). However,
// ImageProcessorFactory interface doesn't provide information about what
@@ -146,7 +154,7 @@ std::unique_ptr<ImageProcessorBackend> VaapiImageProcessorBackend::Create(
// scenario.
return base::WrapUnique<ImageProcessorBackend>(new VaapiImageProcessorBackend(
std::move(vaapi_wrapper), input_config, output_config, OutputMode::IMPORT,
- std::move(error_cb), std::move(backend_task_runner)));
+ relative_rotation, std::move(error_cb), std::move(backend_task_runner)));
#endif
}
@@ -155,11 +163,13 @@ VaapiImageProcessorBackend::VaapiImageProcessorBackend(
const PortConfig& input_config,
const PortConfig& output_config,
OutputMode output_mode,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner)
: ImageProcessorBackend(input_config,
output_config,
output_mode,
+ relative_rotation,
std::move(error_cb),
std::move(backend_task_runner)),
vaapi_wrapper_(std::move(vaapi_wrapper)) {}
@@ -206,9 +216,9 @@ void VaapiImageProcessorBackend::Process(scoped_refptr<VideoFrame> input_frame,
return;
// VA-API performs pixel format conversion and scaling without any filters.
- if (!vaapi_wrapper_->BlitSurface(*src_va_surface, *dst_va_surface,
- input_frame->visible_rect(),
- output_frame->visible_rect())) {
+ if (!vaapi_wrapper_->BlitSurface(
+ *src_va_surface, *dst_va_surface, input_frame->visible_rect(),
+ output_frame->visible_rect(), relative_rotation_)) {
// Failed to execute BlitSurface(). Since VaapiWrapper has invoked
// ReportToUMA(), calling error_cb_ here is not needed.
return;
diff --git a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h
index 8abbb323dd8..8d5da751214 100644
--- a/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h
+++ b/chromium/media/gpu/vaapi/vaapi_image_processor_backend.h
@@ -28,6 +28,7 @@ class VaapiImageProcessorBackend : public ImageProcessorBackend {
const PortConfig& input_config,
const PortConfig& output_config,
const std::vector<OutputMode>& preferred_output_modes,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
@@ -42,6 +43,7 @@ class VaapiImageProcessorBackend : public ImageProcessorBackend {
const PortConfig& input_config,
const PortConfig& output_config,
OutputMode output_mode,
+ VideoRotation relative_rotation,
ErrorCB error_cb,
scoped_refptr<base::SequencedTaskRunner> backend_task_runner);
~VaapiImageProcessorBackend() override;
diff --git a/chromium/media/gpu/vaapi/vaapi_unittest.cc b/chromium/media/gpu/vaapi/vaapi_unittest.cc
index d3d459fadf8..abb662d777a 100644
--- a/chromium/media/gpu/vaapi/vaapi_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_unittest.cc
@@ -11,9 +11,11 @@
#include <vector>
#include <va/va.h>
+#include <va/va_str.h>
#include "base/files/file.h"
#include "base/files/scoped_file.h"
+#include "base/logging.h"
#include "base/optional.h"
#include "base/process/launch.h"
#include "base/stl_util.h"
@@ -34,10 +36,8 @@ base::Optional<VAProfile> ConvertToVAProfile(VideoCodecProfile profile) {
{VP8PROFILE_ANY, VAProfileVP8Version0_3},
{VP9PROFILE_PROFILE0, VAProfileVP9Profile0},
{VP9PROFILE_PROFILE1, VAProfileVP9Profile1},
- // TODO(crbug.com/1011454, crbug.com/1011469): Reenable
- // VP9PROFILE_PROFILE2 and _PROFILE3 when P010 is completely supported.
- //{VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
- //{VP9PROFILE_PROFILE3, VAProfileVP9Profile3},
+ {VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
+ {VP9PROFILE_PROFILE3, VAProfileVP9Profile3},
};
auto it = kProfileMap.find(profile);
return it != kProfileMap.end() ? base::make_optional<VAProfile>(it->second)
@@ -56,10 +56,8 @@ base::Optional<VAProfile> StringToVAProfile(const std::string& va_profile) {
{"VAProfileVP8Version0_3", VAProfileVP8Version0_3},
{"VAProfileVP9Profile0", VAProfileVP9Profile0},
{"VAProfileVP9Profile1", VAProfileVP9Profile1},
- // TODO(crbug.com/1011454, crbug.com/1011469): Reenable
- // VP9PROFILE_PROFILE2 and _PROFILE3 when P010 is completely supported.
- // {"VAProfileVP9Profile2", VAProfileVP9Profile2},
- // {"VAProfileVP9Profile3", VAProfileVP9Profile3},
+ {"VAProfileVP9Profile2", VAProfileVP9Profile2},
+ {"VAProfileVP9Profile3", VAProfileVP9Profile3},
};
auto it = kStringToVAProfile.find(va_profile);
@@ -165,7 +163,8 @@ TEST_F(VaapiTest, VaapiProfiles) {
va_info[VAProfileH264ConstrainedBaseline], VAEntrypointVLD);
}
- EXPECT_TRUE(is_profile_supported) << " profile: " << profile.profile;
+ EXPECT_TRUE(is_profile_supported)
+ << " profile: " << GetProfileName(profile.profile);
}
for (const auto& profile : VaapiWrapper::GetSupportedEncodeProfiles()) {
@@ -184,7 +183,8 @@ TEST_F(VaapiTest, VaapiProfiles) {
VAEntrypointEncSliceLP);
}
- EXPECT_TRUE(is_profile_supported) << " profile: " << profile.profile;
+ EXPECT_TRUE(is_profile_supported)
+ << " profile: " << GetProfileName(profile.profile);
}
EXPECT_EQ(VaapiWrapper::IsDecodeSupported(VAProfileJPEGBaseline),
@@ -194,21 +194,24 @@ TEST_F(VaapiTest, VaapiProfiles) {
base::Contains(va_info[VAProfileJPEGBaseline], VAEntrypointEncPicture));
}
+// Verifies that the default VAEntrypoint as per VaapiWrapper is indeed among
+// the supported ones.
TEST_F(VaapiTest, DefaultEntrypointIsSupported) {
for (size_t i = 0; i < VaapiWrapper::kCodecModeMax; ++i) {
- const VaapiWrapper::CodecMode mode =
- static_cast<VaapiWrapper::CodecMode>(i);
+ const auto wrapper_mode = static_cast<VaapiWrapper::CodecMode>(i);
std::map<VAProfile, std::vector<VAEntrypoint>> configurations =
- VaapiWrapper::GetSupportedConfigurationsForCodecModeForTesting(mode);
+ VaapiWrapper::GetSupportedConfigurationsForCodecModeForTesting(
+ wrapper_mode);
for (const auto& profile_and_entrypoints : configurations) {
const VAEntrypoint default_entrypoint =
- VaapiWrapper::GetDefaultVaEntryPoint(mode,
+ VaapiWrapper::GetDefaultVaEntryPoint(wrapper_mode,
profile_and_entrypoints.first);
const auto& supported_entrypoints = profile_and_entrypoints.second;
EXPECT_TRUE(base::Contains(supported_entrypoints, default_entrypoint))
- << "Default VAEntrypoint " << default_entrypoint
- << " (mode = " << mode << ") is not supported for VAProfile = "
- << profile_and_entrypoints.first;
+ << "Default VAEntrypoint " << vaEntrypointStr(default_entrypoint)
+ << " (VaapiWrapper mode = " << wrapper_mode
+ << ") is not supported for "
+ << vaProfileStr(profile_and_entrypoints.first);
}
}
}
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
index ed053f16ec5..62b90c85858 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.h
@@ -19,7 +19,6 @@
#include "base/containers/queue.h"
#include "base/containers/small_map.h"
-#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
index c97f1a06cd9..48b9092156b 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
@@ -33,17 +33,15 @@ constexpr size_t kTimestampCacheSize = 128;
// Returns the preferred VA_RT_FORMAT for the given |profile|.
unsigned int GetVaFormatForVideoCodecProfile(VideoCodecProfile profile) {
- switch (profile) {
- case VP9PROFILE_PROFILE2:
- case VP9PROFILE_PROFILE3:
- return VA_RT_FORMAT_YUV420_10BPP;
- default:
- return VA_RT_FORMAT_YUV420;
- }
+ if (profile == VP9PROFILE_PROFILE2 || profile == VP9PROFILE_PROFILE3)
+ return VA_RT_FORMAT_YUV420_10BPP;
+ return VA_RT_FORMAT_YUV420;
}
-gfx::BufferFormat GetBufferFormat() {
+gfx::BufferFormat GetBufferFormat(VideoCodecProfile profile) {
#if defined(USE_OZONE)
+ if (profile == VP9PROFILE_PROFILE2 || profile == VP9PROFILE_PROFILE3)
+ return gfx::BufferFormat::P010;
return gfx::BufferFormat::YUV_420_BIPLANAR;
#else
return gfx::BufferFormat::RGBX_8888;
@@ -341,7 +339,7 @@ scoped_refptr<VASurface> VaapiVideoDecoder::CreateSurface() {
void VaapiVideoDecoder::SurfaceReady(scoped_refptr<VASurface> va_surface,
int32_t buffer_id,
const gfx::Rect& visible_rect,
- const VideoColorSpace& /*color_space*/) {
+ const VideoColorSpace& color_space) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK_EQ(state_, State::kDecoding);
DVLOGF(3);
@@ -360,16 +358,7 @@ void VaapiVideoDecoder::SurfaceReady(scoped_refptr<VASurface> va_surface,
// Find the frame associated with the surface. We won't erase it from
// |output_frames_| yet, as the decoder might still be using it for reference.
DCHECK_EQ(output_frames_.count(va_surface->id()), 1u);
- OutputFrameTask(output_frames_[va_surface->id()], visible_rect, timestamp);
-}
-
-void VaapiVideoDecoder::OutputFrameTask(scoped_refptr<VideoFrame> video_frame,
- const gfx::Rect& visible_rect,
- base::TimeDelta timestamp) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
- DCHECK_EQ(state_, State::kDecoding);
- DCHECK(video_frame);
- DVLOGF(4);
+ scoped_refptr<VideoFrame> video_frame = output_frames_[va_surface->id()];
// Set the timestamp at which the decode operation started on the
// |video_frame|. If the frame has been outputted before (e.g. because of VP9
@@ -389,6 +378,10 @@ void VaapiVideoDecoder::OutputFrameTask(scoped_refptr<VideoFrame> video_frame,
video_frame = std::move(wrapped_frame);
}
+ const auto gfx_color_space = color_space.ToGfxColorSpace();
+ if (gfx_color_space.IsValid())
+ video_frame->set_color_space(gfx_color_space);
+
output_cb_.Run(std::move(video_frame));
}
@@ -403,12 +396,18 @@ void VaapiVideoDecoder::ApplyResolutionChange() {
gfx::Size natural_size = GetNaturalSize(visible_rect, pixel_aspect_ratio_);
pic_size_ = decoder_->GetPicSize();
const base::Optional<VideoPixelFormat> format =
- GfxBufferFormatToVideoPixelFormat(GetBufferFormat());
+ GfxBufferFormatToVideoPixelFormat(
+ GetBufferFormat(decoder_->GetProfile()));
CHECK(format);
auto format_fourcc = Fourcc::FromVideoPixelFormat(*format);
CHECK(format_fourcc);
- frame_pool_->Initialize(*format_fourcc, pic_size_, visible_rect, natural_size,
- decoder_->GetRequiredNumOfPictures());
+ if (!frame_pool_->Initialize(*format_fourcc, pic_size_, visible_rect,
+ natural_size,
+ decoder_->GetRequiredNumOfPictures())) {
+ DLOG(WARNING) << "Failed Initialize()ing the frame pool.";
+ SetState(State::kError);
+ return;
+ }
// All pending decode operations will be completed before triggering a
// resolution change, so we can safely destroy the context here.
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.h b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
index db186f14734..d7a4d3e18c0 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
@@ -102,10 +102,6 @@ class VaapiVideoDecoder : public DecoderInterface,
// resetting or destroying the decoder, or encountering an error.
void ClearDecodeTaskQueue(DecodeStatus status);
- // Output a single |video_frame| on the decoder thread.
- void OutputFrameTask(scoped_refptr<VideoFrame> video_frame,
- const gfx::Rect& visible_rect,
- base::TimeDelta timestamp);
// Release the video frame associated with the specified |surface_id| on the
// decoder thread. This is called when the last reference to the associated
// VASurface has been released, which happens when the decoder outputted the
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
index c7ae04b8be9..528c424a1a8 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
@@ -18,6 +18,7 @@
#include "base/bind_helpers.h"
#include "base/bits.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
@@ -77,12 +78,6 @@ static void ReportToUMA(VAVEAEncoderFailure failure) {
// requirements.
gfx::Size GetInputFrameSize(VideoPixelFormat format,
const gfx::Size& visible_size) {
- if (format == PIXEL_FORMAT_I420) {
- // Since we don't have gfx::BufferFormat for I420, replace I420 with YV12.
- // Remove this workaround once crrev.com/c/1573718 is landed.
- format = PIXEL_FORMAT_YV12;
- }
-
std::unique_ptr<::gpu::GpuMemoryBufferFactory> gpu_memory_buffer_factory =
::gpu::GpuMemoryBufferFactory::CreateNativeType(nullptr);
// Get a VideoFrameLayout of a graphic buffer with the same gfx::BufferUsage
@@ -168,6 +163,8 @@ struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
VideoEncodeAccelerator::SupportedProfiles
VaapiVideoEncodeAccelerator::GetSupportedProfiles() {
+ if (IsConfiguredForTesting())
+ return supported_profiles_for_testing_;
return VaapiWrapper::GetSupportedEncodeProfiles();
}
@@ -341,13 +338,18 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config,
return false;
}
- vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
- VaapiWrapper::kEncode, config.output_profile,
- base::Bind(&ReportToUMA, VAAPI_ERROR));
- if (!vaapi_wrapper_) {
- VLOGF(1) << "Failed initializing VAAPI for profile "
- << GetProfileName(config.output_profile);
- return false;
+ DCHECK_EQ(IsConfiguredForTesting(), !!vaapi_wrapper_);
+ if (!IsConfiguredForTesting()) {
+ VaapiWrapper::CodecMode mode =
+ codec == kCodecVP9 ? VaapiWrapper::kEncodeConstantQuantizationParameter
+ : VaapiWrapper::kEncode;
+ vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
+ mode, config.output_profile, base::Bind(&ReportToUMA, VAAPI_ERROR));
+ if (!vaapi_wrapper_) {
+ VLOGF(1) << "Failed initializing VAAPI for profile "
+ << GetProfileName(config.output_profile);
+ return false;
+ }
}
// Finish remaining initialization on the encoder thread.
@@ -363,33 +365,46 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) {
VLOGF(2);
VideoCodec codec = VideoCodecProfileToVideoCodec(config.output_profile);
+ AcceleratedVideoEncoder::Config ave_config{};
+ DCHECK_EQ(IsConfiguredForTesting(), !!encoder_);
switch (codec) {
case kCodecH264:
- encoder_ = std::make_unique<H264Encoder>(
- std::make_unique<H264Accelerator>(this));
+ if (!IsConfiguredForTesting()) {
+ encoder_ = std::make_unique<H264Encoder>(
+ std::make_unique<H264Accelerator>(this));
+ }
+ DCHECK_EQ(ave_config.bitrate_control,
+ AcceleratedVideoEncoder::BitrateControl::kConstantBitrate);
break;
-
case kCodecVP8:
- encoder_ =
- std::make_unique<VP8Encoder>(std::make_unique<VP8Accelerator>(this));
+ if (!IsConfiguredForTesting()) {
+ encoder_ = std::make_unique<VP8Encoder>(
+ std::make_unique<VP8Accelerator>(this));
+ }
+ DCHECK_EQ(ave_config.bitrate_control,
+ AcceleratedVideoEncoder::BitrateControl::kConstantBitrate);
break;
-
case kCodecVP9:
- encoder_ =
- std::make_unique<VP9Encoder>(std::make_unique<VP9Accelerator>(this));
+ if (!IsConfiguredForTesting()) {
+ encoder_ = std::make_unique<VP9Encoder>(
+ std::make_unique<VP9Accelerator>(this));
+ }
+ ave_config.bitrate_control = AcceleratedVideoEncoder::BitrateControl::
+ kConstantQuantizationParameter;
break;
-
default:
NOTREACHED() << "Unsupported codec type " << GetCodecName(codec);
return;
}
- AcceleratedVideoEncoder::Config ave_config;
if (!vaapi_wrapper_->GetVAEncMaxNumOfRefFrames(
- config.output_profile, &ave_config.max_num_ref_frames))
+ config.output_profile, &ave_config.max_num_ref_frames)) {
+ NOTIFY_ERROR(kPlatformFailureError,
+ "Failed getting max number of reference frames"
+ "supported by the driver");
return;
+ }
DCHECK_GT(ave_config.max_num_ref_frames, 0u);
-
if (!encoder_->Initialize(config, ave_config)) {
NOTIFY_ERROR(kInvalidArgumentError, "Failed initializing encoder");
return;
@@ -409,13 +424,17 @@ void VaapiVideoEncodeAccelerator::InitializeTask(const Config& config) {
expected_input_coded_size_.width() <= encoder_->GetCodedSize().width() &&
expected_input_coded_size_.height() <= encoder_->GetCodedSize().height());
- // The aligned surface size must be the same as a size of a native graphic
- // buffer.
- aligned_va_surface_size_ =
- GetInputFrameSize(config.input_format, config.input_visible_size);
- if (aligned_va_surface_size_.IsEmpty()) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed to get frame size");
- return;
+ DCHECK_EQ(IsConfiguredForTesting(), !aligned_va_surface_size_.IsEmpty());
+ if (!IsConfiguredForTesting()) {
+ // The aligned VA surface size must be the same as a size of a native
+ // graphics buffer. Since the VA surface's format is NV12, we specify NV12
+ // to query the size of the native graphics buffer.
+ aligned_va_surface_size_ =
+ GetInputFrameSize(PIXEL_FORMAT_NV12, config.input_visible_size);
+ if (aligned_va_surface_size_.IsEmpty()) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed to get frame size");
+ return;
+ }
}
va_surfaces_per_video_frame_ =
@@ -536,6 +555,19 @@ void VaapiVideoEncodeAccelerator::SubmitH264BitstreamBuffer(
}
}
+void VaapiVideoEncodeAccelerator::NotifyEncodedChunkSize(
+ VABufferID buffer_id,
+ VASurfaceID sync_surface_id) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
+ const uint64_t encoded_chunk_size =
+ vaapi_wrapper_->GetEncodedChunkSize(buffer_id, sync_surface_id);
+ if (encoded_chunk_size == 0)
+ NOTIFY_ERROR(kPlatformFailureError, "Failed getting an encoded chunksize");
+
+ DCHECK(encoder_);
+ encoder_->BitrateControlUpdate(encoded_chunk_size);
+}
+
void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
@@ -567,26 +599,27 @@ void VaapiVideoEncodeAccelerator::ReturnBitstreamBuffer(
std::unique_ptr<VaapiEncodeJob> encode_job,
std::unique_ptr<BitstreamBufferRef> buffer) {
DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
+ const VABufferID coded_buffer_id = encode_job->coded_buffer_id();
+ base::ScopedClosureRunner scoped_buffer(
+ base::BindOnce(&VaapiWrapper::DestroyVABuffer,
+ base::Unretained(vaapi_wrapper_.get()), coded_buffer_id));
uint8_t* target_data = static_cast<uint8_t*>(buffer->shm->memory());
size_t data_size = 0;
-
if (!vaapi_wrapper_->DownloadFromVABuffer(
encode_job->coded_buffer_id(), encode_job->input_surface()->id(),
target_data, buffer->shm->size(), &data_size)) {
NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
return;
}
-
DVLOGF(4) << "Returning bitstream buffer "
<< (encode_job->IsKeyframeRequested() ? "(keyframe)" : "")
<< " id: " << buffer->id << " size: " << data_size;
+ scoped_buffer.RunAndReset();
child_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&Client::BitstreamBufferReady, client_,
buffer->id, encode_job->Metadata(data_size)));
-
- vaapi_wrapper_->DestroyVABuffer(encode_job->coded_buffer_id());
}
void VaapiVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
@@ -1476,6 +1509,25 @@ bool VaapiVideoEncodeAccelerator::VP9Accelerator::SubmitFrameParameters(
pic_param.log2_tile_rows = frame_header->tile_rows_log2;
pic_param.log2_tile_columns = frame_header->tile_cols_log2;
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
+ base::Unretained(vea_), VAEncSequenceParameterBufferType,
+ MakeRefCountedBytes(&seq_param, sizeof(seq_param))));
+
+ job->AddSetupCallback(
+ base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
+ base::Unretained(vea_), VAEncPictureParameterBufferType,
+ MakeRefCountedBytes(&pic_param, sizeof(pic_param))));
+
+ if (bitrate_control_ ==
+ AcceleratedVideoEncoder::BitrateControl::kConstantQuantizationParameter) {
+ job->AddPostExecuteCallback(base::BindOnce(
+ &VaapiVideoEncodeAccelerator::NotifyEncodedChunkSize,
+ base::Unretained(vea_), job->AsVaapiEncodeJob()->coded_buffer_id(),
+ job->AsVaapiEncodeJob()->input_surface()->id()));
+ return true;
+ }
+
VAEncMiscParameterRateControl rate_control_param = {};
rate_control_param.bits_per_second =
encode_params.bitrate_allocation.GetSumBps();
@@ -1493,16 +1545,6 @@ bool VaapiVideoEncodeAccelerator::VP9Accelerator::SubmitFrameParameters(
hrd_param.buffer_size = encode_params.cpb_size_bits;
hrd_param.initial_buffer_fullness = hrd_param.buffer_size / 2;
- job->AddSetupCallback(
- base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
- base::Unretained(vea_), VAEncSequenceParameterBufferType,
- MakeRefCountedBytes(&seq_param, sizeof(seq_param))));
-
- job->AddSetupCallback(
- base::BindOnce(&VaapiVideoEncodeAccelerator::SubmitBuffer,
- base::Unretained(vea_), VAEncPictureParameterBufferType,
- MakeRefCountedBytes(&pic_param, sizeof(pic_param))));
-
job->AddSetupCallback(base::BindOnce(
&VaapiVideoEncodeAccelerator::SubmitVAEncMiscParamBuffer,
base::Unretained(vea_), VAEncMiscParameterTypeRateControl,
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
index cdb90fd455d..ec0f1ca5860 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.h
@@ -35,7 +35,7 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
~VaapiVideoEncodeAccelerator() override;
// VideoEncodeAccelerator implementation.
- VideoEncodeAccelerator::SupportedProfiles GetSupportedProfiles() override;
+ SupportedProfiles GetSupportedProfiles() override;
bool Initialize(const Config& config, Client* client) override;
void Encode(scoped_refptr<VideoFrame> frame, bool force_keyframe) override;
void UseOutputBitstreamBuffer(BitstreamBuffer buffer) override;
@@ -49,6 +49,7 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
bool IsFlushSupported() override;
private:
+ friend class VaapiVideoEncodeAcceleratorTest;
class H264Accelerator;
class VP8Accelerator;
class VP9Accelerator;
@@ -148,6 +149,15 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// Submits a H264BitstreamBuffer |buffer| to the driver.
void SubmitH264BitstreamBuffer(scoped_refptr<H264BitstreamBuffer> buffer);
+ // Gets the encoded chunk size whose id is |buffer_id| and notifies |encoder_|
+ // the size.
+ void NotifyEncodedChunkSize(VABufferID buffer_id,
+ VASurfaceID sync_surface_id);
+
+ bool IsConfiguredForTesting() const {
+ return !supported_profiles_for_testing_.empty();
+ }
+
// The unchanged values are filled upon the construction. The varied values
// (e.g. ScalingSettings) are filled properly during encoding.
VideoEncoderInfo encoder_info_;
@@ -240,6 +250,9 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// The completion callback of the Flush() function.
FlushCallback flush_callback_;
+ // Supported profiles that are filled if and only if in a unit test.
+ SupportedProfiles supported_profiles_for_testing_;
+
// WeakPtr of this, bound to |child_task_runner_|.
base::WeakPtr<VaapiVideoEncodeAccelerator> child_weak_this_;
// WeakPtr of this, bound to |encoder_task_runner_|.
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
index 01bfbb3a6e0..896a7251dbc 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
@@ -4,22 +4,48 @@
#include "media/gpu/vaapi/vaapi_video_encode_accelerator.h"
+#include <memory>
+#include <numeric>
+#include <vector>
+
+#include "base/run_loop.h"
+#include "base/test/gmock_callback_support.h"
#include "base/test/task_environment.h"
#include "media/video/video_encode_accelerator.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+using base::test::RunClosure;
+using ::testing::_;
+using ::testing::Return;
+using ::testing::WithArgs;
+
namespace media {
namespace {
constexpr gfx::Size kDefaultEncodeSize(1280, 720);
constexpr uint32_t kDefaultBitrateBps = 4 * 1000 * 1000;
constexpr uint32_t kDefaultFramerate = 30;
-const VideoEncodeAccelerator::Config kDefaultVEAConfig(PIXEL_FORMAT_I420,
- kDefaultEncodeSize,
- VP8PROFILE_ANY,
- kDefaultBitrateBps,
- kDefaultFramerate);
+constexpr size_t kMaxNumOfRefFrames = 3u;
+const VideoEncodeAccelerator::Config kDefaultVideoEncodeAcceleratorConfig(
+ PIXEL_FORMAT_I420,
+ kDefaultEncodeSize,
+ VP9PROFILE_PROFILE0,
+ kDefaultBitrateBps,
+ kDefaultFramerate);
+
+MATCHER_P2(MatchesAcceleratedVideoEncoderConfig,
+ max_ref_frames,
+ bitrate_control,
+ "") {
+ return arg.max_num_ref_frames == max_ref_frames &&
+ arg.bitrate_control == bitrate_control;
+}
+
+MATCHER_P2(MatchesBitstreamBufferMetadata, payload_size_bytes, key_frame, "") {
+ return arg.payload_size_bytes == payload_size_bytes &&
+ arg.key_frame == key_frame;
+}
class MockVideoEncodeAcceleratorClient : public VideoEncodeAccelerator::Client {
public:
@@ -27,35 +53,239 @@ class MockVideoEncodeAcceleratorClient : public VideoEncodeAccelerator::Client {
virtual ~MockVideoEncodeAcceleratorClient() = default;
MOCK_METHOD3(RequireBitstreamBuffers,
- void(unsigned int, const gfx::Size&, size_t output_buffer_size));
+ void(unsigned int, const gfx::Size&, size_t));
MOCK_METHOD2(BitstreamBufferReady,
void(int32_t, const BitstreamBufferMetadata&));
MOCK_METHOD1(NotifyError, void(VideoEncodeAccelerator::Error));
- MOCK_METHOD1(NotifyEncoderInfoChange, void(const VideoEncoderInfo& info));
+ MOCK_METHOD1(NotifyEncoderInfoChange, void(const VideoEncoderInfo&));
};
-struct VaapiVEAInitializeTestParam {
- uint8_t num_of_temporal_layers = 0;
- uint8_t num_of_spatial_layers = 0;
- bool expected_result;
+class MockVaapiWrapper : public VaapiWrapper {
+ public:
+ MockVaapiWrapper(CodecMode mode) : VaapiWrapper(mode) {}
+ MOCK_METHOD2(GetVAEncMaxNumOfRefFrames, bool(VideoCodecProfile, size_t*));
+ MOCK_METHOD5(CreateContextAndSurfaces,
+ bool(unsigned int,
+ const gfx::Size&,
+ SurfaceUsageHint,
+ size_t,
+ std::vector<VASurfaceID>*));
+ MOCK_METHOD2(CreateVABuffer, bool(size_t, VABufferID*));
+ MOCK_METHOD2(GetEncodedChunkSize, uint64_t(VABufferID, VASurfaceID));
+ MOCK_METHOD5(DownloadFromVABuffer,
+ bool(VABufferID, VASurfaceID, uint8_t*, size_t, size_t*));
+ MOCK_METHOD3(UploadVideoFrameToSurface,
+ bool(const VideoFrame&, VASurfaceID, const gfx::Size&));
+ MOCK_METHOD1(ExecuteAndDestroyPendingBuffers, bool(VASurfaceID));
+ MOCK_METHOD1(DestroyVABuffer, void(VABufferID));
+ MOCK_METHOD0(DestroyContext, void());
+ MOCK_METHOD1(DestroySurfaces, void(std::vector<VASurfaceID> va_surface_ids));
+
+ private:
+ ~MockVaapiWrapper() override = default;
+};
+
+class MockAcceleratedVideoEncoder : public AcceleratedVideoEncoder {
+ public:
+ MOCK_METHOD2(Initialize,
+ bool(const VideoEncodeAccelerator::Config&,
+ const AcceleratedVideoEncoder::Config&));
+ MOCK_CONST_METHOD0(GetCodedSize, gfx::Size());
+ MOCK_CONST_METHOD0(GetBitstreamBufferSize, size_t());
+ MOCK_CONST_METHOD0(GetMaxNumOfRefFrames, size_t());
+ MOCK_METHOD1(PrepareEncodeJob, bool(EncodeJob*));
+ MOCK_METHOD1(BitrateControlUpdate, void(uint64_t));
+ bool UpdateRates(const VideoBitrateAllocation&, uint32_t) override {
+ return false;
+ }
+ ScalingSettings GetScalingSettings() const override {
+ return ScalingSettings();
+ }
};
+} // namespace
+
+struct VaapiVideoEncodeAcceleratorTestParam;
-class VaapiVEAInitializeTest
- : public ::testing::TestWithParam<VaapiVEAInitializeTestParam> {
+class VaapiVideoEncodeAcceleratorTest
+ : public ::testing::TestWithParam<VaapiVideoEncodeAcceleratorTestParam> {
protected:
- VaapiVEAInitializeTest() = default;
- ~VaapiVEAInitializeTest() override = default;
+ VaapiVideoEncodeAcceleratorTest() = default;
+ ~VaapiVideoEncodeAcceleratorTest() override = default;
+
+ void SetUp() override {
+ mock_vaapi_wrapper_ =
+ base::MakeRefCounted<MockVaapiWrapper>(VaapiWrapper::kEncode);
+ encoder_.reset(new VaapiVideoEncodeAccelerator);
+ auto* vaapi_encoder =
+ reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder_.get());
+ vaapi_encoder->vaapi_wrapper_ = mock_vaapi_wrapper_;
+ vaapi_encoder->encoder_ = std::make_unique<MockAcceleratedVideoEncoder>();
+ mock_encoder_ = reinterpret_cast<MockAcceleratedVideoEncoder*>(
+ vaapi_encoder->encoder_.get());
+ }
+
+ void SetDefaultMocksBehavior(const VideoEncodeAccelerator::Config& config) {
+ ASSERT_TRUE(mock_vaapi_wrapper_);
+ ASSERT_TRUE(mock_encoder_);
+
+ ON_CALL(*mock_vaapi_wrapper_, GetVAEncMaxNumOfRefFrames)
+ .WillByDefault(WithArgs<1>([](size_t* max_ref_frames) {
+ *max_ref_frames = kMaxNumOfRefFrames;
+ return true;
+ }));
+
+ ON_CALL(*mock_encoder_, GetBitstreamBufferSize)
+ .WillByDefault(Return(config.input_visible_size.GetArea()));
+ ON_CALL(*mock_encoder_, GetCodedSize())
+ .WillByDefault(Return(config.input_visible_size));
+ ON_CALL(*mock_encoder_, GetMaxNumOfRefFrames())
+ .WillByDefault(Return(kMaxNumOfRefFrames));
+ }
+
+ bool InitializeVideoEncodeAccelerator(
+ const VideoEncodeAccelerator::Config& config) {
+ VideoEncodeAccelerator::SupportedProfile profile(config.output_profile,
+ config.input_visible_size);
+ auto* vaapi_encoder =
+ reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder_.get());
+ vaapi_encoder->supported_profiles_for_testing_.push_back(profile);
+ vaapi_encoder->aligned_va_surface_size_ = config.input_visible_size;
+ if (config.input_visible_size.IsEmpty())
+ return false;
+ return encoder_->Initialize(config, &client_);
+ }
+
+ void InitializeSequenceForVP9(const VideoEncodeAccelerator::Config& config) {
+ base::RunLoop run_loop;
+ base::Closure quit_closure = run_loop.QuitClosure();
+ ::testing::InSequence s;
+ constexpr auto kBitrateControl =
+ AcceleratedVideoEncoder::BitrateControl::kConstantQuantizationParameter;
+ EXPECT_CALL(*mock_encoder_,
+ Initialize(_, MatchesAcceleratedVideoEncoderConfig(
+ kMaxNumOfRefFrames, kBitrateControl)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_vaapi_wrapper_,
+ CreateContextAndSurfaces(
+ _, kDefaultEncodeSize,
+ VaapiWrapper::SurfaceUsageHint::kVideoEncoder, _, _))
+ .WillOnce(WithArgs<3, 4>(
+ [&surfaces = this->va_surfaces_](
+ size_t num_surfaces, std::vector<VASurfaceID>* va_surface_ids) {
+ surfaces.resize(num_surfaces);
+ std::iota(surfaces.begin(), surfaces.end(), 0);
+ *va_surface_ids = surfaces;
+ return true;
+ }));
+ EXPECT_CALL(client_, RequireBitstreamBuffers(_, kDefaultEncodeSize, _))
+ .WillOnce(WithArgs<2>([this, &quit_closure](size_t output_buffer_size) {
+ this->output_buffer_size_ = output_buffer_size;
+ quit_closure.Run();
+ }));
+ ASSERT_TRUE(InitializeVideoEncodeAccelerator(config));
+ run_loop.Run();
+ }
+
+ void EncodeSequenceForVP9() {
+ base::RunLoop run_loop;
+ base::Closure quit_closure = run_loop.QuitClosure();
+ ::testing::InSequence s;
+
+ constexpr VABufferID kCodedBufferId = 123;
+ EXPECT_CALL(*mock_vaapi_wrapper_, CreateVABuffer(output_buffer_size_, _))
+ .WillOnce(WithArgs<1>([](VABufferID* va_buffer_id) {
+ *va_buffer_id = kCodedBufferId;
+ return true;
+ }));
+
+ ASSERT_FALSE(va_surfaces_.empty());
+ const VASurfaceID kInputSurfaceId = va_surfaces_.back();
+ EXPECT_CALL(*mock_encoder_, PrepareEncodeJob(_))
+ .WillOnce(WithArgs<0>(
+ [encoder = encoder_.get(), kCodedBufferId,
+ kInputSurfaceId](AcceleratedVideoEncoder::EncodeJob* job) {
+ job->AddPostExecuteCallback(base::BindOnce(
+ &VaapiVideoEncodeAccelerator::NotifyEncodedChunkSize,
+ base::Unretained(
+ reinterpret_cast<VaapiVideoEncodeAccelerator*>(encoder)),
+ kCodedBufferId, kInputSurfaceId));
+ return true;
+ }));
+ EXPECT_CALL(
+ *mock_vaapi_wrapper_,
+ UploadVideoFrameToSurface(_, kInputSurfaceId, kDefaultEncodeSize))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_vaapi_wrapper_,
+ ExecuteAndDestroyPendingBuffers(kInputSurfaceId))
+ .WillOnce(Return(true));
+
+ constexpr uint64_t kEncodedChunkSize = 1234;
+ ASSERT_LE(kEncodedChunkSize, output_buffer_size_);
+ EXPECT_CALL(*mock_vaapi_wrapper_,
+ GetEncodedChunkSize(kCodedBufferId, kInputSurfaceId))
+ .WillOnce(Return(kEncodedChunkSize));
+ EXPECT_CALL(*mock_encoder_, BitrateControlUpdate(kEncodedChunkSize))
+ .WillOnce(Return());
+ EXPECT_CALL(*mock_vaapi_wrapper_,
+ DownloadFromVABuffer(kCodedBufferId, kInputSurfaceId, _,
+ output_buffer_size_, _))
+ .WillOnce(WithArgs<4>([](size_t* coded_data_size) {
+ *coded_data_size = kEncodedChunkSize;
+ return true;
+ }));
+ EXPECT_CALL(*mock_vaapi_wrapper_, DestroyVABuffer(kCodedBufferId))
+ .WillOnce(Return());
+
+ constexpr int32_t kBitstreamId = 12;
+ EXPECT_CALL(client_, BitstreamBufferReady(kBitstreamId,
+ MatchesBitstreamBufferMetadata(
+ kEncodedChunkSize, false)))
+ .WillOnce(RunClosure(quit_closure));
+
+ auto region = base::UnsafeSharedMemoryRegion::Create(output_buffer_size_);
+ ASSERT_TRUE(region.IsValid());
+ encoder_->UseOutputBitstreamBuffer(
+ BitstreamBuffer(kBitstreamId, std::move(region), output_buffer_size_));
+
+ auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_I420, kDefaultEncodeSize,
+ gfx::Rect(kDefaultEncodeSize),
+ kDefaultEncodeSize, base::TimeDelta());
+ ASSERT_TRUE(frame);
+ encoder_->Encode(std::move(frame), false /* force_keyframe */);
+ run_loop.Run();
+ }
+
+ size_t output_buffer_size_ = 0;
+ std::vector<VASurfaceID> va_surfaces_;
base::test::TaskEnvironment task_environment_;
+ MockVideoEncodeAcceleratorClient client_;
+ std::unique_ptr<VideoEncodeAccelerator> encoder_;
+ scoped_refptr<MockVaapiWrapper> mock_vaapi_wrapper_;
+ MockAcceleratedVideoEncoder* mock_encoder_ = nullptr;
};
-TEST_P(VaapiVEAInitializeTest, SpatialLayerAndTemporalLayerEncoding) {
- VideoEncodeAccelerator::Config config = kDefaultVEAConfig;
- const uint8_t num_of_temporal_layers = GetParam().num_of_temporal_layers;
+struct VaapiVideoEncodeAcceleratorTestParam {
+ uint8_t num_of_spatial_layers = 0;
+ uint8_t num_of_temporal_layers = 0;
+} kTestCases[]{
+ {1u, 1u}, // Single spatial layer, single temporal layer.
+ {1u, 3u}, // Single spatial layer, multiple temporal layers.
+ {3u, 1u}, // Multiple spatial layers, single temporal layer.
+ {3u, 3u}, // Multiple spatial layers, multiple temporal layers.
+};
+
+TEST_P(VaapiVideoEncodeAcceleratorTest,
+ InitializeVP9WithMultipleSpatialLayers) {
const uint8_t num_of_spatial_layers = GetParam().num_of_spatial_layers;
+ if (num_of_spatial_layers <= 1)
+ GTEST_SKIP() << "Test only meant for multiple spatial layers configuration";
+
+ VideoEncodeAccelerator::Config config = kDefaultVideoEncodeAcceleratorConfig;
+ const uint8_t num_of_temporal_layers = GetParam().num_of_temporal_layers;
constexpr int kDenom[] = {4, 2, 1};
for (uint8_t i = 0; i < num_of_spatial_layers; ++i) {
VideoEncodeAccelerator::Config::SpatialLayer spatial_layer;
- int denom = kDenom[i];
+ const int denom = kDenom[i];
spatial_layer.width = kDefaultEncodeSize.width() / denom;
spatial_layer.height = kDefaultEncodeSize.height() / denom;
spatial_layer.bitrate_bps = kDefaultBitrateBps / denom;
@@ -65,18 +295,29 @@ TEST_P(VaapiVEAInitializeTest, SpatialLayerAndTemporalLayerEncoding) {
config.spatial_layers.push_back(spatial_layer);
}
- VaapiVideoEncodeAccelerator vea;
- MockVideoEncodeAcceleratorClient client;
- EXPECT_EQ(vea.Initialize(config, &client), GetParam().expected_result);
+ EXPECT_FALSE(InitializeVideoEncodeAccelerator(config));
}
-constexpr VaapiVEAInitializeTestParam kTestCases[] = {
- {1u, 3u, false}, // Spatial Layer only.
- {3u, 3u, false}, // Temporal + Spatial Layer.
-};
+TEST_P(VaapiVideoEncodeAcceleratorTest, EncodeVP9WithSingleSpatialLayer) {
+ if (GetParam().num_of_spatial_layers > 1u)
+ GTEST_SKIP() << "Test only meant for single spatial layer";
+
+ VideoEncodeAccelerator::Config config = kDefaultVideoEncodeAcceleratorConfig;
+ VideoEncodeAccelerator::Config::SpatialLayer spatial_layer;
+ spatial_layer.width = kDefaultEncodeSize.width();
+ spatial_layer.height = kDefaultEncodeSize.height();
+ spatial_layer.bitrate_bps = kDefaultBitrateBps;
+ spatial_layer.framerate = kDefaultFramerate;
+ spatial_layer.max_qp = 30;
+ spatial_layer.num_of_temporal_layers = GetParam().num_of_temporal_layers;
+ config.spatial_layers.push_back(spatial_layer);
+ SetDefaultMocksBehavior(config);
-INSTANTIATE_TEST_SUITE_P(SpatialLayerAndTemporalLayerEncoding,
- VaapiVEAInitializeTest,
+ InitializeSequenceForVP9(config);
+ EncodeSequenceForVP9();
+}
+
+INSTANTIATE_TEST_SUITE_P(,
+ VaapiVideoEncodeAcceleratorTest,
::testing::ValuesIn(kTestCases));
-} // namespace
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.cc b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
index f238e6f0851..ad898555fe7 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
@@ -10,6 +10,7 @@
#include <va/va.h>
#include <va/va_drm.h>
#include <va/va_drmcommon.h>
+#include <va/va_str.h>
#include <va/va_version.h>
#include <algorithm>
@@ -115,8 +116,10 @@ uint32_t BufferFormatToVAFourCC(gfx::BufferFormat fmt) {
return VA_FOURCC_YV12;
case gfx::BufferFormat::YUV_420_BIPLANAR:
return VA_FOURCC_NV12;
+ case gfx::BufferFormat::P010:
+ return VA_FOURCC_P010;
default:
- NOTREACHED();
+ NOTREACHED() << gfx::BufferFormatToString(fmt);
return 0;
}
}
@@ -142,6 +145,21 @@ namespace media {
namespace {
+// Returns true if the SoC has a Gen9 GPU. CPU model ID's are referenced from
+// the following file in the kernel source: arch/x86/include/asm/intel-family.h.
+bool IsGen9Gpu() {
+ constexpr int kPentiumAndLaterFamily = 0x06;
+ constexpr int kSkyLakeModelId = 0x5E;
+ constexpr int kSkyLake_LModelId = 0x4E;
+ constexpr int kApolloLakeModelId = 0x5c;
+ static base::NoDestructor<base::CPU> cpuid;
+ static const bool is_gen9_gpu = cpuid->family() == kPentiumAndLaterFamily &&
+ (cpuid->model() == kSkyLakeModelId ||
+ cpuid->model() == kSkyLake_LModelId ||
+ cpuid->model() == kApolloLakeModelId);
+ return is_gen9_gpu;
+}
+
// Returns true if the SoC has a 9.5 GPU. CPU model IDs are referenced from the
// following file in the kernel source: arch/x86/include/asm/intel-family.h.
bool IsGen95Gpu() {
@@ -246,111 +264,18 @@ static const struct {
{H264PROFILE_HIGH, VAProfileH264High},
{VP8PROFILE_ANY, VAProfileVP8Version0_3},
{VP9PROFILE_PROFILE0, VAProfileVP9Profile0},
- // VP9 hw encode/decode on profile 1 is not enabled on chromium-vaapi.
+ // Chrome does not support VP9 Profile 1, see b/153680337.
// {VP9PROFILE_PROFILE1, VAProfileVP9Profile1},
- // TODO(crbug.com/1011454, crbug.com/1011469): Reenable VP9PROFILE_PROFILE2
- // and _PROFILE3 when P010 is completely supported.
- //{VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
- //{VP9PROFILE_PROFILE3, VAProfileVP9Profile3},
+ {VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
+ {VP9PROFILE_PROFILE3, VAProfileVP9Profile3},
};
-// Converts the given |va_profile| to the corresponding string.
-// See: http://go/gh/intel/libva/blob/master/va/va.h#L359
-std::string VAProfileToString(VAProfile va_profile) {
- switch (va_profile) {
- case VAProfileNone:
- return "VAProfileNone";
- case VAProfileMPEG2Simple:
- return "VAProfileMPEG2Simple";
- case VAProfileMPEG2Main:
- return "VAProfileMPEG2Main";
- case VAProfileMPEG4Simple:
- return "VAProfileMPEG4Simple";
- case VAProfileMPEG4AdvancedSimple:
- return "VAProfileMPEG4AdvancedSimple";
- case VAProfileMPEG4Main:
- return "VAProfileMPEG4Main";
- case VAProfileH264Baseline:
- return "VAProfileH264Baseline";
- case VAProfileH264Main:
- return "VAProfileH264Main";
- case VAProfileH264High:
- return "VAProfileH264High";
- case VAProfileVC1Simple:
- return "VAProfileVC1Simple";
- case VAProfileVC1Main:
- return "VAProfileVC1Main";
- case VAProfileVC1Advanced:
- return "VAProfileVC1Advanced";
- case VAProfileH263Baseline:
- return "VAProfileH263Baseline";
- case VAProfileJPEGBaseline:
- return "VAProfileJPEGBaseline";
- case VAProfileH264ConstrainedBaseline:
- return "VAProfileH264ConstrainedBaseline";
- case VAProfileVP8Version0_3:
- return "VAProfileVP8Version0_3";
- case VAProfileH264MultiviewHigh:
- return "VAProfileH264MultiviewHigh";
- case VAProfileH264StereoHigh:
- return "VAProfileH264StereoHigh";
- case VAProfileHEVCMain:
- return "VAProfileHEVCMain";
- case VAProfileHEVCMain10:
- return "VAProfileHEVCMain10";
- case VAProfileVP9Profile0:
- return "VAProfileVP9Profile0";
- case VAProfileVP9Profile1:
- return "VAProfileVP9Profile1";
- case VAProfileVP9Profile2:
- return "VAProfileVP9Profile2";
- case VAProfileVP9Profile3:
- return "VAProfileVP9Profile3";
-#if VA_MAJOR_VERSION >= 2 || (VA_MAJOR_VERSION == 1 && VA_MINOR_VERSION >= 2)
- case VAProfileHEVCMain12:
- return "VAProfileHEVCMain12";
- case VAProfileHEVCMain422_10:
- return "VAProfileHEVCMain422_10";
- case VAProfileHEVCMain422_12:
- return "VAProfileHEVCMain422_12";
- case VAProfileHEVCMain444:
- return "VAProfileHEVCMain444";
- case VAProfileHEVCMain444_10:
- return "VAProfileHEVCMain444_10";
- case VAProfileHEVCMain444_12:
- return "VAProfileHEVCMain444_12";
- case VAProfileHEVCSccMain:
- return "VAProfileHEVCSccMain";
- case VAProfileHEVCSccMain10:
- return "VAProfileHEVCSccMain10";
- case VAProfileHEVCSccMain444:
- return "VAProfileHEVCSccMain444";
-#endif
- default:
- NOTREACHED();
- return "";
- }
-}
-
bool IsBlackListedDriver(const std::string& va_vendor_string,
VaapiWrapper::CodecMode mode,
VAProfile va_profile) {
if (!IsModeEncoding(mode))
return false;
- // TODO(crbug.com/828482): Remove once H264 encoder on AMD is enabled by
- // default.
- if (VendorStringToImplementationType(va_vendor_string) ==
- VAImplementation::kMesaGallium &&
- base::Contains(va_vendor_string, "AMD STONEY") &&
- !base::FeatureList::IsEnabled(kVaapiH264AMDEncoder)) {
- constexpr VAProfile kH264Profiles[] = {VAProfileH264Baseline,
- VAProfileH264Main, VAProfileH264High,
- VAProfileH264ConstrainedBaseline};
- if (base::Contains(kH264Profiles, va_profile))
- return true;
- }
-
// TODO(posciak): Remove once VP8 encoding is to be enabled by default.
if (va_profile == VAProfileVP8Version0_3 &&
!base::FeatureList::IsEnabled(kVaapiVP8Encoder)) {
@@ -639,8 +564,8 @@ static bool GetRequiredAttribs(const base::Lock* va_lock,
VAStatus va_res =
vaGetConfigAttributes(va_display, profile, entrypoint, &attrib, 1);
if (va_res != VA_STATUS_SUCCESS) {
- LOG(ERROR) << "GetConfigAttributes failed for va_profile "
- << VAProfileToString(profile);
+ LOG(ERROR) << "vaGetConfigAttributes failed for "
+ << vaProfileStr(profile);
return false;
}
@@ -761,7 +686,7 @@ VASupportedProfiles::VASupportedProfiles()
static_assert(std::extent<decltype(supported_profiles_)>() ==
VaapiWrapper::kCodecModeMax,
- "The array size of supported profile is incorrect.");
+ "|supported_profiles_| size is incorrect.");
if (!display_state->Initialize())
return;
@@ -793,6 +718,18 @@ VASupportedProfiles::GetSupportedProfileInfosForCodecModeInternal(
VaapiWrapper::CodecMode mode) const {
std::vector<ProfileInfo> supported_profile_infos;
std::vector<VAProfile> va_profiles;
+ // VAProfiles supported by VaapiWrapper.
+ constexpr VAProfile kSupportedVaProfiles[] = {
+ VAProfileH264ConstrainedBaseline,
+ VAProfileH264Main,
+ VAProfileH264High,
+ VAProfileJPEGBaseline,
+ VAProfileVP8Version0_3,
+ VAProfileVP9Profile0,
+ // Chrome does not support VP9 Profile 1, see b/153680337.
+ // VAProfileVP9Profile1,
+ VAProfileVP9Profile2,
+ VAProfileVP9Profile3};
if (!GetSupportedVAProfiles(&va_profiles))
return supported_profile_infos;
@@ -802,6 +739,10 @@ VASupportedProfiles::GetSupportedProfileInfosForCodecModeInternal(
VADisplayState::Get()->va_vendor_string();
for (const auto& va_profile : va_profiles) {
+ if ((mode != VaapiWrapper::CodecMode::kVideoProcess) &&
+ !base::Contains(kSupportedVaProfiles, va_profile)) {
+ continue;
+ }
const std::vector<VAEntrypoint> supported_entrypoints =
GetEntryPointsForProfile(va_lock_, va_display_, mode, va_profile);
if (supported_entrypoints.empty())
@@ -824,8 +765,8 @@ VASupportedProfiles::GetSupportedProfileInfosForCodecModeInternal(
if (!FillProfileInfo_Locked(va_profile, entrypoint, required_attribs,
&profile_info)) {
LOG(ERROR) << "FillProfileInfo_Locked failed for va_profile "
- << VAProfileToString(va_profile) << " and entrypoint "
- << entrypoint;
+ << vaProfileStr(va_profile) << " and entrypoint "
+ << vaEntrypointStr(entrypoint);
continue;
}
supported_profile_infos.push_back(profile_info);
@@ -1195,6 +1136,37 @@ bool VASupportedImageFormats::InitSupportedImageFormats_Locked() {
return true;
}
+bool IsLowPowerEncSupported(VAProfile va_profile) {
+ constexpr VAProfile kSupportedLowPowerEncodeProfiles[] = {
+ VAProfileH264ConstrainedBaseline,
+ VAProfileH264Main,
+ VAProfileH264High,
+ VAProfileVP9Profile0,
+ VAProfileVP9Profile1,
+ VAProfileVP9Profile2,
+ VAProfileVP9Profile3};
+ if (!base::Contains(kSupportedLowPowerEncodeProfiles, va_profile))
+ return false;
+
+ if ((IsGen95Gpu() || IsGen9Gpu()) &&
+ !base::FeatureList::IsEnabled(kVaapiLowPowerEncoderGen9x)) {
+ return false;
+ }
+
+ const std::vector<VASupportedProfiles::ProfileInfo>& encode_profile_infos =
+ VASupportedProfiles::Get().GetSupportedProfileInfosForCodecMode(
+ VaapiWrapper::kEncode);
+
+ for (const auto& profile_info : encode_profile_infos) {
+ if (profile_info.va_profile == va_profile &&
+ profile_info.va_entrypoint == VAEntrypointEncSliceLP) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
} // namespace
NativePixmapAndSizeInfo::NativePixmapAndSizeInfo() = default;
@@ -1212,7 +1184,7 @@ scoped_refptr<VaapiWrapper> VaapiWrapper::Create(
VAProfile va_profile,
const base::Closure& report_error_to_uma_cb) {
if (!VASupportedProfiles::Get().IsProfileSupported(mode, va_profile)) {
- DVLOG(1) << "Unsupported va_profile: " << va_profile;
+ DVLOG(1) << "Unsupported va_profile: " << vaProfileStr(va_profile);
return nullptr;
}
@@ -1222,7 +1194,7 @@ scoped_refptr<VaapiWrapper> VaapiWrapper::Create(
return vaapi_wrapper;
}
LOG(ERROR) << "Failed to create VaapiWrapper for va_profile: "
- << VAProfileToString(va_profile);
+ << vaProfileStr(va_profile);
return nullptr;
}
@@ -1494,8 +1466,10 @@ VAEntrypoint VaapiWrapper::GetDefaultVaEntryPoint(CodecMode mode,
case VaapiWrapper::kEncodeConstantQuantizationParameter:
if (profile == VAProfileJPEGBaseline)
return VAEntrypointEncPicture;
- else
- return VAEntrypointEncSlice;
+ DCHECK(IsModeEncoding(mode));
+ if (IsLowPowerEncSupported(profile))
+ return VAEntrypointEncSliceLP;
+ return VAEntrypointEncSlice;
case VaapiWrapper::kVideoProcess:
return VAEntrypointVideoProc;
case VaapiWrapper::kCodecModeMax:
@@ -1515,8 +1489,10 @@ uint32_t VaapiWrapper::BufferFormatToVARTFormat(gfx::BufferFormat fmt) {
case gfx::BufferFormat::YVU_420:
case gfx::BufferFormat::YUV_420_BIPLANAR:
return VA_RT_FORMAT_YUV420;
+ case gfx::BufferFormat::P010:
+ return VA_RT_FORMAT_YUV420_10BPP;
default:
- NOTREACHED();
+ NOTREACHED() << gfx::BufferFormatToString(fmt);
return 0;
}
}
@@ -2028,6 +2004,28 @@ bool VaapiWrapper::CreateVABuffer(size_t size, VABufferID* buffer_id) {
return true;
}
+uint64_t VaapiWrapper::GetEncodedChunkSize(VABufferID buffer_id,
+ VASurfaceID sync_surface_id) {
+ TRACE_EVENT0("media,gpu", "VaapiWrapper::GetEncodedChunkSize");
+ base::AutoLock auto_lock(*va_lock_);
+ TRACE_EVENT0("media,gpu", "VaapiWrapper::GetEncodedChunkSizeLocked");
+ VAStatus va_res = vaSyncSurface(va_display_, sync_surface_id);
+ VA_SUCCESS_OR_RETURN(va_res, "vaSyncSurface", 0u);
+
+ ScopedVABufferMapping mapping(va_lock_, va_display_, buffer_id);
+ if (!mapping.IsValid())
+ return 0u;
+
+ uint64_t coded_data_size = 0;
+ for (auto* buffer_segment =
+ reinterpret_cast<VACodedBufferSegment*>(mapping.data());
+ buffer_segment; buffer_segment = reinterpret_cast<VACodedBufferSegment*>(
+ buffer_segment->next)) {
+ coded_data_size += buffer_segment->size;
+ }
+ return coded_data_size;
+}
+
bool VaapiWrapper::DownloadFromVABuffer(VABufferID buffer_id,
VASurfaceID sync_surface_id,
uint8_t* target_ptr,
@@ -2062,13 +2060,11 @@ bool VaapiWrapper::DownloadFromVABuffer(VABufferID buffer_id,
<< ", the buffer segment size: " << buffer_segment->size;
break;
}
-
memcpy(target_ptr, buffer_segment->buf, buffer_segment->size);
target_ptr += buffer_segment->size;
- *coded_data_size += buffer_segment->size;
target_size -= buffer_segment->size;
-
+ *coded_data_size += buffer_segment->size;
buffer_segment =
reinterpret_cast<VACodedBufferSegment*>(buffer_segment->next);
}
@@ -2113,10 +2109,28 @@ void VaapiWrapper::DestroyVABuffers() {
va_buffers_.clear();
}
+bool VaapiWrapper::IsRotationSupported() {
+ base::AutoLock auto_lock(*va_lock_);
+ VAProcPipelineCaps pipeline_caps;
+ memset(&pipeline_caps, 0, sizeof(pipeline_caps));
+ VAStatus va_res = vaQueryVideoProcPipelineCaps(va_display_, va_context_id_,
+ nullptr, 0, &pipeline_caps);
+ if (va_res != VA_STATUS_SUCCESS) {
+ LOG_VA_ERROR_AND_REPORT(va_res, "vaQueryVideoProcPipelineCaps failed");
+ return false;
+ }
+ if (!pipeline_caps.rotation_flags) {
+ DVLOG(2) << "VA-API driver doesn't support any rotation";
+ return false;
+ }
+ return true;
+}
+
bool VaapiWrapper::BlitSurface(const VASurface& va_surface_src,
const VASurface& va_surface_dest,
base::Optional<gfx::Rect> src_rect,
- base::Optional<gfx::Rect> dest_rect) {
+ base::Optional<gfx::Rect> dest_rect,
+ VideoRotation rotation) {
base::AutoLock auto_lock(*va_lock_);
if (va_buffers_.empty()) {
@@ -2165,6 +2179,21 @@ bool VaapiWrapper::BlitSurface(const VASurface& va_surface_src,
pipeline_param->output_color_standard = VAProcColorStandardNone;
pipeline_param->filter_flags = VA_FILTER_SCALING_DEFAULT;
+ switch (rotation) {
+ case VIDEO_ROTATION_0:
+ pipeline_param->rotation_state = VA_ROTATION_NONE;
+ break;
+ case VIDEO_ROTATION_90:
+ pipeline_param->rotation_state = VA_ROTATION_90;
+ break;
+ case VIDEO_ROTATION_180:
+ pipeline_param->rotation_state = VA_ROTATION_180;
+ break;
+ case VIDEO_ROTATION_270:
+ pipeline_param->rotation_state = VA_ROTATION_270;
+ break;
+ }
+
VA_SUCCESS_OR_RETURN(mapping.Unmap(), "Vpp Buffer unmapping", false);
}
@@ -2241,15 +2270,7 @@ bool VaapiWrapper::Initialize(CodecMode mode, VAProfile va_profile) {
}
#endif // DCHECK_IS_ON()
- if (mode != kVideoProcess)
- TryToSetVADisplayAttributeToLocalGPU();
-
- VAEntrypoint entrypoint = GetDefaultVaEntryPoint(mode, va_profile);
- if (IsModeEncoding(mode) && IsLowPowerEncSupported(va_profile, mode) &&
- base::FeatureList::IsEnabled(kVaapiLowPowerEncoder)) {
- entrypoint = VAEntrypointEncSliceLP;
- DVLOG(2) << "Enable VA-API Low-Power Encode Entrypoint";
- }
+ const VAEntrypoint entrypoint = GetDefaultVaEntryPoint(mode, va_profile);
base::AutoLock auto_lock(*va_lock_);
std::vector<VAConfigAttrib> required_attribs;
@@ -2258,7 +2279,7 @@ bool VaapiWrapper::Initialize(CodecMode mode, VAProfile va_profile) {
return false;
}
- VAStatus va_res =
+ const VAStatus va_res =
vaCreateConfig(va_display_, va_profile, entrypoint,
required_attribs.empty() ? nullptr : &required_attribs[0],
required_attribs.size(), &va_config_id_);
@@ -2456,54 +2477,4 @@ bool VaapiWrapper::Execute_Locked(VASurfaceID va_surface_id) {
return true;
}
-void VaapiWrapper::TryToSetVADisplayAttributeToLocalGPU() {
- base::AutoLock auto_lock(*va_lock_);
- VADisplayAttribute item = {VADisplayAttribRenderMode,
- 1, // At least support '_LOCAL_OVERLAY'.
- -1, // The maximum possible support 'ALL'.
- VA_RENDER_MODE_LOCAL_GPU,
- VA_DISPLAY_ATTRIB_SETTABLE};
-
- VAStatus va_res = vaSetDisplayAttributes(va_display_, &item, 1);
- if (va_res != VA_STATUS_SUCCESS)
- DVLOG(2) << "vaSetDisplayAttributes unsupported, ignoring by default.";
-}
-
-// Check the support for low-power encode
-bool VaapiWrapper::IsLowPowerEncSupported(VAProfile va_profile,
- CodecMode mode) const {
- // Enabled only for H264/AVC & VP9 Encoders
- if (va_profile != VAProfileH264ConstrainedBaseline &&
- va_profile != VAProfileH264Main && va_profile != VAProfileH264High &&
- va_profile != VAProfileVP9Profile0 && va_profile != VAProfileVP9Profile1)
- return false;
-
- constexpr VAEntrypoint kLowPowerEncEntryPoint = VAEntrypointEncSliceLP;
- std::vector<VAConfigAttrib> required_attribs;
-
- base::AutoLock auto_lock(*va_lock_);
- GetRequiredAttribs(va_lock_, va_display_, mode, va_profile,
- kLowPowerEncEntryPoint, &required_attribs);
- // Query the driver for required attributes.
- std::vector<VAConfigAttrib> attribs = required_attribs;
- for (size_t i = 0; i < required_attribs.size(); ++i)
- attribs[i].value = 0;
-
- VAStatus va_res =
- vaGetConfigAttributes(va_display_, va_profile, kLowPowerEncEntryPoint,
- &attribs[0], attribs.size());
- VA_SUCCESS_OR_RETURN(va_res, "vaGetConfigAttributes", false);
-
- for (size_t i = 0; i < required_attribs.size(); ++i) {
- if (attribs[i].type != required_attribs[i].type ||
- (attribs[i].value & required_attribs[i].value) !=
- required_attribs[i].value) {
- DVLOG(1) << "Unsupported value " << required_attribs[i].value
- << " for attribute type " << required_attribs[i].type;
- return false;
- }
- }
- return true;
-}
-
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.h b/chromium/media/gpu/vaapi/vaapi_wrapper.h
index 7f087039c58..c4d005ba456 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.h
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.h
@@ -322,9 +322,9 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// Useful when a pending job is to be cancelled (on reset or error).
void DestroyPendingBuffers();
- // Execute job in hardware on target |va_surface_id| and destroy pending
- // buffers. Return false if Execute() fails.
- bool ExecuteAndDestroyPendingBuffers(VASurfaceID va_surface_id);
+ // Executes job in hardware on target |va_surface_id| and destroys pending
+ // buffers. Returns false if Execute() fails.
+ virtual bool ExecuteAndDestroyPendingBuffers(VASurfaceID va_surface_id);
#if defined(USE_X11)
// Put data from |va_surface_id| into |x_pixmap| of size
@@ -343,28 +343,37 @@ class MEDIA_GPU_EXPORT VaapiWrapper
VAImageFormat* format,
const gfx::Size& size);
- // Upload contents of |frame| into |va_surface_id| for encode.
- bool UploadVideoFrameToSurface(const VideoFrame& frame,
- VASurfaceID va_surface_id,
- const gfx::Size& va_surface_size);
+ // Uploads contents of |frame| into |va_surface_id| for encode.
+ virtual bool UploadVideoFrameToSurface(const VideoFrame& frame,
+ VASurfaceID va_surface_id,
+ const gfx::Size& va_surface_size);
- // Create a buffer of |size| bytes to be used as encode output.
- bool CreateVABuffer(size_t size, VABufferID* buffer_id);
+ // Creates a buffer of |size| bytes to be used as encode output.
+ virtual bool CreateVABuffer(size_t size, VABufferID* buffer_id);
- // Download the contents of the buffer with given |buffer_id| into a buffer of
- // size |target_size|, pointed to by |target_ptr|. The number of bytes
+ // Gets the encoded frame linear size of the buffer with given |buffer_id|.
+ // |sync_surface_id| will be used as a sync point, i.e. it will have to become
+ // idle before starting the acquirement. |sync_surface_id| should be the
+ // source surface passed to the encode job. Returns 0 if it fails for any
+ // reason.
+ virtual uint64_t GetEncodedChunkSize(VABufferID buffer_id,
+ VASurfaceID sync_surface_id);
+
+ // Downloads the contents of the buffer with given |buffer_id| into a buffer
+ // of size |target_size|, pointed to by |target_ptr|. The number of bytes
// downloaded will be returned in |coded_data_size|. |sync_surface_id| will
// be used as a sync point, i.e. it will have to become idle before starting
// the download. |sync_surface_id| should be the source surface passed
- // to the encode job.
- bool DownloadFromVABuffer(VABufferID buffer_id,
- VASurfaceID sync_surface_id,
- uint8_t* target_ptr,
- size_t target_size,
- size_t* coded_data_size);
+ // to the encode job. Returns false if it fails for any reason. For example,
+ // the linear size of the resulted encoded frame is larger than |target_size|.
+ virtual bool DownloadFromVABuffer(VABufferID buffer_id,
+ VASurfaceID sync_surface_id,
+ uint8_t* target_ptr,
+ size_t target_size,
+ size_t* coded_data_size);
// Deletes the VA buffer identified by |buffer_id|.
- void DestroyVABuffer(VABufferID buffer_id);
+ virtual void DestroyVABuffer(VABufferID buffer_id);
// Destroy all previously-allocated (and not yet destroyed) buffers.
void DestroyVABuffers();
@@ -374,23 +383,27 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// For H.264 encoding, the value represents the maximum number of reference
// frames for both the reference picture list 0 (bottom 16 bits) and the
// reference picture list 1 (top 16 bits).
- bool GetVAEncMaxNumOfRefFrames(VideoCodecProfile profile,
- size_t* max_ref_frames);
+ virtual bool GetVAEncMaxNumOfRefFrames(VideoCodecProfile profile,
+ size_t* max_ref_frames);
+
+ // Checks if the driver supports frame rotation.
+ bool IsRotationSupported();
// Blits a VASurface |va_surface_src| into another VASurface
- // |va_surface_dest| applying pixel format conversion, cropping and scaling
- // if needed. |src_rect| and |dest_rect| are optional. They can be used to
- // specify the area used in the blit.
+ // |va_surface_dest| applying pixel format conversion, rotation, cropping
+ // and scaling if needed. |src_rect| and |dest_rect| are optional. They can
+ // be used to specify the area used in the blit.
bool BlitSurface(const VASurface& va_surface_src,
const VASurface& va_surface_dest,
base::Optional<gfx::Rect> src_rect = base::nullopt,
- base::Optional<gfx::Rect> dest_rect = base::nullopt);
+ base::Optional<gfx::Rect> dest_rect = base::nullopt,
+ VideoRotation rotation = VIDEO_ROTATION_0);
// Initialize static data before sandbox is enabled.
static void PreSandboxInitialization();
// vaDestroySurfaces() a vector or a single VASurfaceID.
- void DestroySurfaces(std::vector<VASurfaceID> va_surfaces);
+ virtual void DestroySurfaces(std::vector<VASurfaceID> va_surfaces);
virtual void DestroySurface(VASurfaceID va_surface_id);
protected:
@@ -425,12 +438,6 @@ class MEDIA_GPU_EXPORT VaapiWrapper
void DestroyPendingBuffers_Locked() EXCLUSIVE_LOCKS_REQUIRED(va_lock_);
- // Attempt to set render mode to "render to texture.". Failure is non-fatal.
- void TryToSetVADisplayAttributeToLocalGPU();
-
- // Check low-power encode support for |profile| and |mode|.
- bool IsLowPowerEncSupported(VAProfile va_profile, CodecMode mode) const;
-
const CodecMode mode_;
// Pointer to VADisplayState's member |va_lock_|. Guaranteed to be valid for
diff --git a/chromium/media/gpu/vaapi/vp9_encoder.cc b/chromium/media/gpu/vaapi/vp9_encoder.cc
index 140ac37af4c..0c125f02b36 100644
--- a/chromium/media/gpu/vaapi/vp9_encoder.cc
+++ b/chromium/media/gpu/vaapi/vp9_encoder.cc
@@ -4,8 +4,12 @@
#include "media/gpu/vaapi/vp9_encoder.h"
+#include <algorithm>
+
#include "base/bits.h"
#include "media/gpu/macros.h"
+#include "media/gpu/vaapi/vp9_rate_control.h"
+#include "third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h"
namespace media {
@@ -19,6 +23,9 @@ constexpr int kCPBWindowSizeMs = 500;
// Quantization parameter. They are vp9 ac/dc indices and their ranges are
// 0-255. Based on WebRTC's defaults.
constexpr int kMinQP = 4;
+// TODO(crbug.com/1060775): Relax this max quantization parameter upper bound
+// so that our encoder and bitrate controller can select a higher value in the
+// case a requested bitrate is small.
constexpr int kMaxQP = 112;
// This stands for 31 as a real ac value (see rfc 8.6.1 table
// ac_qlookup[3][256]). Note: This needs to be revisited once we have 10&12 bit
@@ -29,6 +36,84 @@ constexpr int kDefaultQP = 24;
// we set a constant value (== 10) which is what other VA-API
// implementations like libyami and gstreamer-vaapi are using.
constexpr uint8_t kDefaultLfLevel = 10;
+
+// Convert Qindex, whose range is 0-255, to the quantizer parameter used in
+// libvpx vp9 rate control, whose range is 0-63.
+// Cited from //third_party/libvpx/source/libvpx/vp9/encoder/vp9_quantize.cc.
+int QindexToQuantizer(int q_index) {
+ constexpr int kQuantizerToQindex[] = {
+ 0, 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48,
+ 52, 56, 60, 64, 68, 72, 76, 80, 84, 88, 92, 96, 100,
+ 104, 108, 112, 116, 120, 124, 128, 132, 136, 140, 144, 148, 152,
+ 156, 160, 164, 168, 172, 176, 180, 184, 188, 192, 196, 200, 204,
+ 208, 212, 216, 220, 224, 228, 232, 236, 240, 244, 249, 255,
+ };
+
+ for (size_t q = 0; q < base::size(kQuantizerToQindex); ++q) {
+ if (kQuantizerToQindex[q] >= q_index)
+ return q;
+ }
+ return base::size(kQuantizerToQindex) - 1;
+}
+
+// The return value is expressed as a percentage of the average. For example,
+// to allocate no more than 4.5 frames worth of bitrate to a keyframe, the
+// return value is 450.
+uint32_t MaxSizeOfKeyframeAsPercentage(uint32_t optimal_buffer_size,
+ uint32_t max_framerate) {
+ // Set max to the optimal buffer level (normalized by target BR),
+ // and scaled by a scale_par.
+ // Max target size = scale_par * optimal_buffer_size * targetBR[Kbps].
+ // This value is presented in percentage of perFrameBw:
+ // perFrameBw = targetBR[Kbps] * 1000 / framerate.
+ // The target in % is as follows:
+ const double target_size_byte_per_frame = optimal_buffer_size * 0.5;
+ const uint32_t target_size_kbyte =
+ target_size_byte_per_frame * max_framerate / 1000;
+ const uint32_t target_size_kbyte_as_percent = target_size_kbyte * 100;
+
+ // Don't go below 3 times the per frame bandwidth.
+ constexpr uint32_t kMinIntraSizePercentage = 300u;
+ return std::max(kMinIntraSizePercentage, target_size_kbyte_as_percent);
+}
+
+libvpx::VP9RateControlRtcConfig CreateRCConfig(
+ const gfx::Size& encode_size,
+ const VP9Encoder::EncodeParams& encode_params) {
+ libvpx::VP9RateControlRtcConfig rc_cfg{};
+ rc_cfg.width = encode_size.width();
+ rc_cfg.height = encode_size.height();
+ rc_cfg.max_quantizer =
+ QindexToQuantizer(encode_params.scaling_settings.max_qp);
+ rc_cfg.min_quantizer =
+ QindexToQuantizer(encode_params.scaling_settings.min_qp);
+ // libvpx::VP9RateControlRtcConfig is kbps.
+ rc_cfg.target_bandwidth =
+ encode_params.bitrate_allocation.GetSumBps() / 1000.0;
+ // These default values come from
+ // //third_party/webrtc/modules/video_coding/codecs/vp9/vp9_impl.cc.
+ rc_cfg.buf_initial_sz = 500;
+ rc_cfg.buf_optimal_sz = 600;
+ rc_cfg.buf_sz = 1000;
+ rc_cfg.undershoot_pct = 50;
+ rc_cfg.overshoot_pct = 50;
+ rc_cfg.max_intra_bitrate_pct = MaxSizeOfKeyframeAsPercentage(
+ rc_cfg.buf_optimal_sz, encode_params.framerate);
+ rc_cfg.framerate = encode_params.framerate;
+
+ // Spatial layer variables.
+ rc_cfg.ss_number_layers = 1;
+ rc_cfg.max_quantizers[0] = rc_cfg.max_quantizer;
+ rc_cfg.min_quantizers[0] = rc_cfg.min_quantizer;
+ // TODO(crbug.com/1030199): Fill multiple temporal layers variables.
+ // Temporal layer variables.
+ rc_cfg.ts_number_layers = 1;
+ rc_cfg.scaling_factor_num[0] = 1;
+ rc_cfg.scaling_factor_den[0] = 1;
+ rc_cfg.layer_target_bitrate[0] = rc_cfg.target_bandwidth;
+ rc_cfg.ts_rate_decimator[0] = 1;
+ return rc_cfg;
+}
} // namespace
VP9Encoder::EncodeParams::EncodeParams()
@@ -40,6 +125,11 @@ VP9Encoder::EncodeParams::EncodeParams()
scaling_settings(kMinQP, kMaxQP),
error_resilient_mode(false) {}
+void VP9Encoder::set_rate_ctrl_for_testing(
+ std::unique_ptr<VP9RateControl> rate_ctrl) {
+ rate_ctrl_ = std::move(rate_ctrl);
+}
+
void VP9Encoder::Reset() {
current_params_ = EncodeParams();
reference_frames_.Clear();
@@ -66,20 +156,27 @@ bool VP9Encoder::Initialize(const VideoEncodeAccelerator::Config& config,
DVLOGF(1) << "Input visible size could not be empty";
return false;
}
- // 4:2:0 format has to be 2-aligned.
- if ((config.input_visible_size.width() % 2 != 0) ||
- (config.input_visible_size.height() % 2 != 0)) {
- DVLOGF(1) << "The pixel sizes are not even: "
- << config.input_visible_size.ToString();
- return false;
- }
+ accelerator_->set_bitrate_control(ave_config.bitrate_control);
visible_size_ = config.input_visible_size;
coded_size_ = gfx::Size(base::bits::Align(visible_size_.width(), 16),
base::bits::Align(visible_size_.height(), 16));
-
Reset();
+ if (ave_config.bitrate_control ==
+ BitrateControl::kConstantQuantizationParameter) {
+ // |rate_ctrl_| might be injected for tests.
+ if (!rate_ctrl_) {
+ rate_ctrl_ = VP9RateControl::Create(
+ CreateRCConfig(visible_size_, current_params_));
+ }
+ if (!rate_ctrl_)
+ return false;
+ } else {
+ DCHECK(!rate_ctrl_) << "|rate_ctrl_| should only be configured when in "
+ "kConstantQuantizationParameter";
+ }
+
VideoBitrateAllocation initial_bitrate_allocation;
initial_bitrate_allocation.SetBitrate(0, 0, config.initial_bitrate);
return UpdateRates(initial_bitrate_allocation,
@@ -121,13 +218,14 @@ bool VP9Encoder::PrepareEncodeJob(EncodeJob* encode_job) {
scoped_refptr<VP9Picture> picture = accelerator_->GetPicture(encode_job);
DCHECK(picture);
- UpdateFrameHeader(encode_job->IsKeyframeRequested());
+ const bool keyframe = encode_job->IsKeyframeRequested();
+ UpdateFrameHeader(keyframe);
*picture->frame_hdr = current_frame_hdr_;
// Use last, golden and altref for references.
- constexpr std::array<bool, kVp9NumRefsPerFrame> ref_frames_used = {true, true,
- true};
+ const std::array<bool, kVp9NumRefsPerFrame> ref_frames_used = {
+ !keyframe, !keyframe, !keyframe};
if (!accelerator_->SubmitFrameParameters(encode_job, current_params_, picture,
reference_frames_,
ref_frames_used)) {
@@ -139,6 +237,18 @@ bool VP9Encoder::PrepareEncodeJob(EncodeJob* encode_job) {
return true;
}
+void VP9Encoder::BitrateControlUpdate(uint64_t encoded_chunk_size_bytes) {
+ if (accelerator_->bitrate_control() !=
+ BitrateControl::kConstantQuantizationParameter ||
+ !rate_ctrl_) {
+ DLOG(ERROR) << __func__ << "() is called when no bitrate controller exists";
+ return;
+ }
+
+ DVLOGF(4) << "|encoded_chunk_size_bytes|=" << encoded_chunk_size_bytes;
+ rate_ctrl_->PostEncodeUpdate(encoded_chunk_size_bytes);
+}
+
bool VP9Encoder::UpdateRates(const VideoBitrateAllocation& bitrate_allocation,
uint32_t framerate) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@@ -160,6 +270,10 @@ bool VP9Encoder::UpdateRates(const VideoBitrateAllocation& bitrate_allocation,
current_params_.bitrate_allocation.GetSumBps() *
current_params_.cpb_window_size_ms / 1000;
+ if (!rate_ctrl_)
+ return true;
+
+ rate_ctrl_->UpdateRateControl(CreateRCConfig(visible_size_, current_params_));
return true;
}
@@ -181,8 +295,6 @@ void VP9Encoder::UpdateFrameHeader(bool keyframe) {
current_frame_hdr_.refresh_frame_flags = 0xff;
ref_frame_index_ = 0;
} else {
- // TODO(crbug.com/811912): Add temporal layer support when there is a driver
- // support. Use the last three frames for reference.
current_frame_hdr_.frame_type = Vp9FrameHeader::INTERFRAME;
current_frame_hdr_.ref_frame_idx[0] = ref_frame_index_;
current_frame_hdr_.ref_frame_idx[1] =
@@ -192,6 +304,19 @@ void VP9Encoder::UpdateFrameHeader(bool keyframe) {
ref_frame_index_ = (ref_frame_index_ + 1) % kVp9NumRefFrames;
current_frame_hdr_.refresh_frame_flags = 1 << ref_frame_index_;
}
+
+ if (!rate_ctrl_)
+ return;
+
+ libvpx::VP9FrameParamsQpRTC frame_params{};
+ frame_params.frame_type =
+ keyframe ? FRAME_TYPE::KEY_FRAME : FRAME_TYPE::INTER_FRAME;
+ rate_ctrl_->ComputeQP(frame_params);
+ // TODO(crbug.com/1030199): Fill temporal layer id.
+ current_frame_hdr_.quant_params.base_q_idx = rate_ctrl_->GetQP();
+ current_frame_hdr_.loop_filter.level = rate_ctrl_->GetLoopfilterLevel();
+ DVLOGF(4) << "|qp|=" << rate_ctrl_->GetQP()
+ << ", |filter_level|=" << rate_ctrl_->GetLoopfilterLevel();
}
void VP9Encoder::UpdateReferenceFrames(scoped_refptr<VP9Picture> picture) {
diff --git a/chromium/media/gpu/vaapi/vp9_encoder.h b/chromium/media/gpu/vaapi/vp9_encoder.h
index 2f3eda4b440..9c0ad1cb9f3 100644
--- a/chromium/media/gpu/vaapi/vp9_encoder.h
+++ b/chromium/media/gpu/vaapi/vp9_encoder.h
@@ -19,6 +19,7 @@
#include "media/gpu/vp9_reference_frame_vector.h"
namespace media {
+class VP9RateControl;
class VP9Encoder : public AcceleratedVideoEncoder {
public:
@@ -71,6 +72,12 @@ class VP9Encoder : public AcceleratedVideoEncoder {
const Vp9ReferenceFrameVector& ref_frames,
const std::array<bool, kVp9NumRefsPerFrame>& ref_frames_used) = 0;
+ void set_bitrate_control(BitrateControl bc) { bitrate_control_ = bc; }
+ BitrateControl bitrate_control() { return bitrate_control_; }
+
+ protected:
+ BitrateControl bitrate_control_ = BitrateControl::kConstantBitrate;
+
DISALLOW_COPY_AND_ASSIGN(Accelerator);
};
@@ -86,8 +93,13 @@ class VP9Encoder : public AcceleratedVideoEncoder {
size_t GetMaxNumOfRefFrames() const override;
ScalingSettings GetScalingSettings() const override;
bool PrepareEncodeJob(EncodeJob* encode_job) override;
+ void BitrateControlUpdate(uint64_t encoded_chunk_size_bytes) override;
private:
+ friend class VP9EncoderTest;
+
+ void set_rate_ctrl_for_testing(std::unique_ptr<VP9RateControl> rate_ctrl);
+
void InitializeFrameHeader();
void UpdateFrameHeader(bool keyframe);
void UpdateReferenceFrames(scoped_refptr<VP9Picture> picture);
@@ -105,6 +117,7 @@ class VP9Encoder : public AcceleratedVideoEncoder {
Vp9FrameHeader current_frame_hdr_;
Vp9ReferenceFrameVector reference_frames_;
+ std::unique_ptr<VP9RateControl> rate_ctrl_;
const std::unique_ptr<Accelerator> accelerator_;
SEQUENCE_CHECKER(sequence_checker_);
diff --git a/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc b/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc
new file mode 100644
index 00000000000..fa0f8b53d3c
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc
@@ -0,0 +1,381 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/vp9_encoder.h"
+
+#include <memory>
+#include <numeric>
+
+#include "base/bind_helpers.h"
+#include "base/callback.h"
+#include "base/logging.h"
+#include "base/optional.h"
+#include "media/filters/vp9_parser.h"
+#include "media/gpu/vaapi/vp9_rate_control.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/libvpx/source/libvpx/vp9/common/vp9_blockd.h"
+#include "third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h"
+
+using ::testing::_;
+using ::testing::InSequence;
+using ::testing::Invoke;
+using ::testing::Return;
+
+namespace media {
+namespace {
+
+constexpr size_t kDefaultMaxNumRefFrames = kVp9NumRefsPerFrame;
+
+AcceleratedVideoEncoder::Config kDefaultAcceleratedVideoEncoderConfig{
+ kDefaultMaxNumRefFrames,
+ AcceleratedVideoEncoder::BitrateControl::kConstantBitrate};
+
+VideoEncodeAccelerator::Config kDefaultVideoEncodeAcceleratorConfig(
+ PIXEL_FORMAT_I420,
+ gfx::Size(1280, 720),
+ VP9PROFILE_PROFILE0,
+ 14000000 /* = maximum bitrate in bits per second for level 3.1 */,
+ VideoEncodeAccelerator::kDefaultFramerate,
+ base::nullopt /* gop_length */,
+ base::nullopt /* h264 output level*/,
+ VideoEncodeAccelerator::Config::StorageType::kShmem);
+
+const std::vector<bool> kRefFramesUsedForKeyFrame = {false, false, false};
+const std::vector<bool> kRefFramesUsedForInterFrame = {true, true, true};
+
+MATCHER_P4(MatchRtcConfigWithRates,
+ size,
+ bitrate_allocation,
+ framerate,
+ num_temporal_layers,
+ "") {
+ if (arg.target_bandwidth !=
+ static_cast<int64_t>(bitrate_allocation.GetSumBps() / 1000.0)) {
+ return false;
+ }
+
+ if (arg.framerate != static_cast<double>(framerate))
+ return false;
+
+ for (size_t i = 0; i < num_temporal_layers; i++) {
+ if (arg.layer_target_bitrate[i] !=
+ static_cast<int>(bitrate_allocation.GetBitrateBps(0, i) / 1000.0)) {
+ return false;
+ }
+ if (arg.ts_rate_decimator[i] != (1 << i))
+ return false;
+ }
+
+ return arg.width == size.width() && arg.height == size.height() &&
+ base::checked_cast<size_t>(arg.ts_number_layers) ==
+ num_temporal_layers &&
+ arg.ss_number_layers == 1 && arg.scaling_factor_num[0] == 1 &&
+ arg.scaling_factor_den[0] == 1;
+}
+
+MATCHER_P2(MatchFrameParam, frame_type, temporal_idx, "") {
+ return arg.frame_type == frame_type &&
+ (!temporal_idx || arg.temporal_layer_id == *temporal_idx);
+}
+
+class MockVP9Accelerator : public VP9Encoder::Accelerator {
+ public:
+ MockVP9Accelerator() = default;
+ ~MockVP9Accelerator() override = default;
+ MOCK_METHOD1(GetPicture,
+ scoped_refptr<VP9Picture>(AcceleratedVideoEncoder::EncodeJob*));
+
+ MOCK_METHOD5(SubmitFrameParameters,
+ bool(AcceleratedVideoEncoder::EncodeJob*,
+ const VP9Encoder::EncodeParams&,
+ scoped_refptr<VP9Picture>,
+ const Vp9ReferenceFrameVector&,
+ const std::array<bool, kVp9NumRefsPerFrame>&));
+};
+
+class MockVP9RateControl : public VP9RateControl {
+ public:
+ MockVP9RateControl() = default;
+ ~MockVP9RateControl() override = default;
+
+ MOCK_METHOD1(UpdateRateControl, void(const libvpx::VP9RateControlRtcConfig&));
+ MOCK_CONST_METHOD0(GetQP, int());
+ MOCK_CONST_METHOD0(GetLoopfilterLevel, int());
+ MOCK_METHOD1(ComputeQP, void(const libvpx::VP9FrameParamsQpRTC&));
+ MOCK_METHOD1(PostEncodeUpdate, void(uint64_t));
+};
+} // namespace
+
+struct VP9EncoderTestParam;
+
+class VP9EncoderTest : public ::testing::TestWithParam<VP9EncoderTestParam> {
+ public:
+ using BitrateControl = AcceleratedVideoEncoder::BitrateControl;
+
+ VP9EncoderTest() = default;
+ ~VP9EncoderTest() override = default;
+
+ void SetUp() override;
+
+ protected:
+ using FrameType = Vp9FrameHeader::FrameType;
+
+ void InitializeVP9Encoder(BitrateControl bitrate_control);
+ void EncodeSequence(FrameType frame_type);
+ void EncodeConstantQuantizationParameterSequence(
+ FrameType frame_type,
+ const std::vector<bool>& expected_ref_frames_used,
+ base::Optional<uint8_t> expected_temporal_idx = base::nullopt);
+ void UpdateRatesTest(BitrateControl bitrate_control,
+ size_t num_temporal_layers);
+
+ private:
+ std::unique_ptr<AcceleratedVideoEncoder::EncodeJob> CreateEncodeJob(
+ bool keyframe);
+ void UpdateRatesSequence(const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate,
+ BitrateControl bitrate_control);
+
+ std::unique_ptr<VP9Encoder> encoder_;
+ MockVP9Accelerator* mock_accelerator_ = nullptr;
+ MockVP9RateControl* mock_rate_ctrl_ = nullptr;
+};
+
+void VP9EncoderTest::SetUp() {
+ auto mock_accelerator = std::make_unique<MockVP9Accelerator>();
+ mock_accelerator_ = mock_accelerator.get();
+ auto rate_ctrl = std::make_unique<MockVP9RateControl>();
+ mock_rate_ctrl_ = rate_ctrl.get();
+
+ encoder_ = std::make_unique<VP9Encoder>(std::move(mock_accelerator));
+ encoder_->set_rate_ctrl_for_testing(std::move(rate_ctrl));
+}
+
+std::unique_ptr<AcceleratedVideoEncoder::EncodeJob>
+VP9EncoderTest::CreateEncodeJob(bool keyframe) {
+ auto input_frame = VideoFrame::CreateFrame(
+ kDefaultVideoEncodeAcceleratorConfig.input_format,
+ kDefaultVideoEncodeAcceleratorConfig.input_visible_size,
+ gfx::Rect(kDefaultVideoEncodeAcceleratorConfig.input_visible_size),
+ kDefaultVideoEncodeAcceleratorConfig.input_visible_size,
+ base::TimeDelta());
+ LOG_ASSERT(input_frame) << " Failed to create VideoFrame";
+ return std::make_unique<AcceleratedVideoEncoder::EncodeJob>(
+ input_frame, keyframe, base::DoNothing());
+}
+
+void VP9EncoderTest::InitializeVP9Encoder(BitrateControl bitrate_control) {
+ auto ave_config = kDefaultAcceleratedVideoEncoderConfig;
+ ave_config.bitrate_control = bitrate_control;
+ if (bitrate_control == BitrateControl::kConstantQuantizationParameter) {
+ constexpr size_t kNumTemporalLayers = 1u;
+ VideoBitrateAllocation initial_bitrate_allocation;
+ initial_bitrate_allocation.SetBitrate(
+ 0, 0, kDefaultVideoEncodeAcceleratorConfig.initial_bitrate);
+
+ EXPECT_CALL(
+ *mock_rate_ctrl_,
+ UpdateRateControl(MatchRtcConfigWithRates(
+ kDefaultVideoEncodeAcceleratorConfig.input_visible_size,
+ initial_bitrate_allocation,
+ VideoEncodeAccelerator::kDefaultFramerate, kNumTemporalLayers)))
+ .Times(1)
+ .WillOnce(Return());
+ }
+ EXPECT_TRUE(
+ encoder_->Initialize(kDefaultVideoEncodeAcceleratorConfig, ave_config));
+}
+
+void VP9EncoderTest::EncodeSequence(FrameType frame_type) {
+ InSequence seq;
+ const bool keyframe = frame_type == FrameType::KEYFRAME;
+ auto encode_job = CreateEncodeJob(keyframe);
+ scoped_refptr<VP9Picture> picture(new VP9Picture);
+ EXPECT_CALL(*mock_accelerator_, GetPicture(encode_job.get()))
+ .WillOnce(Invoke(
+ [picture](AcceleratedVideoEncoder::EncodeJob*) { return picture; }));
+ const auto& expected_ref_frames_used =
+ keyframe ? kRefFramesUsedForKeyFrame : kRefFramesUsedForInterFrame;
+ EXPECT_CALL(*mock_accelerator_,
+ SubmitFrameParameters(
+ encode_job.get(), _, _, _,
+ ::testing::ElementsAreArray(expected_ref_frames_used)))
+ .WillOnce(Return(true));
+ EXPECT_TRUE(encoder_->PrepareEncodeJob(encode_job.get()));
+ // TODO(hiroh): Test for encoder_->reference_frames_.
+}
+
+void VP9EncoderTest::EncodeConstantQuantizationParameterSequence(
+ FrameType frame_type,
+ const std::vector<bool>& expected_ref_frames_used,
+ base::Optional<uint8_t> expected_temporal_idx) {
+ const bool keyframe = frame_type == FrameType::KEYFRAME;
+ InSequence seq;
+ auto encode_job = CreateEncodeJob(keyframe);
+ scoped_refptr<VP9Picture> picture(new VP9Picture);
+ EXPECT_CALL(*mock_accelerator_, GetPicture(encode_job.get()))
+ .WillOnce(Invoke(
+ [picture](AcceleratedVideoEncoder::EncodeJob*) { return picture; }));
+
+ FRAME_TYPE libvpx_frame_type =
+ keyframe ? FRAME_TYPE::KEY_FRAME : FRAME_TYPE::INTER_FRAME;
+ EXPECT_CALL(*mock_rate_ctrl_, ComputeQP(MatchFrameParam(
+ libvpx_frame_type, expected_temporal_idx)))
+ .WillOnce(Return());
+ constexpr int kDefaultQP = 34;
+ constexpr int kDefaultLoopFilterLevel = 8;
+ EXPECT_CALL(*mock_rate_ctrl_, GetQP()).WillOnce(Return(kDefaultQP));
+ EXPECT_CALL(*mock_rate_ctrl_, GetLoopfilterLevel())
+ .WillOnce(Return(kDefaultLoopFilterLevel));
+ if (!expected_ref_frames_used.empty()) {
+ EXPECT_CALL(*mock_accelerator_,
+ SubmitFrameParameters(
+ encode_job.get(), _, _, _,
+ ::testing::ElementsAreArray(expected_ref_frames_used)))
+ .WillOnce(Return(true));
+ } else {
+ EXPECT_CALL(*mock_accelerator_,
+ SubmitFrameParameters(encode_job.get(), _, _, _, _))
+ .WillOnce(Return(true));
+ }
+ EXPECT_TRUE(encoder_->PrepareEncodeJob(encode_job.get()));
+
+ // TODO(hiroh): Test for encoder_->reference_frames_.
+
+ constexpr size_t kDefaultEncodedFrameSize = 123456;
+ // For BitrateControlUpdate sequence.
+ EXPECT_CALL(*mock_rate_ctrl_, PostEncodeUpdate(kDefaultEncodedFrameSize))
+ .WillOnce(Return());
+ encoder_->BitrateControlUpdate(kDefaultEncodedFrameSize);
+}
+
+void VP9EncoderTest::UpdateRatesSequence(
+ const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate,
+ BitrateControl bitrate_control) {
+ ASSERT_TRUE(encoder_->current_params_.bitrate_allocation !=
+ bitrate_allocation ||
+ encoder_->current_params_.framerate != framerate);
+
+ if (bitrate_control == BitrateControl::kConstantQuantizationParameter) {
+ constexpr size_t kNumTemporalLayers = 1u;
+ EXPECT_CALL(*mock_rate_ctrl_,
+ UpdateRateControl(MatchRtcConfigWithRates(
+ encoder_->visible_size_, bitrate_allocation, framerate,
+ kNumTemporalLayers)))
+ .Times(1)
+ .WillOnce(Return());
+ }
+
+ EXPECT_TRUE(encoder_->UpdateRates(bitrate_allocation, framerate));
+ EXPECT_EQ(encoder_->current_params_.bitrate_allocation, bitrate_allocation);
+ EXPECT_EQ(encoder_->current_params_.framerate, framerate);
+}
+
+void VP9EncoderTest::UpdateRatesTest(BitrateControl bitrate_control,
+ size_t num_temporal_layers) {
+ ASSERT_LE(num_temporal_layers, 3u);
+ auto create_allocation =
+ [num_temporal_layers](uint32_t bitrate) -> VideoBitrateAllocation {
+ VideoBitrateAllocation bitrate_allocation;
+ constexpr int kTemporalLayerBitrateScaleFactor[] = {1, 2, 4};
+ const int kScaleFactors =
+ std::accumulate(std::cbegin(kTemporalLayerBitrateScaleFactor),
+ std::cend(kTemporalLayerBitrateScaleFactor), 0);
+ for (size_t ti = 0; ti < num_temporal_layers; ti++) {
+ bitrate_allocation.SetBitrate(
+ 0, ti,
+ bitrate * kTemporalLayerBitrateScaleFactor[ti] / kScaleFactors);
+ }
+ return bitrate_allocation;
+ };
+
+ const auto update_rates_and_encode =
+ [this, bitrate_control](FrameType frame_type,
+ const VideoBitrateAllocation& bitrate_allocation,
+ uint32_t framerate) {
+ UpdateRatesSequence(bitrate_allocation, framerate, bitrate_control);
+ if (bitrate_control == BitrateControl::kConstantQuantizationParameter) {
+ EncodeConstantQuantizationParameterSequence(frame_type, {},
+ base::nullopt);
+ } else {
+ EncodeSequence(frame_type);
+ }
+ };
+
+ const uint32_t kBitrate =
+ kDefaultVideoEncodeAcceleratorConfig.initial_bitrate;
+ const uint32_t kFramerate =
+ *kDefaultVideoEncodeAcceleratorConfig.initial_framerate;
+ // Call UpdateRates before Encode.
+ update_rates_and_encode(FrameType::KEYFRAME, create_allocation(kBitrate / 2),
+ kFramerate);
+ // Bitrate change only.
+ update_rates_and_encode(FrameType::INTERFRAME, create_allocation(kBitrate),
+ kFramerate);
+ // Framerate change only.
+ update_rates_and_encode(FrameType::INTERFRAME, create_allocation(kBitrate),
+ kFramerate + 2);
+ // Bitrate + Frame changes.
+ update_rates_and_encode(FrameType::INTERFRAME,
+ create_allocation(kBitrate * 3 / 4), kFramerate - 5);
+}
+
+struct VP9EncoderTestParam {
+ VP9EncoderTest::BitrateControl bitrate_control;
+} kTestCasesForVP9EncoderTest[] = {
+ {VP9EncoderTest::BitrateControl::kConstantBitrate},
+ {VP9EncoderTest::BitrateControl::kConstantQuantizationParameter},
+};
+
+TEST_P(VP9EncoderTest, Initialize) {
+ InitializeVP9Encoder(GetParam().bitrate_control);
+}
+
+TEST_P(VP9EncoderTest, Encode) {
+ const auto& bitrate_control = GetParam().bitrate_control;
+ InitializeVP9Encoder(bitrate_control);
+ if (bitrate_control == BitrateControl::kConstantBitrate) {
+ EncodeSequence(FrameType::KEYFRAME);
+ EncodeSequence(FrameType::INTERFRAME);
+ } else {
+ EncodeConstantQuantizationParameterSequence(FrameType::KEYFRAME,
+ kRefFramesUsedForKeyFrame);
+ EncodeConstantQuantizationParameterSequence(FrameType::INTERFRAME,
+ kRefFramesUsedForInterFrame);
+ }
+}
+
+TEST_P(VP9EncoderTest, UpdateRates) {
+ const auto& bitrate_control = GetParam().bitrate_control;
+ InitializeVP9Encoder(bitrate_control);
+ constexpr size_t kNumTemporalLayers = 1u;
+ UpdateRatesTest(bitrate_control, kNumTemporalLayers);
+}
+
+TEST_P(VP9EncoderTest, ForceKeyFrame) {
+ const auto& bitrate_control = GetParam().bitrate_control;
+ InitializeVP9Encoder(GetParam().bitrate_control);
+ if (bitrate_control == BitrateControl::kConstantBitrate) {
+ EncodeSequence(FrameType::KEYFRAME);
+ EncodeSequence(FrameType::INTERFRAME);
+ EncodeSequence(FrameType::KEYFRAME);
+ EncodeSequence(FrameType::INTERFRAME);
+ } else {
+ EncodeConstantQuantizationParameterSequence(FrameType::KEYFRAME,
+ kRefFramesUsedForKeyFrame);
+ EncodeConstantQuantizationParameterSequence(FrameType::INTERFRAME,
+ kRefFramesUsedForInterFrame);
+ EncodeConstantQuantizationParameterSequence(FrameType::KEYFRAME,
+ kRefFramesUsedForKeyFrame);
+ EncodeConstantQuantizationParameterSequence(FrameType::INTERFRAME,
+ kRefFramesUsedForInterFrame);
+ }
+}
+
+INSTANTIATE_TEST_SUITE_P(,
+ VP9EncoderTest,
+ ::testing::ValuesIn(kTestCasesForVP9EncoderTest));
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/vp9_rate_control.cc b/chromium/media/gpu/vaapi/vp9_rate_control.cc
new file mode 100644
index 00000000000..f4d6beb6129
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vp9_rate_control.cc
@@ -0,0 +1,53 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/vp9_rate_control.h"
+
+#include "base/logging.h"
+#include "base/memory/ptr_util.h"
+#include "third_party/libvpx/source/libvpx/vp9/ratectrl_rtc.h"
+
+namespace media {
+namespace {
+class LibvpxVP9RateControl : public VP9RateControl {
+ public:
+ explicit LibvpxVP9RateControl(std::unique_ptr<libvpx::VP9RateControlRTC> impl)
+ : impl_(std::move(impl)) {}
+
+ ~LibvpxVP9RateControl() override = default;
+ LibvpxVP9RateControl(const LibvpxVP9RateControl&) = delete;
+ LibvpxVP9RateControl& operator=(const LibvpxVP9RateControl&) = delete;
+
+ void UpdateRateControl(
+ const libvpx::VP9RateControlRtcConfig& rate_control_config) override {
+ impl_->UpdateRateControl(rate_control_config);
+ }
+ int GetQP() const override { return impl_->GetQP(); }
+ int GetLoopfilterLevel() const override {
+ return impl_->GetLoopfilterLevel();
+ }
+ void ComputeQP(const libvpx::VP9FrameParamsQpRTC& frame_params) override {
+ impl_->ComputeQP(frame_params);
+ }
+ void PostEncodeUpdate(uint64_t encoded_frame_size) override {
+ impl_->PostEncodeUpdate(encoded_frame_size);
+ }
+
+ private:
+ const std::unique_ptr<libvpx::VP9RateControlRTC> impl_;
+};
+
+} // namespace
+
+// static
+std::unique_ptr<VP9RateControl> VP9RateControl::Create(
+ const libvpx::VP9RateControlRtcConfig& config) {
+ auto impl = libvpx::VP9RateControlRTC::Create(config);
+ if (!impl) {
+ DLOG(ERROR) << "Failed creating libvpx::VP9RateControlRTC";
+ return nullptr;
+ }
+ return std::make_unique<LibvpxVP9RateControl>(std::move(impl));
+}
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/vp9_rate_control.h b/chromium/media/gpu/vaapi/vp9_rate_control.h
new file mode 100644
index 00000000000..116f47f5895
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vp9_rate_control.h
@@ -0,0 +1,38 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+#ifndef MEDIA_GPU_VAAPI_VP9_RATE_CONTROL_H_
+#define MEDIA_GPU_VAAPI_VP9_RATE_CONTROL_H_
+
+#include <memory>
+
+#include "base/callback.h"
+#include "base/optional.h"
+
+namespace libvpx {
+struct VP9FrameParamsQpRTC;
+struct VP9RateControlRtcConfig;
+} // namespace libvpx
+
+namespace media {
+// VP9RateControl is an interface to compute proper quantization
+// parameter and loop filter level for vp9.
+class VP9RateControl {
+ public:
+ // Creates VP9RateControl using libvpx implementation.
+ static std::unique_ptr<VP9RateControl> Create(
+ const libvpx::VP9RateControlRtcConfig& config);
+
+ virtual ~VP9RateControl() = default;
+
+ virtual void UpdateRateControl(
+ const libvpx::VP9RateControlRtcConfig& rate_control_config) = 0;
+ // libvpx::VP9FrameParamsQpRTC takes 0-63 quantization parameter.
+ virtual void ComputeQP(const libvpx::VP9FrameParamsQpRTC& frame_params) = 0;
+ // GetQP() returns vp9 ac/dc table index. The range is 0-255.
+ virtual int GetQP() const = 0;
+ virtual int GetLoopfilterLevel() const = 0;
+ virtual void PostEncodeUpdate(uint64_t encoded_frame_size) = 0;
+};
+} // namespace media
+#endif // MEDIA_GPU_VAAPI_VP9_RATE_CONTROL_H_
diff --git a/chromium/media/gpu/video_encode_accelerator_perf_tests.cc b/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
index 24eb0e26467..f86d3824973 100644
--- a/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
+++ b/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
@@ -29,6 +29,7 @@ namespace {
// TODO(dstaessens): Add video_encoder_perf_test_usage.md
constexpr const char* usage_msg =
"usage: video_encode_accelerator_perf_tests\n"
+ " [--codec=<codec>]\n"
" [-v=<level>] [--vmodule=<config>] [--output_folder]\n"
" [--gtest_help] [--help]\n"
" [<video path>] [<video metadata path>]\n";
@@ -42,6 +43,8 @@ constexpr const char* help_msg =
"containing the video's metadata. By default <video path>.json will be\n"
"used.\n"
"\nThe following arguments are supported:\n"
+ " --codec codec profile to encode, \"h264 (baseline)\",\n"
+ " \"h264main, \"h264high\", \"vp8\" and \"vp9\"\n"
" -v enable verbose mode, e.g. -v=2.\n"
" --vmodule enable verbose mode for the specified module,\n"
" --output_folder overwrite the output folder used to store\n"
@@ -115,11 +118,8 @@ class PerformanceEvaluator : public BitstreamProcessor {
// Create a new performance evaluator.
PerformanceEvaluator() {}
- // Interface BitstreamProcessor
- void ProcessBitstreamBuffer(
- int32_t bitstream_buffer_id,
- const BitstreamBufferMetadata& metadata,
- const base::UnsafeSharedMemoryRegion* shm) override;
+ void ProcessBitstream(scoped_refptr<BitstreamRef> bitstream,
+ size_t frame_index) override;
bool WaitUntilDone() override { return true; }
// Start/Stop collecting performance metrics.
@@ -141,10 +141,9 @@ class PerformanceEvaluator : public BitstreamProcessor {
PerformanceMetrics perf_metrics_;
};
-void PerformanceEvaluator::ProcessBitstreamBuffer(
- int32_t bitstream_buffer_id,
- const BitstreamBufferMetadata& metadata,
- const base::UnsafeSharedMemoryRegion* shm) {
+void PerformanceEvaluator::ProcessBitstream(
+ scoped_refptr<BitstreamRef> bitstream,
+ size_t frame_index) {
base::TimeTicks now = base::TimeTicks::Now();
base::TimeDelta delivery_time = (now - prev_bitstream_delivery_time_);
@@ -152,7 +151,8 @@ void PerformanceEvaluator::ProcessBitstreamBuffer(
delivery_time.InMillisecondsF());
prev_bitstream_delivery_time_ = now;
- base::TimeDelta encode_time = now.since_origin() - metadata.timestamp;
+ base::TimeDelta encode_time =
+ now.since_origin() - bitstream->metadata.timestamp;
perf_metrics_.bitstream_encode_times_.push_back(
encode_time.InMillisecondsF());
}
@@ -270,7 +270,8 @@ void PerformanceMetrics::WriteToFile() const {
class VideoEncoderTest : public ::testing::Test {
public:
// Create a new video encoder instance.
- std::unique_ptr<VideoEncoder> CreateVideoEncoder(const Video* video) {
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(const Video* video,
+ VideoCodecProfile profile) {
LOG_ASSERT(video);
std::vector<std::unique_ptr<BitstreamProcessor>> bitstream_processors;
@@ -280,6 +281,7 @@ class VideoEncoderTest : public ::testing::Test {
VideoEncoderClientConfig config;
config.framerate = video->FrameRate();
+ config.output_profile = profile;
auto video_encoder =
VideoEncoder::Create(config, std::move(bitstream_processors));
LOG_ASSERT(video_encoder);
@@ -297,7 +299,7 @@ class VideoEncoderTest : public ::testing::Test {
// test will encode a video as fast as possible, and gives an idea about the
// maximum output of the encoder.
TEST_F(VideoEncoderTest, MeasureUncappedPerformance) {
- auto encoder = CreateVideoEncoder(g_env->Video());
+ auto encoder = CreateVideoEncoder(g_env->Video(), g_env->Profile());
performance_evaluator_->StartMeasuring();
encoder->Encode();
@@ -336,6 +338,7 @@ int main(int argc, char** argv) {
: base::FilePath(media::test::kDefaultTestVideoPath);
base::FilePath video_metadata_path =
(args.size() >= 2) ? base::FilePath(args[1]) : base::FilePath();
+ std::string codec = "h264";
// Parse command line arguments.
base::FilePath::StringType output_folder = media::test::kDefaultOutputFolder;
@@ -349,6 +352,8 @@ int main(int argc, char** argv) {
if (it->first == "output_folder") {
output_folder = it->second;
+ } else if (it->first == "codec") {
+ codec = it->second;
} else {
std::cout << "unknown option: --" << it->first << "\n"
<< media::test::usage_msg;
@@ -361,7 +366,8 @@ int main(int argc, char** argv) {
// Set up our test environment.
media::test::VideoEncoderTestEnvironment* test_environment =
media::test::VideoEncoderTestEnvironment::Create(
- video_path, video_metadata_path, base::FilePath(output_folder));
+ video_path, video_metadata_path, false, base::FilePath(output_folder),
+ codec, false /* output_bitstream */);
if (!test_environment)
return EXIT_FAILURE;
diff --git a/chromium/media/gpu/video_encode_accelerator_tests.cc b/chromium/media/gpu/video_encode_accelerator_tests.cc
index 155a602be67..1531e9ba965 100644
--- a/chromium/media/gpu/video_encode_accelerator_tests.cc
+++ b/chromium/media/gpu/video_encode_accelerator_tests.cc
@@ -2,15 +2,26 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <algorithm>
#include <limits>
#include "base/command_line.h"
#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "media/base/media_util.h"
#include "media/base/test_data_util.h"
+#include "media/base/video_bitrate_allocation.h"
+#include "media/base/video_decoder_config.h"
#include "media/gpu/test/video.h"
+#include "media/gpu/test/video_encoder/bitstream_file_writer.h"
+#include "media/gpu/test/video_encoder/bitstream_validator.h"
+#include "media/gpu/test/video_encoder/decoder_buffer_validator.h"
#include "media/gpu/test/video_encoder/video_encoder.h"
#include "media/gpu/test/video_encoder/video_encoder_client.h"
#include "media/gpu/test/video_encoder/video_encoder_test_environment.h"
+#include "media/gpu/test/video_frame_helpers.h"
+#include "media/gpu/test/video_frame_validator.h"
+#include "media/gpu/test/video_test_helpers.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
@@ -23,6 +34,8 @@ namespace {
// TODO(dstaessens): Add video_encoder_test_usage.md
constexpr const char* usage_msg =
"usage: video_encode_accelerator_tests\n"
+ " [--codec=<codec>] [--disable_validator]\n"
+ " [--output_bitstream] [--output_folder=<filepath>]\n"
" [-v=<level>] [--vmodule=<config>] [--gtest_help] [--help]\n"
" [<video path>] [<video metadata path>]\n";
@@ -35,6 +48,16 @@ constexpr const char* help_msg =
"containing the video's metadata, such as frame checksums. By default\n"
"<video path>.json will be used.\n"
"\nThe following arguments are supported:\n"
+ " --codec codec profile to encode, \"h264\" (baseline),\n"
+ " \"h264main, \"h264high\", \"vp8\" and \"vp9\".\n"
+ " H264 Baseline is selected if unspecified.\n"
+ " --disable_validator disable validation of encoded bitstream.\n\n"
+ " --output_bitstream save the output bitstream in either H264 AnnexB\n"
+ " format (for H264) or IVF format (for vp8 and vp9)\n"
+ " to <output_folder>/<testname>/<filename> +\n"
+ " .(h264|ivf).\n"
+ " --output_folder set the basic folder used to store the output\n"
+ " stream. The default is the current directory.\n"
" -v enable verbose mode, e.g. -v=2.\n"
" --vmodule enable verbose mode for the specified module,\n"
" e.g. --vmodule=*media/gpu*=2.\n\n"
@@ -45,31 +68,115 @@ constexpr const char* help_msg =
constexpr base::FilePath::CharType kDefaultTestVideoPath[] =
FILE_PATH_LITERAL("bear_320x192_40frames.yuv.webm");
+// The number of frames to encode for bitrate check test cases.
+// TODO(hiroh): Decrease this values to make the test faster.
+constexpr size_t kNumFramesToEncodeForBitrateCheck = 300;
+// Tolerance factor for how encoded bitrate can differ from requested bitrate.
+constexpr double kBitrateTolerance = 0.1;
+
media::test::VideoEncoderTestEnvironment* g_env;
// Video encode test class. Performs setup and teardown for each single test.
class VideoEncoderTest : public ::testing::Test {
public:
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
- const Video* video,
- VideoEncoderClientConfig config = VideoEncoderClientConfig()) {
+ Video* video,
+ VideoEncoderClientConfig config) {
LOG_ASSERT(video);
- auto video_encoder = VideoEncoder::Create(config);
-
+ auto video_encoder =
+ VideoEncoder::Create(config, CreateBitstreamProcessors(video, config));
LOG_ASSERT(video_encoder);
- LOG_ASSERT(video_encoder->Initialize(video));
+
+ if (!video_encoder->Initialize(video))
+ ADD_FAILURE();
return video_encoder;
}
+
+ private:
+ std::vector<std::unique_ptr<BitstreamProcessor>> CreateBitstreamProcessors(
+ Video* video,
+ VideoEncoderClientConfig config) {
+ std::vector<std::unique_ptr<BitstreamProcessor>> bitstream_processors;
+ if (!g_env->IsBitstreamValidatorEnabled()) {
+ return bitstream_processors;
+ }
+
+ const gfx::Rect visible_rect(video->Resolution());
+ VideoCodec codec = VideoCodecProfileToVideoCodec(config.output_profile);
+ switch (codec) {
+ case kCodecH264:
+ bitstream_processors.emplace_back(
+ new H264Validator(config.output_profile, visible_rect));
+ break;
+ case kCodecVP8:
+ bitstream_processors.emplace_back(new VP8Validator(visible_rect));
+ break;
+ case kCodecVP9:
+ bitstream_processors.emplace_back(
+ new VP9Validator(config.output_profile, visible_rect));
+ break;
+ default:
+ LOG(ERROR) << "Unsupported profile: "
+ << GetProfileName(config.output_profile);
+ break;
+ }
+
+ // Attach a bitstream validator to validate all encoded video frames. The
+ // bitstream validator uses a software video decoder to validate the
+ // encoded buffers by decoding them. Metrics such as the image's SSIM can
+ // be calculated for additional quality checks.
+ VideoDecoderConfig decoder_config(
+ codec, config.output_profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, visible_rect.size(), visible_rect,
+ visible_rect.size(), EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ std::vector<std::unique_ptr<VideoFrameProcessor>> video_frame_processors;
+
+ raw_data_helper_ = RawDataHelper::Create(video);
+ if (!raw_data_helper_) {
+ LOG(ERROR) << "Failed to create raw data helper";
+ return bitstream_processors;
+ }
+
+ // TODO(hiroh): Add corrupt frame processors.
+ VideoFrameValidator::GetModelFrameCB get_model_frame_cb =
+ base::BindRepeating(&VideoEncoderTest::GetModelFrame,
+ base::Unretained(this));
+ auto psnr_validator = PSNRVideoFrameValidator::Create(get_model_frame_cb);
+ auto ssim_validator = SSIMVideoFrameValidator::Create(get_model_frame_cb);
+ video_frame_processors.push_back(std::move(psnr_validator));
+ video_frame_processors.push_back(std::move(ssim_validator));
+ auto bitstream_validator = BitstreamValidator::Create(
+ decoder_config, config.num_frames_to_encode - 1,
+ std::move(video_frame_processors));
+ LOG_ASSERT(bitstream_validator);
+ bitstream_processors.emplace_back(std::move(bitstream_validator));
+
+ auto output_bitstream_filepath = g_env->OutputBitstreamFilePath();
+ if (output_bitstream_filepath) {
+ auto bitstream_writer = BitstreamFileWriter::Create(
+ *output_bitstream_filepath, codec, visible_rect.size(),
+ config.framerate, config.num_frames_to_encode);
+ LOG_ASSERT(bitstream_writer);
+ bitstream_processors.emplace_back(std::move(bitstream_writer));
+ }
+
+ return bitstream_processors;
+ }
+
+ scoped_refptr<const VideoFrame> GetModelFrame(size_t frame_index) {
+ LOG_ASSERT(raw_data_helper_);
+ return raw_data_helper_->GetFrame(frame_index %
+ g_env->Video()->NumFrames());
+ }
+
+ std::unique_ptr<RawDataHelper> raw_data_helper_;
};
} // namespace
// TODO(dstaessens): Add more test scenarios:
-// - Vary framerate
-// - Vary bitrate
-// - Flush midstream
// - Forcing key frames
// Encode video from start to end. Wait for the kFlushDone event at the end of
@@ -77,6 +184,8 @@ class VideoEncoderTest : public ::testing::Test {
TEST_F(VideoEncoderTest, FlushAtEndOfStream) {
VideoEncoderClientConfig config = VideoEncoderClientConfig();
config.framerate = g_env->Video()->FrameRate();
+ config.output_profile = g_env->Profile();
+ config.num_frames_to_encode = g_env->Video()->NumFrames();
auto encoder = CreateVideoEncoder(g_env->Video(), config);
encoder->Encode();
@@ -87,6 +196,132 @@ TEST_F(VideoEncoderTest, FlushAtEndOfStream) {
EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
}
+// Test initializing the video encoder. The test will be successful if the video
+// encoder is capable of setting up the encoder for the specified codec and
+// resolution. The test only verifies initialization and doesn't do any
+// encoding.
+TEST_F(VideoEncoderTest, Initialize) {
+ VideoEncoderClientConfig config = VideoEncoderClientConfig();
+ auto encoder = CreateVideoEncoder(g_env->Video(), config);
+
+ EXPECT_EQ(encoder->GetEventCount(VideoEncoder::kInitialized), 1u);
+}
+
+// Create a video encoder and immediately destroy it without initializing. The
+// video encoder will be automatically destroyed when the video encoder goes out
+// of scope at the end of the test. The test will pass if no asserts or crashes
+// are triggered upon destroying.
+TEST_F(VideoEncoderTest, DestroyBeforeInitialize) {
+ VideoEncoderClientConfig config = VideoEncoderClientConfig();
+ auto video_encoder = VideoEncoder::Create(config);
+
+ EXPECT_NE(video_encoder, nullptr);
+}
+
+// Encode multiple videos simultaneously from start to finish.
+TEST_F(VideoEncoderTest, FlushAtEndOfStream_MultipleConcurrentEncodes) {
+ // The minimal number of concurrent encoders we expect to be supported.
+ constexpr size_t kMinSupportedConcurrentEncoders = 3;
+
+ VideoEncoderClientConfig config = VideoEncoderClientConfig();
+ config.framerate = g_env->Video()->FrameRate();
+ config.output_profile = g_env->Profile();
+ config.num_frames_to_encode = g_env->Video()->NumFrames();
+
+ std::vector<std::unique_ptr<VideoEncoder>> encoders(
+ kMinSupportedConcurrentEncoders);
+ for (size_t i = 0; i < kMinSupportedConcurrentEncoders; ++i)
+ encoders[i] = CreateVideoEncoder(g_env->Video(), config);
+
+ for (size_t i = 0; i < kMinSupportedConcurrentEncoders; ++i)
+ encoders[i]->Encode();
+
+ for (size_t i = 0; i < kMinSupportedConcurrentEncoders; ++i) {
+ EXPECT_TRUE(encoders[i]->WaitForFlushDone());
+ EXPECT_EQ(encoders[i]->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoders[i]->GetFrameReleasedCount(),
+ g_env->Video()->NumFrames());
+ EXPECT_TRUE(encoders[i]->WaitForBitstreamProcessors());
+ }
+}
+
+TEST_F(VideoEncoderTest, BitrateCheck) {
+ VideoEncoderClientConfig config = VideoEncoderClientConfig();
+ config.framerate = g_env->Video()->FrameRate();
+ config.output_profile = g_env->Profile();
+ config.num_frames_to_encode = kNumFramesToEncodeForBitrateCheck;
+ auto encoder = CreateVideoEncoder(g_env->Video(), config);
+
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), config.num_frames_to_encode);
+ EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
+ EXPECT_NEAR(encoder->GetStats().Bitrate(), config.bitrate,
+ kBitrateTolerance * config.bitrate);
+}
+
+TEST_F(VideoEncoderTest, DynamicBitrateChange) {
+ VideoEncoderClientConfig config;
+ config.framerate = g_env->Video()->FrameRate();
+ config.output_profile = g_env->Profile();
+ config.num_frames_to_encode = kNumFramesToEncodeForBitrateCheck * 2;
+ auto encoder = CreateVideoEncoder(g_env->Video(), config);
+
+ // Encode the video with the first bitrate.
+ const uint32_t first_bitrate = config.bitrate;
+ encoder->EncodeUntil(VideoEncoder::kFrameReleased,
+ kNumFramesToEncodeForBitrateCheck);
+ encoder->WaitForEvent(VideoEncoder::kFrameReleased,
+ kNumFramesToEncodeForBitrateCheck);
+ EXPECT_NEAR(encoder->GetStats().Bitrate(), first_bitrate,
+ kBitrateTolerance * first_bitrate);
+
+ // Encode the video with the second bitrate.
+ const uint32_t second_bitrate = first_bitrate * 3 / 2;
+ encoder->ResetStats();
+ encoder->UpdateBitrate(second_bitrate, config.framerate);
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+ EXPECT_NEAR(encoder->GetStats().Bitrate(), second_bitrate,
+ kBitrateTolerance * second_bitrate);
+
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), config.num_frames_to_encode);
+ EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
+}
+
+TEST_F(VideoEncoderTest, DynamicFramerateChange) {
+ VideoEncoderClientConfig config;
+ config.framerate = g_env->Video()->FrameRate();
+ config.output_profile = g_env->Profile();
+ config.num_frames_to_encode = kNumFramesToEncodeForBitrateCheck * 2;
+ auto encoder = CreateVideoEncoder(g_env->Video(), config);
+
+ // Encode the video with the first framerate.
+ const uint32_t first_framerate = config.framerate;
+
+ encoder->EncodeUntil(VideoEncoder::kFrameReleased,
+ kNumFramesToEncodeForBitrateCheck);
+ encoder->WaitForEvent(VideoEncoder::kFrameReleased,
+ kNumFramesToEncodeForBitrateCheck);
+ EXPECT_NEAR(encoder->GetStats().Bitrate(), config.bitrate,
+ kBitrateTolerance * config.bitrate);
+
+ // Encode the video with the second framerate.
+ const uint32_t second_framerate = first_framerate * 3 / 2;
+ encoder->ResetStats();
+ encoder->UpdateBitrate(config.bitrate, second_framerate);
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+ EXPECT_NEAR(encoder->GetStats().Bitrate(), config.bitrate,
+ kBitrateTolerance * config.bitrate);
+
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), config.num_frames_to_encode);
+ EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
+}
} // namespace test
} // namespace media
@@ -111,8 +346,13 @@ int main(int argc, char** argv) {
: base::FilePath(media::test::kDefaultTestVideoPath);
base::FilePath video_metadata_path =
(args.size() >= 2) ? base::FilePath(args[1]) : base::FilePath();
+ std::string codec = "h264";
+ bool output_bitstream = false;
+ base::FilePath output_folder =
+ base::FilePath(base::FilePath::kCurrentDirectory);
// Parse command line arguments.
+ bool enable_bitstream_validator = true;
base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
it != switches.end(); ++it) {
@@ -121,9 +361,19 @@ int main(int argc, char** argv) {
continue;
}
- std::cout << "unknown option: --" << it->first << "\n"
- << media::test::usage_msg;
- return EXIT_FAILURE;
+ if (it->first == "codec") {
+ codec = it->second;
+ } else if (it->first == "disable_validator") {
+ enable_bitstream_validator = false;
+ } else if (it->first == "output_bitstream") {
+ output_bitstream = true;
+ } else if (it->first == "output_folder") {
+ output_folder = base::FilePath(it->second);
+ } else {
+ std::cout << "unknown option: --" << it->first << "\n"
+ << media::test::usage_msg;
+ return EXIT_FAILURE;
+ }
}
testing::InitGoogleTest(&argc, argv);
@@ -131,7 +381,8 @@ int main(int argc, char** argv) {
// Set up our test environment.
media::test::VideoEncoderTestEnvironment* test_environment =
media::test::VideoEncoderTestEnvironment::Create(
- video_path, video_metadata_path, base::FilePath());
+ video_path, video_metadata_path, enable_bitstream_validator,
+ output_folder, codec, output_bitstream);
if (!test_environment)
return EXIT_FAILURE;
diff --git a/chromium/media/gpu/video_encode_accelerator_unittest.cc b/chromium/media/gpu/video_encode_accelerator_unittest.cc
index 25b8342343c..002cd3bea2a 100644
--- a/chromium/media/gpu/video_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/video_encode_accelerator_unittest.cc
@@ -20,6 +20,7 @@
#include "base/containers/queue.h"
#include "base/files/file_util.h"
#include "base/macros.h"
+#include "base/memory/aligned_memory.h"
#include "base/memory/ref_counted.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/memory/weak_ptr.h"
@@ -216,6 +217,21 @@ bool g_native_input = false;
class VideoEncodeAcceleratorTestEnvironment;
VideoEncodeAcceleratorTestEnvironment* g_env;
+std::unique_ptr<base::test::ScopedFeatureList> CreateScopedFeatureList() {
+#if BUILDFLAG(USE_VAAPI)
+ auto scoped_feature_list = std::make_unique<base::test::ScopedFeatureList>();
+ std::vector<base::Feature> enabled_features = {
+ // TODO(crbug.com/828482): remove once enabled by default.
+ media::kVaapiLowPowerEncoderGen9x,
+ // TODO(crbug.com/811912): remove once enabled by default.
+ media::kVaapiVP9Encoder};
+ scoped_feature_list->InitWithFeatures(enabled_features, {});
+ return scoped_feature_list;
+#else
+ return nullptr;
+#endif // BUILDFLAG(USE_VAAPI)
+}
+
// The number of frames to be encoded. This variable is set by the switch
// "--num_frames_to_encode". Ignored if 0.
int g_num_frames_to_encode = 0;
@@ -541,7 +557,7 @@ static void CreateAlignedInputStreamFile(const gfx::Size& coded_size,
const char* src_ptr = &src_data[0];
for (size_t i = 0; i < num_planes; i++) {
// Assert that each plane of frame starts at required byte boundary.
- ASSERT_EQ(0u, dest_offset & (test::kPlatformBufferAlignment - 1))
+ ASSERT_TRUE(base::IsAligned(dest_offset, test::kPlatformBufferAlignment))
<< "Planes of frame should be mapped per platform requirements";
char* dst_ptr = &test_stream->aligned_in_file_data[dest_offset];
for (size_t j = 0; j < visible_plane_rows[i]; j++) {
@@ -1492,10 +1508,15 @@ class VEAClient : public VEAClientBase {
bool mid_stream_framerate_switch,
bool verify_output,
bool verify_output_timestamp,
- bool force_level);
+ bool force_level,
+ bool scale);
void CreateEncoder();
void DestroyEncoder();
+ bool requested_scaling() const {
+ return encoded_visible_size_ != test_stream_->visible_size;
+ }
+
// VideoDecodeAccelerator::Client implementation.
void RequireBitstreamBuffers(unsigned int input_count,
const gfx::Size& input_coded_size,
@@ -1653,6 +1674,11 @@ class VEAClient : public VEAClientBase {
// Check whether the output timestamps match input timestamps.
bool verify_output_timestamp_;
+ // The visible size we want the encoded stream to have. This can be different
+ // than the visible size of the |test_stream_| when doing scaling in native
+ // input mode.
+ gfx::Size encoded_visible_size_;
+
// Used to perform codec-specific sanity checks on the stream.
std::unique_ptr<StreamValidator> stream_validator_;
@@ -1704,7 +1730,8 @@ VEAClient::VEAClient(TestStream* test_stream,
bool mid_stream_framerate_switch,
bool verify_output,
bool verify_output_timestamp,
- bool force_level)
+ bool force_level,
+ bool scale)
: VEAClientBase(note),
state_(CS_CREATED),
test_stream_(test_stream),
@@ -1756,6 +1783,18 @@ VEAClient::VEAClient(TestStream* test_stream,
}
}
+ encoded_visible_size_ = test_stream_->visible_size;
+ if (scale) {
+ LOG_ASSERT(g_native_input)
+ << "Scaling is only supported on native input mode";
+ // Scale to 3/4 of the original size. The reason we don't go smaller is that
+ // we don't want to go below the minimum supported resolution of the
+ // hardware encoder and 3/4 works across all boards with the current test
+ // videos.
+ encoded_visible_size_.set_width(3 * encoded_visible_size_.width() / 4);
+ encoded_visible_size_.set_height(3 * encoded_visible_size_.height() / 4);
+ }
+
if (save_to_file_) {
LOG_ASSERT(!test_stream_->out_filename.empty());
#if defined(OS_POSIX)
@@ -1805,7 +1844,7 @@ void VEAClient::CreateEncoder() {
? VideoEncodeAccelerator::Config::StorageType::kDmabuf
: VideoEncodeAccelerator::Config::StorageType::kShmem;
const VideoEncodeAccelerator::Config config(
- test_stream_->pixel_format, test_stream_->visible_size,
+ test_stream_->pixel_format, encoded_visible_size_,
test_stream_->requested_profile, requested_bitrate_, requested_framerate_,
keyframe_period_, test_stream_->requested_level, storage_type);
encoder_ = CreateVideoEncodeAccelerator(config, this, gpu::GpuPreferences());
@@ -1892,9 +1931,16 @@ void VEAClient::RequireBitstreamBuffers(unsigned int input_count,
if (quality_validator_)
quality_validator_->Initialize(input_coded_size,
- gfx::Rect(test_stream_->visible_size));
+ gfx::Rect(encoded_visible_size_));
- CreateAlignedInputStreamFile(input_coded_size, test_stream_);
+ // When scaling is requested in native input mode, |input_coded_size| is not
+ // useful for building the input video frames because the encoder's image
+ // processor will be the one responsible for building the video frames that
+ // are fed to the hardware encoder. Instead, we can just use the unscaled
+ // visible size as the coded size.
+ const gfx::Size coded_size_to_use =
+ requested_scaling() ? test_stream_->visible_size : input_coded_size;
+ CreateAlignedInputStreamFile(coded_size_to_use, test_stream_);
num_frames_to_encode_ = test_stream_->num_frames;
if (g_num_frames_to_encode > 0)
@@ -1916,7 +1962,7 @@ void VEAClient::RequireBitstreamBuffers(unsigned int input_count,
}
}
- input_coded_size_ = input_coded_size;
+ input_coded_size_ = coded_size_to_use;
num_required_input_buffers_ = input_count;
ASSERT_GT(num_required_input_buffers_, 0UL);
@@ -1989,9 +2035,9 @@ void VEAClient::BitstreamBufferReady(
stream_validator_->ProcessStreamBuffer(stream_ptr,
metadata.payload_size_bytes);
} else {
- // We don't know the visible size of without stream validator, just
- // send the expected value to pass the check.
- HandleEncodedFrame(metadata.key_frame, test_stream_->visible_size);
+ // We don't know the visible size of the encoded stream without the stream
+ // validator, so just send the expected value to pass the check.
+ HandleEncodedFrame(metadata.key_frame, encoded_visible_size_);
}
if (quality_validator_) {
@@ -2001,8 +2047,9 @@ void VEAClient::BitstreamBufferReady(
quality_validator_->AddDecodeBuffer(buffer);
}
// If the encoder does not support flush, pretend flush is done when all
- // frames are received.
- if (!encoder_->IsFlushSupported() &&
+ // frames are received. We also do this when scaling is requested (because a
+ // well behaved client should not request a flush in this situation).
+ if ((!encoder_->IsFlushSupported() || requested_scaling()) &&
num_encoded_frames_ == num_frames_to_encode_) {
FlushEncoderDone(true);
}
@@ -2098,7 +2145,7 @@ scoped_refptr<VideoFrame> VEAClient::CreateFrame(off_t position) {
scoped_refptr<VideoFrame> video_frame =
VideoFrame::WrapExternalYuvDataWithLayout(
*layout, gfx::Rect(test_stream_->visible_size),
- test_stream_->visible_size, frame_data[0], frame_data[1],
+ /*natural_size=*/encoded_visible_size_, frame_data[0], frame_data[1],
frame_data[2],
// Timestamp needs to avoid starting from 0.
base::TimeDelta().FromMilliseconds(
@@ -2201,7 +2248,12 @@ void VEAClient::FeedEncoderWithOneInput() {
}
encoder_->Encode(video_frame, force_keyframe);
++num_frames_submitted_to_encoder_;
- if (num_frames_submitted_to_encoder_ == num_frames_to_encode_) {
+
+ // If scaling was requested, we don't need to flush: that's because the only
+ // use case for Flush() is ARC++ and pixel format conversion and/or scaling
+ // are not used.
+ if (!requested_scaling() &&
+ num_frames_submitted_to_encoder_ == num_frames_to_encode_) {
FlushEncoder();
}
}
@@ -2266,7 +2318,7 @@ bool VEAClient::HandleEncodedFrame(bool keyframe,
}
}
- EXPECT_EQ(test_stream_->visible_size, visible_size);
+ EXPECT_EQ(encoded_visible_size_, visible_size);
if (num_encoded_frames_ == num_frames_to_encode_ / 2) {
VerifyStreamProperties();
@@ -2398,10 +2450,8 @@ void VEAClient::WriteIvfFileHeader(uint32_t fourcc) {
header.version = 0;
header.header_size = sizeof(header);
header.fourcc = fourcc; // VP80 or VP90
- header.width =
- base::checked_cast<uint16_t>(test_stream_->visible_size.width());
- header.height =
- base::checked_cast<uint16_t>(test_stream_->visible_size.height());
+ header.width = base::checked_cast<uint16_t>(encoded_visible_size_.width());
+ header.height = base::checked_cast<uint16_t>(encoded_visible_size_.height());
header.timebase_denum = requested_framerate_;
header.timebase_num = 1;
header.num_frames = num_frames_to_encode_;
@@ -2681,11 +2731,32 @@ void VEACacheLineUnalignedInputClient::FeedEncoderWithOneInput(
// - If true, verify the timestamps of output frames.
// - If true, verify the output level is as provided in input stream. Only
// available for H264 encoder for now.
+// - If true, request that the encoder scales the input stream to 75% of the
+// original size prior to encoding. This is only applicable when
+// |g_native_input| is true. Otherwise, the test is skipped. This is because
+// the intention is to exercise the image processor path inside the decoder,
+// and in non-native input mode, the scaling is done by the client instead of
+// the encoder (and we're not interested in testing that).
+// Note: we don't go smaller than 75% because we don't want to go below the
+// minimum supported resolution by the encoder (75% happens to work across all
+// devices with the current test videos).
class VideoEncodeAcceleratorTest
: public ::testing::TestWithParam<
- std::tuple<int, bool, int, bool, bool, bool, bool, bool, bool>> {};
+ std::
+ tuple<int, bool, int, bool, bool, bool, bool, bool, bool, bool>> {
+ public:
+ void SetUp() override {
+ const bool scale = std::get<9>(GetParam());
+ if (scale && !g_native_input)
+ GTEST_SKIP();
+ }
+};
TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) {
+ // Workaround: TestSuite::Initialize() overwrites specified features.
+ // Re-enable our required features here so that they are enabled in encoding.
+ auto scoped_feature_list = CreateScopedFeatureList();
+
size_t num_concurrent_encoders = std::get<0>(GetParam());
const bool save_to_file = std::get<1>(GetParam());
const unsigned int keyframe_period = std::get<2>(GetParam());
@@ -2696,6 +2767,7 @@ TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) {
std::get<6>(GetParam()) || g_env->verify_all_output();
const bool verify_output_timestamp = std::get<7>(GetParam());
const bool force_level = std::get<8>(GetParam());
+ const bool scale = std::get<9>(GetParam());
#if defined(OS_CHROMEOS)
if (ShouldSkipTest(g_env->test_streams_[0]->pixel_format))
@@ -2749,7 +2821,7 @@ TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) {
g_env->test_streams_[test_stream_index].get(), notes.back().get(),
encoder_save_to_file, keyframe_period, force_bitrate,
mid_stream_bitrate_switch, mid_stream_framerate_switch, verify_output,
- verify_output_timestamp, force_level));
+ verify_output_timestamp, force_level, scale));
g_env->GetRenderingTaskRunner()->PostTask(
FROM_HERE, base::BindOnce(&VEAClient::CreateEncoder,
@@ -2825,6 +2897,10 @@ void SimpleTestFunc() {
}
TEST_P(VideoEncodeAcceleratorSimpleTest, TestSimpleEncode) {
+ // Workaround: TestSuite::Initialize() overwrites specified features.
+ // Re-enable our required features here so that they are enabled in encoding.
+ auto scoped_feature_list = CreateScopedFeatureList();
+
const int test_type = GetParam();
ASSERT_LT(test_type, 2) << "Invalid test type=" << test_type;
@@ -2852,8 +2928,22 @@ INSTANTIATE_TEST_SUITE_P(SimpleEncode,
false,
false,
false,
+ false,
false)));
+INSTANTIATE_TEST_SUITE_P(SimpleEncodeWithScaling,
+ VideoEncodeAcceleratorTest,
+ ::testing::Values(std::make_tuple(1,
+ true,
+ 0,
+ false,
+ false,
+ false,
+ false,
+ false,
+ false,
+ true)));
+
INSTANTIATE_TEST_SUITE_P(EncoderPerf,
VideoEncodeAcceleratorTest,
::testing::Values(std::make_tuple(1,
@@ -2864,6 +2954,7 @@ INSTANTIATE_TEST_SUITE_P(EncoderPerf,
false,
false,
false,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(ForceKeyframes,
@@ -2876,6 +2967,7 @@ INSTANTIATE_TEST_SUITE_P(ForceKeyframes,
false,
false,
false,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(ForceBitrate,
@@ -2888,6 +2980,7 @@ INSTANTIATE_TEST_SUITE_P(ForceBitrate,
false,
false,
false,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(MidStreamParamSwitchBitrate,
@@ -2900,6 +2993,7 @@ INSTANTIATE_TEST_SUITE_P(MidStreamParamSwitchBitrate,
false,
false,
false,
+ false,
false)));
// TODO(kcwu): add back bitrate test after https://crbug.com/693336 fixed.
@@ -2913,6 +3007,7 @@ INSTANTIATE_TEST_SUITE_P(DISABLED_MidStreamParamSwitchFPS,
true,
false,
false,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
@@ -2925,6 +3020,7 @@ INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
false,
false,
false,
+ false,
false),
std::make_tuple(3,
false,
@@ -2934,6 +3030,7 @@ INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
false,
false,
false,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
@@ -2946,6 +3043,7 @@ INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
false,
false,
true,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(ForceLevel,
@@ -2958,7 +3056,8 @@ INSTANTIATE_TEST_SUITE_P(ForceLevel,
false,
false,
false,
- true)));
+ true,
+ false)));
INSTANTIATE_TEST_SUITE_P(NoInputTest,
VideoEncodeAcceleratorSimpleTest,
@@ -2979,6 +3078,7 @@ INSTANTIATE_TEST_SUITE_P(SimpleEncode,
false,
false,
false,
+ false,
false),
std::make_tuple(1,
true,
@@ -2988,6 +3088,7 @@ INSTANTIATE_TEST_SUITE_P(SimpleEncode,
false,
true,
false,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(EncoderPerf,
@@ -3000,6 +3101,7 @@ INSTANTIATE_TEST_SUITE_P(EncoderPerf,
false,
false,
false,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
@@ -3012,6 +3114,7 @@ INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
false,
false,
false,
+ false,
false)));
INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
@@ -3024,6 +3127,7 @@ INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
false,
false,
true,
+ false,
false)));
#if defined(OS_WIN)
@@ -3037,6 +3141,7 @@ INSTANTIATE_TEST_SUITE_P(ForceBitrate,
false,
false,
false,
+ false,
false)));
#endif // defined(OS_WIN)
@@ -3072,14 +3177,7 @@ class VEATestSuite : public base::TestSuite {
media::g_verify_all_output)));
#if BUILDFLAG(USE_VAAPI)
- base::test::ScopedFeatureList scoped_feature_list;
- std::vector<base::Feature> enabled_features = {
- // TODO(crbug.com/811912): remove once enabled by default.
- media::kVaapiVP9Encoder,
- // TODO(crbug.com/828482): Remove once H264 encoder on AMD is enabled by
- // default.
- media::kVaapiH264AMDEncoder};
- scoped_feature_list.InitWithFeatures(enabled_features, {});
+ auto scoped_feature_list = CreateScopedFeatureList();
media::VaapiWrapper::PreSandboxInitialization();
#elif defined(OS_WIN)
media::MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization();
diff --git a/chromium/media/gpu/vp8_decoder.cc b/chromium/media/gpu/vp8_decoder.cc
index 9b6767284df..23a88b6696b 100644
--- a/chromium/media/gpu/vp8_decoder.cc
+++ b/chromium/media/gpu/vp8_decoder.cc
@@ -3,6 +3,9 @@
// found in the LICENSE file.
#include "media/gpu/vp8_decoder.h"
+
+#include "base/logging.h"
+#include "base/notreached.h"
#include "media/base/limits.h"
namespace media {
diff --git a/chromium/media/gpu/vp9_reference_frame_vector.cc b/chromium/media/gpu/vp9_reference_frame_vector.cc
index 94627f23ecc..f4541bf53bd 100644
--- a/chromium/media/gpu/vp9_reference_frame_vector.cc
+++ b/chromium/media/gpu/vp9_reference_frame_vector.cc
@@ -4,6 +4,8 @@
#include "media/gpu/vp9_reference_frame_vector.h"
+#include <bitset>
+
#include "media/gpu/vp9_picture.h"
namespace media {
@@ -12,32 +14,26 @@ Vp9ReferenceFrameVector::Vp9ReferenceFrameVector() {
DETACH_FROM_SEQUENCE(sequence_checker_);
}
-Vp9ReferenceFrameVector::~Vp9ReferenceFrameVector() {}
+Vp9ReferenceFrameVector::~Vp9ReferenceFrameVector() = default;
-// Refresh the reference frame buffer slots with current frame
-// based on refresh_frame_flags set in the frame_hdr.
+// Refreshes |reference_frames_| slots with the current |pic|s frame header.
void Vp9ReferenceFrameVector::Refresh(scoped_refptr<VP9Picture> pic) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(pic);
- const auto& frame_hdr = pic->frame_hdr;
+ const std::bitset<kVp9NumRefFrames> refresh_frame_flags(
+ pic->frame_hdr->refresh_frame_flags);
- for (size_t i = 0, mask = frame_hdr->refresh_frame_flags; mask;
- mask >>= 1, ++i) {
- if (mask & 1)
+ for (size_t i = 0; i < kVp9NumRefFrames; ++i) {
+ if (refresh_frame_flags[i])
reference_frames_[i] = pic;
}
}
void Vp9ReferenceFrameVector::Clear() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
-
- for (auto& f : reference_frames_)
- f = nullptr;
+ reference_frames_.fill(nullptr);
}
-// VP9 can maintains up to eight active reference frames and each
-// frame can use up to three reference frames from this list.
-// GetFrame will return the reference frame placed in reference_frames_[index]
scoped_refptr<VP9Picture> Vp9ReferenceFrameVector::GetFrame(
size_t index) const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
diff --git a/chromium/media/gpu/vp9_reference_frame_vector.h b/chromium/media/gpu/vp9_reference_frame_vector.h
index ea9b2b8bdee..bf91596b320 100644
--- a/chromium/media/gpu/vp9_reference_frame_vector.h
+++ b/chromium/media/gpu/vp9_reference_frame_vector.h
@@ -15,9 +15,8 @@ namespace media {
class VP9Picture;
-// class to share reference frame management code
-// between encoder and decoder classes.
-// TODO(crbug.com/924804): Add the support in Decoder class.
+// This class encapsulates VP9-specific reference frame management code. This
+// class is thread affine.
class Vp9ReferenceFrameVector {
public:
Vp9ReferenceFrameVector();
diff --git a/chromium/media/gpu/windows/av1_guids.h b/chromium/media/gpu/windows/av1_guids.h
new file mode 100644
index 00000000000..c5e4e5d314c
--- /dev/null
+++ b/chromium/media/gpu/windows/av1_guids.h
@@ -0,0 +1,52 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_WINDOWS_AV1_GUIDS_H_
+#define MEDIA_GPU_WINDOWS_AV1_GUIDS_H_
+
+#include <dxva.h>
+#include <initguid.h>
+
+#if WDK_NTDDI_VERSION <= NTDDI_WIN10_19H1
+DEFINE_GUID(DXVA_ModeAV1_VLD_Profile0,
+ 0xb8be4ccb,
+ 0xcf53,
+ 0x46ba,
+ 0x8d,
+ 0x59,
+ 0xd6,
+ 0xb8,
+ 0xa6,
+ 0xda,
+ 0x5d,
+ 0x2a);
+
+DEFINE_GUID(DXVA_ModeAV1_VLD_Profile1,
+ 0x6936ff0f,
+ 0x45b1,
+ 0x4163,
+ 0x9c,
+ 0xc1,
+ 0x64,
+ 0x6e,
+ 0xf6,
+ 0x94,
+ 0x61,
+ 0x08);
+
+DEFINE_GUID(DXVA_ModeAV1_VLD_Profile2,
+ 0x0c5f2aa1,
+ 0xe541,
+ 0x4089,
+ 0xbb,
+ 0x7b,
+ 0x98,
+ 0x11,
+ 0x0a,
+ 0x19,
+ 0xd7,
+ 0xc8);
+#endif // WDK_NTDDI_VERSION <= NTDDI_WIN10_19H1
+
+#endif // MEDIA_GPU_WINDOWS_AV1_GUIDS_H_
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator.cc b/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
index a4a42eabf51..869106f87e9 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
@@ -73,9 +73,11 @@ bool D3D11DecoderConfigurator::SupportsDevice(
ComD3D11Texture2D D3D11DecoderConfigurator::CreateOutputTexture(
ComD3D11Device device,
- gfx::Size size) {
+ gfx::Size size,
+ uint32_t array_size) {
output_texture_desc_.Width = size.width();
output_texture_desc_.Height = size.height();
+ output_texture_desc_.ArraySize = array_size;
ComD3D11Texture2D result;
if (!SUCCEEDED(
@@ -100,7 +102,6 @@ void D3D11DecoderConfigurator::SetUpTextureDescriptor(bool supports_swap_chain,
bool is_encrypted) {
output_texture_desc_ = {};
output_texture_desc_.MipLevels = 1;
- output_texture_desc_.ArraySize = D3D11DecoderConfigurator::BUFFER_COUNT;
output_texture_desc_.Format = dxgi_format_;
output_texture_desc_.SampleDesc.Count = 1;
output_texture_desc_.Usage = D3D11_USAGE_DEFAULT;
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator.h b/chromium/media/gpu/windows/d3d11_decoder_configurator.h
index a23535bc615..3d6cd49e90c 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator.h
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator.h
@@ -40,7 +40,9 @@ class MEDIA_GPU_EXPORT D3D11DecoderConfigurator {
bool SupportsDevice(ComD3D11VideoDevice video_device);
// Create the decoder's output texture.
- ComD3D11Texture2D CreateOutputTexture(ComD3D11Device device, gfx::Size size);
+ ComD3D11Texture2D CreateOutputTexture(ComD3D11Device device,
+ gfx::Size size,
+ uint32_t array_size);
const D3D11_VIDEO_DECODER_DESC* DecoderDescriptor() const {
return &decoder_desc_;
diff --git a/chromium/media/gpu/windows/d3d11_h264_accelerator.cc b/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
index df549d3a380..e87c1ece44f 100644
--- a/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
@@ -45,12 +45,12 @@ void AppendSubsamples(
class D3D11H264Picture : public H264Picture {
public:
D3D11H264Picture(D3D11PictureBuffer* picture)
- : picture(picture), level_(picture->level()) {
+ : picture(picture), picture_index_(picture->picture_index()) {
picture->set_in_picture_use(true);
}
D3D11PictureBuffer* picture;
- size_t level_;
+ size_t picture_index_;
protected:
~D3D11H264Picture() override;
@@ -63,16 +63,16 @@ D3D11H264Picture::~D3D11H264Picture() {
D3D11H264Accelerator::D3D11H264Accelerator(
D3D11VideoDecoderClient* client,
MediaLog* media_log,
- ComD3D11VideoDecoder video_decoder,
ComD3D11VideoDevice video_device,
std::unique_ptr<VideoContextWrapper> video_context)
: client_(client),
media_log_(media_log),
- video_decoder_(video_decoder),
video_device_(video_device),
video_context_(std::move(video_context)) {
DCHECK(client);
DCHECK(media_log_);
+ client->SetDecoderCB(base::BindRepeating(
+ &D3D11H264Accelerator::SetVideoDecoder, base::Unretained(this)));
}
D3D11H264Accelerator::~D3D11H264Accelerator() {}
@@ -135,7 +135,7 @@ DecoderStatus D3D11H264Accelerator::SubmitFrameMetadata(
D3D11H264Picture* our_ref_pic = static_cast<D3D11H264Picture*>(it->get());
if (!our_ref_pic->ref)
continue;
- ref_frame_list_[i].Index7Bits = our_ref_pic->level_;
+ ref_frame_list_[i].Index7Bits = our_ref_pic->picture_index_;
ref_frame_list_[i].AssociatedFlag = our_ref_pic->long_term;
field_order_cnt_list_[i][0] = our_ref_pic->top_field_order_cnt;
field_order_cnt_list_[i][1] = our_ref_pic->bottom_field_order_cnt;
@@ -281,7 +281,7 @@ void D3D11H264Accelerator::PicParamsFromSliceHeader(
void D3D11H264Accelerator::PicParamsFromPic(DXVA_PicParams_H264* pic_param,
scoped_refptr<H264Picture> pic) {
pic_param->CurrPic.Index7Bits =
- static_cast<D3D11H264Picture*>(pic.get())->level_;
+ static_cast<D3D11H264Picture*>(pic.get())->picture_index_;
pic_param->RefPicFlag = pic->ref;
pic_param->frame_num = pic->frame_num;
@@ -588,4 +588,8 @@ void D3D11H264Accelerator::RecordFailure(const std::string& reason,
MEDIA_LOG(ERROR, media_log_) << hr_string << ": " << reason;
}
+void D3D11H264Accelerator::SetVideoDecoder(ComD3D11VideoDecoder video_decoder) {
+ video_decoder_ = std::move(video_decoder);
+}
+
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_h264_accelerator.h b/chromium/media/gpu/windows/d3d11_h264_accelerator.h
index cd9dd468755..00e2bd5cecd 100644
--- a/chromium/media/gpu/windows/d3d11_h264_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_h264_accelerator.h
@@ -34,7 +34,6 @@ class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
public:
D3D11H264Accelerator(D3D11VideoDecoderClient* client,
MediaLog* media_log,
- ComD3D11VideoDecoder video_decoder,
ComD3D11VideoDevice video_device,
std::unique_ptr<VideoContextWrapper> video_context);
~D3D11H264Accelerator() override;
@@ -78,6 +77,8 @@ class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
void PicParamsFromPic(DXVA_PicParams_H264* pic_param,
scoped_refptr<H264Picture> pic);
+ void SetVideoDecoder(ComD3D11VideoDecoder video_decoder);
+
private:
bool SubmitSliceData();
bool RetrieveBitstreamBuffer();
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer.cc b/chromium/media/gpu/windows/d3d11_picture_buffer.cc
index 7c0278b690e..60d1720e92f 100644
--- a/chromium/media/gpu/windows/d3d11_picture_buffer.cc
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer.cc
@@ -24,15 +24,17 @@ namespace media {
D3D11PictureBuffer::D3D11PictureBuffer(
scoped_refptr<base::SequencedTaskRunner> delete_task_runner,
ComD3D11Texture2D texture,
+ size_t array_slice,
std::unique_ptr<Texture2DWrapper> texture_wrapper,
gfx::Size size,
- size_t level)
+ size_t picture_index)
: RefCountedDeleteOnSequence<D3D11PictureBuffer>(
std::move(delete_task_runner)),
texture_(std::move(texture)),
+ array_slice_(array_slice),
texture_wrapper_(std::move(texture_wrapper)),
size_(size),
- level_(level) {}
+ picture_index_(picture_index) {}
D3D11PictureBuffer::~D3D11PictureBuffer() {
}
@@ -46,7 +48,7 @@ bool D3D11PictureBuffer::Init(
D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC view_desc = {};
view_desc.DecodeProfile = decoder_guid;
view_desc.ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D;
- view_desc.Texture2D.ArraySlice = (UINT)level_;
+ view_desc.Texture2D.ArraySlice = array_slice_;
if (!texture_wrapper_->Init(std::move(gpu_task_runner),
std::move(get_helper_cb))) {
@@ -69,8 +71,9 @@ bool D3D11PictureBuffer::ProcessTexture(
const gfx::ColorSpace& input_color_space,
MailboxHolderArray* mailbox_dest,
gfx::ColorSpace* output_color_space) {
- return texture_wrapper_->ProcessTexture(Texture(), level_, input_color_space,
- mailbox_dest, output_color_space);
+ return texture_wrapper_->ProcessTexture(Texture(), array_slice_,
+ input_color_space, mailbox_dest,
+ output_color_space);
}
ComD3D11Texture2D D3D11PictureBuffer::Texture() const {
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer.h b/chromium/media/gpu/windows/d3d11_picture_buffer.h
index d605772d147..08e2c307725 100644
--- a/chromium/media/gpu/windows/d3d11_picture_buffer.h
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer.h
@@ -47,13 +47,18 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
public:
// |texture_wrapper| is responsible for controlling mailbox access to
// the ID3D11Texture2D,
- // |level| is the picturebuffer index inside the Array-type ID3D11Texture2D.
+ // |array_slice| is the picturebuffer index inside the Array-type
+ // ID3D11Texture2D. |picture_index| is a unique id used to identify this
+ // picture to the decoder. If a texture array is used, then it might as well
+ // be equal to the texture array index. Otherwise, any 0-based index is
+ // probably okay, though sequential makes sense.
D3D11PictureBuffer(
scoped_refptr<base::SequencedTaskRunner> delete_task_runner,
ComD3D11Texture2D texture,
+ size_t array_slice,
std::unique_ptr<Texture2DWrapper> texture_wrapper,
gfx::Size size,
- size_t level);
+ size_t picture_index);
bool Init(scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
GetCommandBufferHelperCB get_helper_cb,
@@ -71,7 +76,7 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
ComD3D11Texture2D Texture() const;
const gfx::Size& size() const { return size_; }
- size_t level() const { return level_; }
+ size_t picture_index() const { return picture_index_; }
// Is this PictureBuffer backing a VideoFrame right now?
bool in_client_use() const { return in_client_use_; }
@@ -97,11 +102,13 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
friend class base::DeleteHelper<D3D11PictureBuffer>;
ComD3D11Texture2D texture_;
+ uint32_t array_slice_;
+
std::unique_ptr<Texture2DWrapper> texture_wrapper_;
gfx::Size size_;
bool in_picture_use_ = false;
bool in_client_use_ = false;
- size_t level_;
+ size_t picture_index_;
ComD3D11VideoDecoderOutputView output_view_;
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector.cc b/chromium/media/gpu/windows/d3d11_texture_selector.cc
index b3f0c78377d..dd3b88544ac 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_selector.cc
@@ -152,7 +152,7 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
// If we're trying to produce an output texture that's different from what
// the decoder is providing, then we need to copy it.
- needs_texture_copy = (decoder_output_format != output_dxgi_format);
+ needs_texture_copy |= (decoder_output_format != output_dxgi_format);
// Force texture copy on if requested for debugging.
if (base::FeatureList::IsEnabled(kD3D11VideoDecoderAlwaysCopy))
diff --git a/chromium/media/gpu/windows/d3d11_texture_wrapper.cc b/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
index ab7ea22a87f..58f36986b9b 100644
--- a/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
@@ -74,6 +74,9 @@ bool DefaultTexture2DWrapper::ProcessTexture(
if (received_error_)
return false;
+ // Temporary check to track down https://crbug.com/1077645
+ CHECK(texture);
+
// It's okay to post and forget this call, since it'll be ordered correctly
// with respect to any access on the gpu main thread.
gpu_resources_.Post(FROM_HERE, &GpuResources::PushNewTexture,
@@ -259,14 +262,24 @@ void DefaultTexture2DWrapper::GpuResources::Init(
void DefaultTexture2DWrapper::GpuResources::PushNewTexture(
ComD3D11Texture2D texture,
size_t array_slice) {
- if (!helper_ || !helper_->MakeContextCurrent()) {
- NotifyError(StatusCode::kCantMakeContextCurrent);
+ // If init didn't complete, then signal (another) error that will probably be
+ // ignored in favor of whatever we signalled earlier.
+ if (!gl_image_ || !stream_) {
+ NotifyError(StatusCode::kDecoderInitializeNeverCompleted);
return;
}
- // Notify |gl_image_| that it has a new texture.
+ // Notify |gl_image_| that it has a new texture. Do this unconditionally, so
+ // that we can guarantee that the image isn't null. Nobody expects it to be,
+ // and failures will be noticed only asynchronously.
+ // https://crbug.com/1077645
gl_image_->SetTexture(texture, array_slice);
+ if (!helper_ || !helper_->MakeContextCurrent()) {
+ NotifyError(StatusCode::kCantMakeContextCurrent);
+ return;
+ }
+
// Notify angle that it has a new texture.
EGLAttrib frame_attributes[] = {
EGL_D3D_TEXTURE_SUBRESOURCE_ID_ANGLE,
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.cc b/chromium/media/gpu/windows/d3d11_video_decoder.cc
index 3ba6d9b3225..a98753cb255 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.cc
@@ -9,6 +9,7 @@
#include <utility>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/debug/crash_logging.h"
#include "base/debug/dump_without_crashing.h"
@@ -26,6 +27,7 @@
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
+#include "media/base/win/hresult_status_helper.h"
#include "media/gpu/windows/d3d11_picture_buffer.h"
#include "media/gpu/windows/d3d11_video_context_wrapper.h"
#include "media/gpu/windows/d3d11_video_decoder_impl.h"
@@ -166,23 +168,125 @@ HRESULT D3D11VideoDecoder::InitializeAcceleratedDecoder(
profile_ = config.profile();
if (config.codec() == kCodecVP9) {
accelerated_video_decoder_ = std::make_unique<VP9Decoder>(
- std::make_unique<D3D11VP9Accelerator>(this, media_log_.get(),
- video_decoder, video_device_,
- std::move(video_context)),
+ std::make_unique<D3D11VP9Accelerator>(
+ this, media_log_.get(), video_device_, std::move(video_context)),
profile_, config.color_space_info());
- return hr;
- }
-
- if (config.codec() == kCodecH264) {
+ } else if (config.codec() == kCodecH264) {
accelerated_video_decoder_ = std::make_unique<H264Decoder>(
- std::make_unique<D3D11H264Accelerator>(this, media_log_.get(),
- video_decoder, video_device_,
- std::move(video_context)),
+ std::make_unique<D3D11H264Accelerator>(
+ this, media_log_.get(), video_device_, std::move(video_context)),
profile_, config.color_space_info());
- return hr;
+ } else {
+ return E_FAIL;
}
- return E_FAIL;
+ // Provide the initial video decoder object.
+ DCHECK(set_accelerator_decoder_cb_);
+ set_accelerator_decoder_cb_.Run(std::move(video_decoder));
+
+ return hr;
+}
+
+ErrorOr<std::tuple<ComD3D11VideoDecoder>>
+D3D11VideoDecoder::CreateD3D11Decoder() {
+ HRESULT hr;
+
+ // TODO: supported check?
+
+ decoder_configurator_ = D3D11DecoderConfigurator::Create(
+ gpu_preferences_, gpu_workarounds_, config_, media_log_.get());
+ if (!decoder_configurator_)
+ return StatusCode::kDecoderUnsupportedProfile;
+
+ if (!decoder_configurator_->SupportsDevice(video_device_))
+ return StatusCode::kDecoderUnsupportedCodec;
+
+ FormatSupportChecker format_checker(device_);
+ if (!format_checker.Initialize()) {
+ // Don't fail; it'll just return no support a lot.
+ MEDIA_LOG(WARNING, media_log_)
+ << "Could not create format checker, continuing";
+ }
+
+ // Use IsHDRSupported to guess whether the compositor can output HDR textures.
+ // See TextureSelector for notes about why the decoder should not care.
+ texture_selector_ = TextureSelector::Create(
+ gpu_preferences_, gpu_workarounds_,
+ decoder_configurator_->TextureFormat(),
+ is_hdr_supported_ ? TextureSelector::HDRMode::kSDROrHDR
+ : TextureSelector::HDRMode::kSDROnly,
+ &format_checker, media_log_.get());
+ if (!texture_selector_)
+ return StatusCode::kCannotCreateTextureSelector;
+
+ UINT config_count = 0;
+ hr = video_device_->GetVideoDecoderConfigCount(
+ decoder_configurator_->DecoderDescriptor(), &config_count);
+
+ if (FAILED(hr)) {
+ return Status(StatusCode::kCannotGetDecoderConfigCount)
+ .AddCause(HresultToStatus(hr));
+ }
+
+ if (config_count == 0)
+ return Status(StatusCode::kCannotGetDecoderConfigCount);
+
+ D3D11_VIDEO_DECODER_CONFIG dec_config = {};
+ bool found = false;
+
+ for (UINT i = 0; i < config_count; i++) {
+ hr = video_device_->GetVideoDecoderConfig(
+ decoder_configurator_->DecoderDescriptor(), i, &dec_config);
+ if (FAILED(hr)) {
+ return Status(StatusCode::kCannotGetDecoderConfig)
+ .AddCause(HresultToStatus(hr));
+ }
+
+ if (config_.codec() == kCodecVP9 && dec_config.ConfigBitstreamRaw == 1) {
+ // DXVA VP9 specification mentions ConfigBitstreamRaw "shall be 1".
+ found = true;
+ break;
+ }
+
+ if (config_.codec() == kCodecH264 && dec_config.ConfigBitstreamRaw == 2) {
+ // ConfigBitstreamRaw == 2 means the decoder uses DXVA_Slice_H264_Short.
+ found = true;
+ break;
+ }
+ }
+ if (!found)
+ return StatusCode::kDecoderUnsupportedConfig;
+
+ // Prefer whatever the config tells us about whether to use one Texture2D with
+ // multiple array slices, or multiple Texture2Ds with one slice each. If bit
+ // 14 is clear, then it's the former, else it's the latter.
+ //
+ // Let the workaround override array texture mode, if enabled.
+ //
+ // For more information, please see:
+ // https://download.microsoft.com/download/9/2/A/92A4E198-67E0-4ABD-9DB7-635D711C2752/DXVA_VPx.pdf
+ // https://download.microsoft.com/download/5/f/c/5fc4ec5c-bd8c-4624-8034-319c1bab7671/DXVA_H264.pdf
+ use_single_video_decoder_texture_ =
+ !!(dec_config.ConfigDecoderSpecific & (1 << 14)) ||
+ gpu_workarounds_.use_single_video_decoder_texture;
+ if (use_single_video_decoder_texture_)
+ MEDIA_LOG(INFO, media_log_) << "D3D11VideoDecoder is using single textures";
+ else
+ MEDIA_LOG(INFO, media_log_) << "D3D11VideoDecoder is using array texture";
+
+ Microsoft::WRL::ComPtr<ID3D11VideoDecoder> video_decoder;
+ hr = video_device_->CreateVideoDecoder(
+ decoder_configurator_->DecoderDescriptor(), &dec_config, &video_decoder);
+
+ if (!video_decoder.Get())
+ return Status(StatusCode::kDecoderFailedCreation);
+
+ if (FAILED(hr)) {
+ return Status(StatusCode::kDecoderFailedCreation)
+ .AddCause(HresultToStatus(hr));
+ }
+
+ return {std::move(video_decoder)};
}
void D3D11VideoDecoder::Initialize(const VideoDecoderConfig& config,
@@ -257,57 +361,20 @@ void D3D11VideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
- device_->GetImmediateContext(device_context_.ReleaseAndGetAddressOf());
+ device_->GetImmediateContext(&device_context_);
HRESULT hr;
// TODO(liberato): Handle cleanup better. Also consider being less chatty in
// the logs, since this will fall back.
- hr = device_.CopyTo(video_device_.ReleaseAndGetAddressOf());
- if (!SUCCEEDED(hr)) {
- NotifyError("Failed to get video device");
- return;
- }
-
- decoder_configurator_ = D3D11DecoderConfigurator::Create(
- gpu_preferences_, gpu_workarounds_, config, media_log_.get());
- if (!decoder_configurator_) {
- NotifyError("D3DD11: Config provided unsupported profile");
- return;
- }
-
- if (!decoder_configurator_->SupportsDevice(video_device_)) {
- NotifyError("D3D11: Device does not support decoder GUID");
- return;
- }
-
- FormatSupportChecker format_checker(device_);
- if (!format_checker.Initialize()) {
- // Don't fail; it'll just return no support a lot.
- MEDIA_LOG(WARNING, media_log_)
- << "Could not create format checker, continuing";
- }
-
- // Use IsHDRSupported to guess whether the compositor can output HDR textures.
- // See TextureSelector for notes about why the decoder should not care.
- texture_selector_ = TextureSelector::Create(
- gpu_preferences_, gpu_workarounds_,
- decoder_configurator_->TextureFormat(),
- is_hdr_supported_ ? TextureSelector::HDRMode::kSDROrHDR
- : TextureSelector::HDRMode::kSDROnly,
- &format_checker, media_log_.get());
- if (!texture_selector_) {
- NotifyError("D3DD11: Cannot get TextureSelector for format");
- return;
- }
-
// TODO(liberato): dxva does this. don't know if we need to.
if (!base::FeatureList::IsEnabled(kD3D11VideoDecoderSkipMultithreaded)) {
ComD3D11Multithread multi_threaded;
hr = device_->QueryInterface(IID_PPV_ARGS(&multi_threaded));
if (!SUCCEEDED(hr)) {
- NotifyError("Failed to query ID3D11Multithread");
+ NotifyError(Status(StatusCode::kCannotQueryID3D11Multithread)
+ .AddCause(HresultToStatus(hr)));
return;
}
// TODO(liberato): This is a hack, since the unittest returns
@@ -316,51 +383,20 @@ void D3D11VideoDecoder::Initialize(const VideoDecoderConfig& config,
multi_threaded->SetMultithreadProtected(TRUE);
}
- UINT config_count = 0;
- hr = video_device_->GetVideoDecoderConfigCount(
- decoder_configurator_->DecoderDescriptor(), &config_count);
- if (FAILED(hr) || config_count == 0) {
- NotifyError("Failed to get video decoder config count");
- return;
- }
-
- D3D11_VIDEO_DECODER_CONFIG dec_config = {};
- bool found = false;
-
- for (UINT i = 0; i < config_count; i++) {
- hr = video_device_->GetVideoDecoderConfig(
- decoder_configurator_->DecoderDescriptor(), i, &dec_config);
- if (FAILED(hr)) {
- NotifyError("Failed to get decoder config");
- return;
- }
-
- if (config.codec() == kCodecVP9 && dec_config.ConfigBitstreamRaw == 1) {
- // DXVA VP9 specification mentions ConfigBitstreamRaw "shall be 1".
- found = true;
- break;
- }
-
- if (config.codec() == kCodecH264 && dec_config.ConfigBitstreamRaw == 2) {
- // ConfigBitstreamRaw == 2 means the decoder uses DXVA_Slice_H264_Short.
- found = true;
- break;
- }
- }
- if (!found) {
- NotifyError("Failed to find decoder config");
+ hr = device_.As(&video_device_);
+ if (!SUCCEEDED(hr)) {
+ NotifyError("Failed to get video device");
return;
}
- Microsoft::WRL::ComPtr<ID3D11VideoDecoder> video_decoder;
- hr = video_device_->CreateVideoDecoder(
- decoder_configurator_->DecoderDescriptor(), &dec_config, &video_decoder);
- if (!video_decoder.Get()) {
- NotifyError("Failed to create a video decoder");
+ auto video_decoder_or_error = CreateD3D11Decoder();
+ if (video_decoder_or_error.has_error()) {
+ NotifyError(video_decoder_or_error.error());
return;
}
- hr = InitializeAcceleratedDecoder(config, video_decoder);
+ hr = InitializeAcceleratedDecoder(
+ config, std::move(std::get<0>(video_decoder_or_error.value())));
if (!SUCCEEDED(hr)) {
NotifyError("Failed to get device context");
@@ -556,13 +592,35 @@ void D3D11VideoDecoder::DoDecode() {
return;
CreatePictureBuffers();
} else if (result == media::AcceleratedVideoDecoder::kConfigChange) {
+ // TODO(liberato): I think we support this now, as long as it's the same
+ // decoder. Should update |config_| though.
if (profile_ != accelerated_video_decoder_->GetProfile()) {
// TODO(crbug.com/1022246): Handle profile change.
LOG(ERROR) << "Profile change is not supported";
NotifyError("Profile change is not supported");
return;
}
- CreatePictureBuffers();
+ // Before the first frame, we get a config change that we should ignore.
+ // We only want to take action if this is a mid-stream config change. We
+ // could wait until now to allocate the first D3D11VideoDecoder, but we
+ // don't, so that init can fail rather than decoding if there's a problem
+ // creating it. If there's a config change at the start of the stream,
+ // then this might not work.
+ if (!picture_buffers_.size())
+ continue;
+
+ // Update the config.
+ const auto new_coded_size = accelerated_video_decoder_->GetPicSize();
+ config_.set_coded_size(new_coded_size);
+ auto video_decoder_or_error = CreateD3D11Decoder();
+ if (video_decoder_or_error.has_error()) {
+ NotifyError(video_decoder_or_error.error());
+ return;
+ }
+ DCHECK(set_accelerator_decoder_cb_);
+ set_accelerator_decoder_cb_.Run(
+ std::move(std::get<0>(video_decoder_or_error.value())));
+ picture_buffers_.clear();
} else if (result == media::AcceleratedVideoDecoder::kTryAgain) {
LOG(ERROR) << "Try again is not supported";
NotifyError("Try again is not supported");
@@ -627,14 +685,6 @@ void D3D11VideoDecoder::CreatePictureBuffers() {
DCHECK(texture_selector_);
gfx::Size size = accelerated_video_decoder_->GetPicSize();
- // Create an input texture array.
- ComD3D11Texture2D in_texture =
- decoder_configurator_->CreateOutputTexture(device_, size);
- if (!in_texture) {
- NotifyError("Failed to create a Texture2D for PictureBuffers");
- return;
- }
-
HDRMetadata stream_metadata;
if (config_.hdr_metadata())
stream_metadata = *config_.hdr_metadata();
@@ -653,8 +703,24 @@ void D3D11VideoDecoder::CreatePictureBuffers() {
DCHECK(!buffer->in_picture_use());
picture_buffers_.clear();
+ ComD3D11Texture2D in_texture;
+
// Create each picture buffer.
for (size_t i = 0; i < D3D11DecoderConfigurator::BUFFER_COUNT; i++) {
+ // Create an input texture / texture array if we haven't already.
+ if (!in_texture) {
+ in_texture = decoder_configurator_->CreateOutputTexture(
+ device_, size,
+ use_single_video_decoder_texture_
+ ? 1
+ : D3D11DecoderConfigurator::BUFFER_COUNT);
+ }
+
+ if (!in_texture) {
+ NotifyError("Failed to create a Texture2D for PictureBuffers");
+ return;
+ }
+
auto tex_wrapper = texture_selector_->CreateTextureWrapper(
device_, video_device_, device_context_, size);
if (!tex_wrapper) {
@@ -662,8 +728,10 @@ void D3D11VideoDecoder::CreatePictureBuffers() {
return;
}
- picture_buffers_.push_back(new D3D11PictureBuffer(
- decoder_task_runner_, in_texture, std::move(tex_wrapper), size, i));
+ const size_t array_slice = use_single_video_decoder_texture_ ? 0 : i;
+ picture_buffers_.push_back(
+ new D3D11PictureBuffer(decoder_task_runner_, in_texture, array_slice,
+ std::move(tex_wrapper), size, i /* level */));
if (!picture_buffers_[i]->Init(
gpu_task_runner_, get_helper_cb_, video_device_,
decoder_configurator_->DecoderGuid(), media_log_->Clone())) {
@@ -671,6 +739,11 @@ void D3D11VideoDecoder::CreatePictureBuffers() {
return;
}
+ // If we're using one texture per buffer, rather than an array, then clear
+ // the ref to it so that we allocate a new one above.
+ if (use_single_video_decoder_texture_)
+ in_texture = nullptr;
+
// If we have display metadata, then tell the processor. Note that the
// order of these calls is important, and we must set the display metadata
// if we set the stream metadata, else it can crash on some AMD cards.
@@ -750,7 +823,7 @@ bool D3D11VideoDecoder::OutputResult(const CodecPicture* picture,
frame->SetReleaseMailboxCB(
base::BindOnce(release_mailbox_cb_, std::move(wait_complete_cb)));
- frame->metadata()->SetBoolean(VideoFrameMetadata::POWER_EFFICIENT, true);
+ frame->metadata()->power_efficient = true;
// For NV12, overlay is allowed by default. If the decoder is going to support
// non-NV12 textures, then this may have to be conditionally set. Also note
// that ALLOW_OVERLAY is required for encrypted video path.
@@ -765,28 +838,33 @@ bool D3D11VideoDecoder::OutputResult(const CodecPicture* picture,
// presenter decide if it wants to.
const bool allow_overlay =
base::FeatureList::IsEnabled(kD3D11VideoDecoderAllowOverlay);
- frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY,
- allow_overlay);
+ frame->metadata()->allow_overlay = allow_overlay;
frame->set_color_space(output_color_space);
output_cb_.Run(frame);
return true;
}
-// TODO(tmathmeyer) eventually have this take a Status and pass it through
-// to each of the callbacks.
+void D3D11VideoDecoder::SetDecoderCB(const SetAcceleratorDecoderCB& cb) {
+ set_accelerator_decoder_cb_ = cb;
+}
+
+// TODO(tmathmeyer): Please don't add new uses of this overload.
void D3D11VideoDecoder::NotifyError(const char* reason) {
+ NotifyError(Status(StatusCode::kDecoderInitializeNeverCompleted, reason));
+}
+
+void D3D11VideoDecoder::NotifyError(const Status& reason) {
TRACE_EVENT0("gpu", "D3D11VideoDecoder::NotifyError");
state_ = State::kError;
- DLOG(ERROR) << reason;
// TODO(tmathmeyer) - Remove this after plumbing Status through the
// decode_cb and input_buffer_queue cb's.
- MEDIA_LOG(ERROR, media_log_) << reason;
+ MEDIA_LOG(ERROR, media_log_)
+ << "D3D11VideoDecoder error: " << std::hex << reason.code();
if (init_cb_)
- std::move(init_cb_).Run(
- Status(StatusCode::kDecoderInitializeNeverCompleted, reason));
+ std::move(init_cb_).Run(reason);
current_buffer_ = nullptr;
if (current_decode_cb_)
@@ -876,97 +954,35 @@ D3D11VideoDecoder::GetSupportedVideoDecoderConfigs(
return {};
}
+ const auto supported_resolutions =
+ GetSupportedD3D11VideoDecoderResolutions(d3d11_device, gpu_workarounds);
+
std::vector<SupportedVideoDecoderConfig> configs;
- // VP9 has no default resolutions since it may not even be supported.
- ResolutionPair max_h264_resolutions(gfx::Size(1920, 1088), gfx::Size());
- ResolutionPair max_vp8_resolutions;
- ResolutionPair max_vp9_profile0_resolutions;
- ResolutionPair max_vp9_profile2_resolutions;
- const gfx::Size min_resolution(64, 64);
-
- GetResolutionsForDecoders(
- {D3D11_DECODER_PROFILE_H264_VLD_NOFGT}, d3d11_device, gpu_workarounds,
- &max_h264_resolutions, &max_vp8_resolutions,
- &max_vp9_profile0_resolutions, &max_vp9_profile2_resolutions);
-
- if (max_h264_resolutions.first.width() > 0) {
- // Push H264 configs, except HIGH10.
- // landscape
- configs.push_back(SupportedVideoDecoderConfig(
- H264PROFILE_MIN, // profile_min
- static_cast<VideoCodecProfile>(H264PROFILE_HIGH10PROFILE -
- 1), // profile_max
- min_resolution, // coded_size_min
- max_h264_resolutions.first, // coded_size_max
- false, // allow_encrypted
- false)); // require_encrypted
- configs.push_back(SupportedVideoDecoderConfig(
- static_cast<VideoCodecProfile>(H264PROFILE_HIGH10PROFILE +
- 1), // profile_min
- H264PROFILE_MAX, // profile_max
- min_resolution, // coded_size_min
- max_h264_resolutions.first, // coded_size_max
- false, // allow_encrypted
- false)); // require_encrypted
-
- // portrait
- configs.push_back(SupportedVideoDecoderConfig(
- H264PROFILE_MIN, // profile_min
- static_cast<VideoCodecProfile>(H264PROFILE_HIGH10PROFILE -
- 1), // profile_max
- min_resolution, // coded_size_min
- max_h264_resolutions.second, // coded_size_max
- false, // allow_encrypted
- false)); // require_encrypted
- configs.push_back(SupportedVideoDecoderConfig(
- static_cast<VideoCodecProfile>(H264PROFILE_HIGH10PROFILE +
- 1), // profile_min
- H264PROFILE_MAX, // profile_max
- min_resolution, // coded_size_min
- max_h264_resolutions.second, // coded_size_max
- false, // allow_encrypted
- false)); // require_encrypted
- }
-
- // TODO(liberato): Fill this in for VP8.
-
- if (max_vp9_profile0_resolutions.first.width()) {
- // landscape
- configs.push_back(SupportedVideoDecoderConfig(
- VP9PROFILE_PROFILE0, // profile_min
- VP9PROFILE_PROFILE0, // profile_max
- min_resolution, // coded_size_min
- max_vp9_profile0_resolutions.first, // coded_size_max
- false, // allow_encrypted
- false)); // require_encrypted
- // portrait
- configs.push_back(SupportedVideoDecoderConfig(
- VP9PROFILE_PROFILE0, // profile_min
- VP9PROFILE_PROFILE0, // profile_max
- min_resolution, // coded_size_min
- max_vp9_profile0_resolutions.second, // coded_size_max
- false, // allow_encrypted
- false)); // require_encrypted
- }
-
- if (base::FeatureList::IsEnabled(kD3D11VideoDecoderVP9Profile2)) {
- if (max_vp9_profile2_resolutions.first.width()) {
- // landscape
- configs.push_back(SupportedVideoDecoderConfig(
- VP9PROFILE_PROFILE2, // profile_min
- VP9PROFILE_PROFILE2, // profile_max
- min_resolution, // coded_size_min
- max_vp9_profile2_resolutions.first, // coded_size_max
- false, // allow_encrypted
- false)); // require_encrypted
- // portrait
- configs.push_back(SupportedVideoDecoderConfig(
- VP9PROFILE_PROFILE2, // profile_min
- VP9PROFILE_PROFILE2, // profile_max
- min_resolution, // coded_size_min
- max_vp9_profile2_resolutions.second, // coded_size_max
- false, // allow_encrypted
- false)); // require_encrypted
+ for (const auto& kv : supported_resolutions) {
+ const auto profile = kv.first;
+ if (profile == VP9PROFILE_PROFILE2 &&
+ !base::FeatureList::IsEnabled(kD3D11VideoDecoderVP9Profile2)) {
+ continue;
+ }
+
+ // TODO(liberato): Add VP8 and AV1 support to D3D11VideoDecoder.
+ if (profile == VP8PROFILE_ANY ||
+ (profile >= AV1PROFILE_MIN && profile <= AV1PROFILE_MAX)) {
+ continue;
+ }
+
+ const auto& resolution_range = kv.second;
+ configs.emplace_back(profile, profile, resolution_range.min_resolution,
+ resolution_range.max_landscape_resolution,
+ /*allow_encrypted=*/false,
+ /*require_encrypted=*/false);
+ if (!resolution_range.max_portrait_resolution.IsEmpty() &&
+ resolution_range.max_portrait_resolution !=
+ resolution_range.max_landscape_resolution) {
+ configs.emplace_back(profile, profile, resolution_range.min_resolution,
+ resolution_range.max_portrait_resolution,
+ /*allow_encrypted=*/false,
+ /*require_encrypted=*/false);
}
}
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.h b/chromium/media/gpu/windows/d3d11_video_decoder.h
index d9ba26ab254..70e07b81300 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.h
@@ -85,6 +85,7 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
D3D11PictureBuffer* GetPicture() override;
bool OutputResult(const CodecPicture* picture,
D3D11PictureBuffer* picture_buffer) override;
+ void SetDecoderCB(const SetAcceleratorDecoderCB&) override;
static bool GetD3D11FeatureLevel(
ComD3D11Device dev,
@@ -142,6 +143,12 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
// gpu main thread.
void CreatePictureBuffers();
+ // Create a D3D11VideoDecoder, if possible, based on the current config.
+ // TODO(liberato): we use a tuple only because ErrorOr<ComD3D111VideoDecoder>
+ // doesn't work. Something about base::Optional trying to convert to void*,
+ // but the conversion is ambiguous.
+ ErrorOr<std::tuple<ComD3D11VideoDecoder>> CreateD3D11Decoder();
+
enum class NotSupportedReason {
kVideoIsSupported = 0,
@@ -205,8 +212,10 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
};
// Enter the kError state. This will fail any pending |init_cb_| and / or
- // pending decode as well.
+ // pending decode as well. Do not add new uses of the char* overload; send a
+ // Status instead.
void NotifyError(const char* reason);
+ void NotifyError(const Status& reason);
// The implementation, which lives on the GPU main thread.
base::SequenceBound<D3D11VideoDecoderImpl> impl_;
@@ -281,7 +290,15 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
SupportedConfigs supported_configs_;
// Should we assume that we're outputting to an HDR display?
- bool is_hdr_supported_;
+ bool is_hdr_supported_ = false;
+
+ // Should we use multiple single textures for the decoder output (true) or one
+ // texture with multiple array slices (false)?
+ bool use_single_video_decoder_texture_ = false;
+
+ // Word-salad callback to set / update D3D11 Video callback to the
+ // accelerator. Needed for config changes.
+ SetAcceleratorDecoderCB set_accelerator_decoder_cb_;
base::WeakPtrFactory<D3D11VideoDecoder> weak_factory_{this};
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_client.h b/chromium/media/gpu/windows/d3d11_video_decoder_client.h
index a80e8430c7f..0286ad41ade 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_client.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_client.h
@@ -5,7 +5,9 @@
#ifndef MEDIA_GPU_WINDOWS_D3D11_VIDEO_DECODER_CLIENT_H_
#define MEDIA_GPU_WINDOWS_D3D11_VIDEO_DECODER_CLIENT_H_
+#include "base/callback.h"
#include "media/base/video_color_space.h"
+#include "media/gpu/windows/d3d11_com_defs.h"
namespace media {
@@ -16,10 +18,18 @@ class D3D11PictureBuffer;
// required methods to D3D11VideoAccelerators.
class D3D11VideoDecoderClient {
public:
+ using SetAcceleratorDecoderCB =
+ base::RepeatingCallback<void(ComD3D11VideoDecoder)>;
+
virtual D3D11PictureBuffer* GetPicture() = 0;
virtual bool OutputResult(const CodecPicture* picture,
D3D11PictureBuffer* picture_buffer) = 0;
+ // Called by the accelerator to provide a callback that can be used to give
+ // the accelerator a D3D11VideoDecoder object. Must be called during
+ // construction of the accelerator.
+ virtual void SetDecoderCB(const SetAcceleratorDecoderCB&) = 0;
+
protected:
virtual ~D3D11VideoDecoderClient() = default;
};
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
index 91908445262..bb950b7717c 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
@@ -283,7 +283,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithLegacyGPU) {
}
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithGPUWorkaroundDisableVPX) {
- gpu_workarounds_.disable_accelerated_vpx_decode = true;
+ gpu_workarounds_.disable_accelerated_vp9_decode = true;
VideoDecoderConfig configuration =
TestVideoConfig::NormalCodecProfile(kCodecVP9, VP9PROFILE_PROFILE0);
diff --git a/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc b/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
index eeec7896bde..7fe0f7f7eca 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
@@ -36,17 +36,17 @@ CreateSubsampleMappingBlock(const std::vector<SubsampleEntry>& from) {
D3D11VP9Accelerator::D3D11VP9Accelerator(
D3D11VideoDecoderClient* client,
MediaLog* media_log,
- ComD3D11VideoDecoder video_decoder,
ComD3D11VideoDevice video_device,
std::unique_ptr<VideoContextWrapper> video_context)
: client_(client),
media_log_(media_log),
status_feedback_(0),
- video_decoder_(std::move(video_decoder)),
video_device_(std::move(video_device)),
video_context_(std::move(video_context)) {
DCHECK(client);
DCHECK(media_log_);
+ client->SetDecoderCB(base::BindRepeating(
+ &D3D11VP9Accelerator::SetVideoDecoder, base::Unretained(this)));
}
D3D11VP9Accelerator::~D3D11VP9Accelerator() {}
@@ -111,7 +111,7 @@ void D3D11VP9Accelerator::CopyFrameParams(const D3D11VP9Picture& pic,
pic_params->BitDepthMinus8Luma = pic_params->BitDepthMinus8Chroma =
pic.frame_hdr->bit_depth - 8;
- pic_params->CurrPic.Index7Bits = pic.level();
+ pic_params->CurrPic.Index7Bits = pic.picture_index();
pic_params->frame_type = !pic.frame_hdr->IsKeyframe();
COPY_PARAM(subsampling_x);
@@ -150,7 +150,7 @@ void D3D11VP9Accelerator::CopyReferenceFrames(
if (ref_pic) {
scoped_refptr<D3D11VP9Picture> our_ref_pic(
static_cast<D3D11VP9Picture*>(ref_pic.get()));
- pic_params->ref_frame_map[i].Index7Bits = our_ref_pic->level();
+ pic_params->ref_frame_map[i].Index7Bits = our_ref_pic->picture_index();
pic_params->ref_frame_coded_width[i] = texture_descriptor.Width;
pic_params->ref_frame_coded_height[i] = texture_descriptor.Height;
} else {
@@ -185,19 +185,16 @@ void D3D11VP9Accelerator::CopyLoopFilterParams(
// base::size(...) doesn't work well in an array initializer.
DCHECK_EQ(4lu, base::size(pic_params->ref_deltas));
- int ref_deltas[4] = {0};
for (size_t i = 0; i < base::size(pic_params->ref_deltas); i++) {
- if (loop_filter_params.update_ref_deltas[i])
- ref_deltas[i] = loop_filter_params.ref_deltas[i];
- pic_params->ref_deltas[i] = ref_deltas[i];
+ // The update_ref_deltas[i] is _only_ for parsing! it allows omission of the
+ // 6 bytes that would otherwise be needed for a new value to overwrite the
+ // global one. It has nothing to do with setting the ref_deltas here.
+ pic_params->ref_deltas[i] = loop_filter_params.ref_deltas[i];
}
- int mode_deltas[2] = {0};
DCHECK_EQ(2lu, base::size(pic_params->mode_deltas));
for (size_t i = 0; i < base::size(pic_params->mode_deltas); i++) {
- if (loop_filter_params.update_mode_deltas[i])
- mode_deltas[i] = loop_filter_params.mode_deltas[i];
- pic_params->mode_deltas[i] = mode_deltas[i];
+ pic_params->mode_deltas[i] = loop_filter_params.mode_deltas[i];
}
}
@@ -381,4 +378,8 @@ bool D3D11VP9Accelerator::GetFrameContext(scoped_refptr<VP9Picture> picture,
return false;
}
+void D3D11VP9Accelerator::SetVideoDecoder(ComD3D11VideoDecoder video_decoder) {
+ video_decoder_ = std::move(video_decoder);
+}
+
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_vp9_accelerator.h b/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
index dc262d68d26..43c2c26e595 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
@@ -24,7 +24,6 @@ class D3D11VP9Accelerator : public VP9Decoder::VP9Accelerator {
public:
D3D11VP9Accelerator(D3D11VideoDecoderClient* client,
MediaLog* media_log,
- ComD3D11VideoDecoder video_decoder,
ComD3D11VideoDevice video_device,
std::unique_ptr<VideoContextWrapper> video_context);
~D3D11VP9Accelerator() override;
@@ -69,6 +68,8 @@ class D3D11VP9Accelerator : public VP9Decoder::VP9Accelerator {
void RecordFailure(const std::string& fail_type, const std::string& reason);
+ void SetVideoDecoder(ComD3D11VideoDecoder video_decoder);
+
D3D11VideoDecoderClient* client_;
MediaLog* const media_log_;
UINT status_feedback_;
diff --git a/chromium/media/gpu/windows/d3d11_vp9_picture.cc b/chromium/media/gpu/windows/d3d11_vp9_picture.cc
index 24ae6033294..5efa82b5be0 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_picture.cc
+++ b/chromium/media/gpu/windows/d3d11_vp9_picture.cc
@@ -7,7 +7,8 @@
namespace media {
D3D11VP9Picture::D3D11VP9Picture(D3D11PictureBuffer* picture_buffer)
- : picture_buffer_(picture_buffer), level_(picture_buffer_->level()) {
+ : picture_buffer_(picture_buffer),
+ picture_index_(picture_buffer_->picture_index()) {
picture_buffer_->set_in_picture_use(true);
}
diff --git a/chromium/media/gpu/windows/d3d11_vp9_picture.h b/chromium/media/gpu/windows/d3d11_vp9_picture.h
index 3d3bcbbb3f9..27b144402cc 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_picture.h
+++ b/chromium/media/gpu/windows/d3d11_vp9_picture.h
@@ -19,14 +19,14 @@ class D3D11VP9Picture : public VP9Picture {
D3D11PictureBuffer* picture_buffer() const { return picture_buffer_; }
- size_t level() const { return level_; }
+ size_t picture_index() const { return picture_index_; }
protected:
~D3D11VP9Picture() override;
private:
D3D11PictureBuffer* picture_buffer_;
- size_t level_;
+ size_t picture_index_;
};
} // namespace media
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
index ff451f0bb17..350458a8598 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
@@ -7,10 +7,6 @@
#include <algorithm>
#include <memory>
-#if !defined(OS_WIN)
-#error This file should only be built on Windows.
-#endif // !defined(OS_WIN)
-
#include <codecapi.h>
#include <dxgi1_2.h>
#include <ks.h>
@@ -121,13 +117,6 @@ DEFINE_GUID(MF_XVP_PLAYBACK_MODE,
0xcc,
0xe9);
-// Defines the GUID for the Intel H264 DXVA device.
-static const GUID DXVA2_Intel_ModeH264_E = {
- 0x604F8E68,
- 0x4951,
- 0x4c54,
- {0x88, 0xFE, 0xAB, 0xD2, 0x5C, 0x15, 0xB3, 0xD6}};
-
static const CLSID CLSID_CAV1DecoderMFT = {
0xC843981A,
0x3359,
@@ -184,7 +173,7 @@ HRESULT g_last_device_removed_reason;
namespace media {
-static const VideoCodecProfile kSupportedProfiles[] = {
+constexpr VideoCodecProfile kSupportedProfiles[] = {
H264PROFILE_BASELINE, H264PROFILE_MAIN, H264PROFILE_HIGH,
VP8PROFILE_ANY, VP9PROFILE_PROFILE0, VP9PROFILE_PROFILE2,
AV1PROFILE_PROFILE_MAIN, AV1PROFILE_PROFILE_HIGH, AV1PROFILE_PROFILE_PRO};
@@ -606,7 +595,11 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
enable_low_latency_(gpu_preferences.enable_low_latency_dxva),
support_share_nv12_textures_(
gpu_preferences.enable_zero_copy_dxgi_video &&
- !workarounds.disable_dxgi_zero_copy_video),
+ !workarounds.disable_dxgi_zero_copy_video &&
+ /* Sharing will use an array texture, so avoid it if arrays are being
+ * worked around. https://crbug.com/971952 .
+ */
+ !workarounds.use_single_video_decoder_texture),
num_picture_buffers_requested_(support_share_nv12_textures_
? kNumPictureBuffersForZeroCopy
: kNumPictureBuffers),
@@ -619,8 +612,12 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
use_keyed_mutex_(false),
using_angle_device_(false),
using_debug_device_(false),
- enable_accelerated_vpx_decode_(
- !workarounds.disable_accelerated_vpx_decode),
+ enable_accelerated_av1_decode_(
+ !workarounds.disable_accelerated_av1_decode),
+ enable_accelerated_vp8_decode_(
+ !workarounds.disable_accelerated_vp8_decode),
+ enable_accelerated_vp9_decode_(
+ !workarounds.disable_accelerated_vp9_decode),
processing_config_changed_(false),
use_empty_video_hdr_metadata_(workarounds.use_empty_video_hdr_metadata) {
weak_ptr_ = weak_this_factory_.GetWeakPtr();
@@ -1343,84 +1340,28 @@ DXVAVideoDecodeAccelerator::GetSupportedProfiles(
}
}
- // On Windows 7 the maximum resolution supported by media foundation is
- // 1920 x 1088. We use 1088 to account for 16x16 macroblocks.
- ResolutionPair max_h264_resolutions(gfx::Size(1920, 1088), gfx::Size());
-
- // VP8/VP9 has no default resolutions since it may not even be supported.
- ResolutionPair max_vp8_resolutions;
- ResolutionPair max_vp9_profile0_resolutions;
- ResolutionPair max_vp9_profile2_resolutions;
-
- GetResolutionsForDecoders({DXVA2_ModeH264_E, DXVA2_Intel_ModeH264_E},
- gl::QueryD3D11DeviceObjectFromANGLE(), workarounds,
- &max_h264_resolutions, &max_vp8_resolutions,
- &max_vp9_profile0_resolutions,
- &max_vp9_profile2_resolutions);
-
- for (const auto& supported_profile : kSupportedProfiles) {
- const bool is_h264 = supported_profile >= H264PROFILE_MIN &&
- supported_profile <= H264PROFILE_MAX;
- const bool is_vp9 = supported_profile >= VP9PROFILE_MIN &&
- supported_profile <= VP9PROFILE_MAX;
- const bool is_vp8 = supported_profile == VP8PROFILE_ANY;
- const bool is_av1 = supported_profile >= AV1PROFILE_MIN &&
- supported_profile <= AV1PROFILE_MAX;
- DCHECK(is_h264 || is_vp9 || is_vp8 || is_av1);
-
- ResolutionPair max_resolutions;
- if (is_h264) {
- max_resolutions = max_h264_resolutions;
- } else if (supported_profile == VP9PROFILE_PROFILE0) {
- max_resolutions = max_vp9_profile0_resolutions;
- } else if (supported_profile == VP9PROFILE_PROFILE2) {
- max_resolutions = max_vp9_profile2_resolutions;
- } else if (is_vp8) {
- max_resolutions = max_vp8_resolutions;
- } else if (is_av1) {
- if (!base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding))
- continue;
-
- // TODO(dalecurtis): Update GetResolutionsForDecoders() to support AV1.
- SupportedProfile profile;
- profile.profile = supported_profile;
- profile.min_resolution = gfx::Size();
- profile.max_resolution = gfx::Size(8192, 8192);
- profiles.push_back(profile);
- continue;
- }
-
- // Skip adding VPx profiles if it's not supported or disabled.
- if ((is_vp9 || is_vp8) && max_resolutions.first.IsEmpty())
- continue;
-
- // Windows Media Foundation H.264 decoding does not support decoding videos
- // with any dimension smaller than 48 pixels:
- // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815
- //
- // TODO(dalecurtis): These values are too low. We should only be using
- // hardware decode for videos above ~360p, see http://crbug.com/684792.
- const gfx::Size min_resolution =
- is_h264 ? gfx::Size(48, 48) : gfx::Size(16, 16);
-
+ const auto supported_resolutions = GetSupportedD3D11VideoDecoderResolutions(
+ gl::QueryD3D11DeviceObjectFromANGLE(), workarounds);
+ for (const auto& kv : supported_resolutions) {
+ const auto& resolution_range = kv.second;
{
SupportedProfile profile;
- profile.profile = supported_profile;
- profile.min_resolution = min_resolution;
- profile.max_resolution = max_resolutions.first;
+ profile.profile = kv.first;
+ profile.min_resolution = resolution_range.min_resolution;
+ profile.max_resolution = resolution_range.max_landscape_resolution;
profiles.push_back(profile);
}
- const gfx::Size portrait_max_resolution = max_resolutions.second;
- if (!portrait_max_resolution.IsEmpty()) {
+ if (!resolution_range.max_portrait_resolution.IsEmpty() &&
+ resolution_range.max_portrait_resolution !=
+ resolution_range.max_landscape_resolution) {
SupportedProfile profile;
- profile.profile = supported_profile;
- profile.min_resolution = min_resolution;
- profile.max_resolution = portrait_max_resolution;
+ profile.profile = kv.first;
+ profile.min_resolution = resolution_range.min_resolution;
+ profile.max_resolution = resolution_range.max_portrait_resolution;
profiles.push_back(profile);
}
}
-
return profiles;
}
@@ -1475,18 +1416,21 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(VideoCodecProfile profile) {
"blacklisted version of msmpeg2vdec.dll 6.1.7140", false);
codec_ = kCodecH264;
clsid = __uuidof(CMSH264DecoderMFT);
- } else if (enable_accelerated_vpx_decode_ &&
- ((profile >= VP9PROFILE_PROFILE0 &&
- profile <= VP9PROFILE_PROFILE3) ||
- profile == VP8PROFILE_ANY)) {
+ } else if ((profile >= VP9PROFILE_PROFILE0 &&
+ profile <= VP9PROFILE_PROFILE3) ||
+ profile == VP8PROFILE_ANY) {
codec_ = profile == VP8PROFILE_ANY ? kCodecVP8 : kCodecVP9;
- clsid = CLSID_MSVPxDecoder;
- decoder_dll = ::LoadLibrary(kMSVPxDecoderDLLName);
- if (decoder_dll)
- using_ms_vpx_mft_ = true;
+ if ((codec_ == kCodecVP8 && enable_accelerated_vp8_decode_) ||
+ (codec_ == kCodecVP9 && enable_accelerated_vp9_decode_)) {
+ clsid = CLSID_MSVPxDecoder;
+ decoder_dll = ::LoadLibrary(kMSVPxDecoderDLLName);
+ if (decoder_dll)
+ using_ms_vpx_mft_ = true;
+ }
}
- if (base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding) &&
+ if (enable_accelerated_av1_decode_ &&
+ base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding) &&
(profile >= AV1PROFILE_MIN && profile <= AV1PROFILE_MAX)) {
codec_ = kCodecAV1;
clsid = CLSID_CAV1DecoderMFT;
@@ -1512,17 +1456,15 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(VideoCodecProfile profile) {
CHECK(create_dxgi_device_manager_);
if (media_log_)
MEDIA_LOG(INFO, media_log_) << "Using D3D11 device for DXVA";
- RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(),
- "Failed to initialize DX11 device and manager",
- PLATFORM_FAILURE, false);
+ RETURN_ON_FAILURE(CreateDX11DevManager(),
+ "Failed to initialize DX11 device and manager", false);
device_manager_to_use =
reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.Get());
} else {
if (media_log_)
MEDIA_LOG(INFO, media_log_) << "Using D3D9 device for DXVA";
- RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
- "Failed to initialize D3D device and manager",
- PLATFORM_FAILURE, false);
+ RETURN_ON_FAILURE(CreateD3DDevManager(),
+ "Failed to initialize D3D device and manager", false);
device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.Get());
}
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
index 4377744a8fd..21d3ec0d3fb 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
@@ -7,18 +7,12 @@
#include <d3d11_1.h>
#include <d3d9.h>
+#include <dxva2api.h>
#include <initguid.h>
+#include <mfidl.h>
#include <stdint.h>
#include <wrl/client.h>
-// Work around bug in this header by disabling the relevant warning for it.
-// https://connect.microsoft.com/VisualStudio/feedback/details/911260/dxva2api-h-in-win8-sdk-triggers-c4201-with-w4
-#pragma warning(push)
-#pragma warning(disable : 4201)
-#include <dxva2api.h>
-#pragma warning(pop)
-#include <mfidl.h>
-
#include <list>
#include <map>
#include <memory>
@@ -588,8 +582,12 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
bool using_angle_device_;
bool using_debug_device_;
- // Enables hardware acceleration for VP9 video decoding.
- const bool enable_accelerated_vpx_decode_;
+ // Enables hardware acceleration for AV1 video decoding.
+ const bool enable_accelerated_av1_decode_;
+
+ // Enables hardware acceleration for VP8/VP9 video decoding.
+ const bool enable_accelerated_vp8_decode_;
+ const bool enable_accelerated_vp9_decode_;
// The media foundation H.264 decoder has problems handling changes like
// resolution change, bitrate change etc. If we reinitialize the decoder
diff --git a/chromium/media/gpu/windows/supported_profile_helpers.cc b/chromium/media/gpu/windows/supported_profile_helpers.cc
index 5004c5799b7..7d8622286f1 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers.cc
+++ b/chromium/media/gpu/windows/supported_profile_helpers.cc
@@ -9,16 +9,14 @@
#include <memory>
#include <utility>
+#include <d3d9.h>
+#include <dxva2api.h>
+
#include "base/feature_list.h"
#include "base/trace_event/trace_event.h"
#include "base/win/windows_version.h"
-#include "build/build_config.h"
-#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "media/base/media_switches.h"
-
-#if !defined(OS_WIN)
-#error This file should only be built on Windows.
-#endif // !defined(OS_WIN)
+#include "media/gpu/windows/av1_guids.h"
namespace {
@@ -26,7 +24,7 @@ namespace {
// or earlier, and don't handle resolutions higher than 1920 x 1088 well.
//
// NOTE: This list must be kept in sorted order.
-static const uint16_t kLegacyAmdGpuList[] = {
+constexpr uint16_t kLegacyAmdGpuList[] = {
0x130f, 0x6700, 0x6701, 0x6702, 0x6703, 0x6704, 0x6705, 0x6706, 0x6707,
0x6708, 0x6709, 0x6718, 0x6719, 0x671c, 0x671d, 0x671f, 0x6720, 0x6721,
0x6722, 0x6723, 0x6724, 0x6725, 0x6726, 0x6727, 0x6728, 0x6729, 0x6738,
@@ -67,14 +65,18 @@ static const uint16_t kLegacyAmdGpuList[] = {
// 1920 x 1088 are supported. Updated based on crash reports.
//
// NOTE: This list must be kept in sorted order.
-static const uint16_t kLegacyIntelGpuList[] = {
+constexpr uint16_t kLegacyIntelGpuList[] = {
0x102, 0x106, 0x116, 0x126, 0x152, 0x156, 0x166,
0x402, 0x406, 0x416, 0x41e, 0xa06, 0xa16, 0xf31,
};
-} // namespace
-
-namespace media {
+// Windows Media Foundation H.264 decoding does not support decoding videos
+// with any dimension smaller than 48 pixels:
+// http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815
+//
+// TODO(dalecurtis): These values are too low. We should only be using
+// hardware decode for videos above ~360p, see http://crbug.com/684792.
+constexpr gfx::Size kMinResolution(64, 64);
// Certain AMD GPU drivers like R600, R700, Evergreen and Cayman and some second
// generation Intel GPU drivers crash if we create a video device with a
@@ -157,32 +159,12 @@ bool IsResolutionSupportedForDevice(const gfx::Size& resolution_to_test,
config_count > 0;
}
-// Returns a tuple of (LandscapeMax, PortraitMax). If landscape maximum can not
-// be computed, the value of |default_max| is returned for the landscape maximum
-// and a zero size value is returned for portrait max (erring conservatively).
-ResolutionPair GetMaxResolutionsForGUIDs(
- const gfx::Size& default_max,
+media::SupportedResolutionRange GetResolutionsForGUID(
ID3D11VideoDevice* video_device,
- const std::vector<GUID>& valid_guids,
+ const GUID& decoder_guid,
const std::vector<gfx::Size>& resolutions_to_test,
- DXGI_FORMAT format) {
- ResolutionPair result(default_max, gfx::Size());
-
- // Enumerate supported video profiles and look for the profile.
- GUID decoder_guid = GUID_NULL;
- UINT profile_count = video_device->GetVideoDecoderProfileCount();
- for (UINT profile_idx = 0; profile_idx < profile_count; profile_idx++) {
- GUID profile_id = {};
- if (SUCCEEDED(
- video_device->GetVideoDecoderProfile(profile_idx, &profile_id)) &&
- std::find(valid_guids.begin(), valid_guids.end(), profile_id) !=
- valid_guids.end()) {
- decoder_guid = profile_id;
- break;
- }
- }
- if (decoder_guid == GUID_NULL)
- return result;
+ DXGI_FORMAT format = DXGI_FORMAT_NV12) {
+ media::SupportedResolutionRange result;
// Verify input is in ascending order by height.
DCHECK(std::is_sorted(resolutions_to_test.begin(), resolutions_to_test.end(),
@@ -195,32 +177,54 @@ ResolutionPair GetMaxResolutionsForGUIDs(
format)) {
break;
}
- result.first = res;
+ result.max_landscape_resolution = res;
}
// The max supported portrait resolution should be just be a w/h flip of the
// max supported landscape resolution.
- gfx::Size flipped(result.first.height(), result.first.width());
- if (IsResolutionSupportedForDevice(flipped, decoder_guid, video_device,
+ const gfx::Size flipped(result.max_landscape_resolution.height(),
+ result.max_landscape_resolution.width());
+ if (flipped == result.max_landscape_resolution ||
+ IsResolutionSupportedForDevice(flipped, decoder_guid, video_device,
format)) {
- result.second = flipped;
+ result.max_portrait_resolution = flipped;
}
+ if (!result.max_landscape_resolution.IsEmpty())
+ result.min_resolution = kMinResolution;
+
return result;
}
-// TODO(tmathmeyer) refactor this so that we don'ty call
-// GetMaxResolutionsForGUIDS so many times.
-void GetResolutionsForDecoders(std::vector<GUID> h264_guids,
- ComD3D11Device device,
- const gpu::GpuDriverBugWorkarounds& workarounds,
- ResolutionPair* h264_resolutions,
- ResolutionPair* vp8_resolutions,
- ResolutionPair* vp9_0_resolutions,
- ResolutionPair* vp9_2_resolutions) {
- TRACE_EVENT0("gpu,startup", "GetResolutionsForDecoders");
+} // namespace
+
+namespace media {
+
+SupportedResolutionRangeMap GetSupportedD3D11VideoDecoderResolutions(
+ ComD3D11Device device,
+ const gpu::GpuDriverBugWorkarounds& workarounds) {
+ TRACE_EVENT0("gpu,startup", "GetSupportedD3D11VideoDecoderResolutions");
+ SupportedResolutionRangeMap supported_resolutions;
+
+ // We always insert support for H.264 regardless of the tests below. It's old
+ // enough to be ubiquitous.
+ //
+ // On Windows 7 the maximum resolution supported by media foundation is
+ // 1920 x 1088. We use 1088 to account for 16x16 macro-blocks.
+ constexpr gfx::Size kDefaultMaxH264Resolution(1920, 1088);
+ SupportedResolutionRange h264_profile;
+ h264_profile.min_resolution = kMinResolution;
+ h264_profile.max_landscape_resolution = kDefaultMaxH264Resolution;
+
+ // We don't have a way to map DXVA support to specific H.264 profiles, so just
+ // mark all the common ones with the same level of support.
+ constexpr VideoCodecProfile kSupportedH264Profiles[] = {
+ H264PROFILE_BASELINE, H264PROFILE_MAIN, H264PROFILE_HIGH};
+ for (const auto profile : kSupportedH264Profiles)
+ supported_resolutions[profile] = h264_profile;
+
if (base::win::GetVersion() <= base::win::Version::WIN7)
- return;
+ return supported_resolutions;
// To detect if a driver supports the desired resolutions, we try and create
// a DXVA decoder instance for that resolution and profile. If that succeeds
@@ -228,43 +232,99 @@ void GetResolutionsForDecoders(std::vector<GUID> h264_guids,
// Legacy AMD drivers with UVD3 or earlier and some Intel GPU's crash while
// creating surfaces larger than 1920 x 1088.
if (!device || IsLegacyGPU(device.Get()))
- return;
+ return supported_resolutions;
ComD3D11VideoDevice video_device;
if (FAILED(device.As(&video_device)))
- return;
+ return supported_resolutions;
- *h264_resolutions = GetMaxResolutionsForGUIDs(
- h264_resolutions->first, video_device.Get(), h264_guids,
- {gfx::Size(2560, 1440), gfx::Size(3840, 2160), gfx::Size(4096, 2160),
- gfx::Size(4096, 2304)});
+ const std::vector<gfx::Size> kModernResolutions = {
+ gfx::Size(4096, 2160), gfx::Size(4096, 2304), gfx::Size(7680, 4320),
+ gfx::Size(8192, 4320), gfx::Size(8192, 8192)};
- if (workarounds.disable_accelerated_vpx_decode)
- return;
+ const bool should_test_for_av1_support =
+ base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding) &&
+ !workarounds.disable_accelerated_av1_decode;
- if (base::FeatureList::IsEnabled(kMediaFoundationVP8Decoding)) {
- *vp8_resolutions = GetMaxResolutionsForGUIDs(
- vp8_resolutions->first, video_device.Get(),
- {D3D11_DECODER_PROFILE_VP8_VLD},
- {gfx::Size(4096, 2160), gfx::Size(4096, 2304)});
+ // Enumerate supported video profiles and look for the known profile for each
+ // codec. We first look through the decoder profiles so we don't run N
+ // resolution tests for a profile that's unsupported.
+ UINT profile_count = video_device->GetVideoDecoderProfileCount();
+ for (UINT i = 0; i < profile_count; i++) {
+ GUID profile_id;
+ if (FAILED(video_device->GetVideoDecoderProfile(i, &profile_id)))
+ continue;
+
+ if (profile_id == D3D11_DECODER_PROFILE_H264_VLD_NOFGT) {
+ const auto result = GetResolutionsForGUID(
+ video_device.Get(), profile_id,
+ {gfx::Size(2560, 1440), gfx::Size(3840, 2160), gfx::Size(4096, 2160),
+ gfx::Size(4096, 2304), gfx::Size(4096, 4096)});
+
+ // Unlike the other codecs, H.264 support is assumed up to 1080p, even if
+ // our initial queries fail. If they fail, we use the defaults set above.
+ if (!result.max_landscape_resolution.IsEmpty()) {
+ for (const auto profile : kSupportedH264Profiles)
+ supported_resolutions[profile] = result;
+ }
+ continue;
+ }
+
+ // Note: Each bit depth of AV1 uses a different DXGI_FORMAT, here we only
+ // test for the 8-bit one (NV12).
+ if (should_test_for_av1_support) {
+ if (profile_id == DXVA_ModeAV1_VLD_Profile0) {
+ supported_resolutions[AV1PROFILE_PROFILE_MAIN] = GetResolutionsForGUID(
+ video_device.Get(), profile_id, kModernResolutions);
+ continue;
+ }
+ if (profile_id == DXVA_ModeAV1_VLD_Profile1) {
+ supported_resolutions[AV1PROFILE_PROFILE_HIGH] = GetResolutionsForGUID(
+ video_device.Get(), profile_id, kModernResolutions);
+ continue;
+ }
+ if (profile_id == DXVA_ModeAV1_VLD_Profile2) {
+ // TODO(dalecurtis): 12-bit profile 2 support is complicated. Ideally,
+ // we should test DXVA_ModeAV1_VLD_12bit_Profile2 and
+ // DXVA_ModeAV1_VLD_12bit_Profile2_420 when the bit depth of the content
+ // is 12-bit. However we don't know the bit depth or pixel format until
+ // too late. In these cases we'll end up initializing the decoder and
+ // failing on the first decode (which will trigger software fallback).
+ supported_resolutions[AV1PROFILE_PROFILE_PRO] = GetResolutionsForGUID(
+ video_device.Get(), profile_id, kModernResolutions);
+ continue;
+ }
+ }
+
+ if (!workarounds.disable_accelerated_vp8_decode &&
+ profile_id == D3D11_DECODER_PROFILE_VP8_VLD &&
+ base::FeatureList::IsEnabled(kMediaFoundationVP8Decoding)) {
+ supported_resolutions[VP8PROFILE_ANY] =
+ GetResolutionsForGUID(video_device.Get(), profile_id,
+ {gfx::Size(4096, 2160), gfx::Size(4096, 2304),
+ gfx::Size(4096, 4096)});
+ continue;
+ }
+
+ if (workarounds.disable_accelerated_vp9_decode)
+ continue;
+
+ if (profile_id == D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0) {
+ supported_resolutions[VP9PROFILE_PROFILE0] = GetResolutionsForGUID(
+ video_device.Get(), profile_id, kModernResolutions);
+ continue;
+ }
+
+ // RS3 has issues with VP9.2 decoding. See https://crbug.com/937108.
+ if (profile_id == D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2 &&
+ base::win::GetVersion() != base::win::Version::WIN10_RS3) {
+ supported_resolutions[VP9PROFILE_PROFILE2] = GetResolutionsForGUID(
+ video_device.Get(), profile_id, kModernResolutions, DXGI_FORMAT_P010);
+ continue;
+ }
}
- *vp9_0_resolutions = GetMaxResolutionsForGUIDs(
- vp9_0_resolutions->first, video_device.Get(),
- {D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0},
- {gfx::Size(4096, 2160), gfx::Size(4096, 2304), gfx::Size(7680, 4320),
- gfx::Size(8192, 4320), gfx::Size(8192, 8192)});
-
- // RS3 has issues with VP9.2 decoding. See https://crbug.com/937108.
- if (base::win::GetVersion() == base::win::Version::WIN10_RS3)
- return;
-
- *vp9_2_resolutions = GetMaxResolutionsForGUIDs(
- vp9_2_resolutions->first, video_device.Get(),
- {D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2},
- {gfx::Size(4096, 2160), gfx::Size(4096, 2304), gfx::Size(7680, 4320),
- gfx::Size(8192, 4320), gfx::Size(8192, 8192)},
- DXGI_FORMAT_P010);
+ return supported_resolutions;
}
} // namespace media
diff --git a/chromium/media/gpu/windows/supported_profile_helpers.h b/chromium/media/gpu/windows/supported_profile_helpers.h
index 1834f0ba62a..6e521d08ff6 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers.h
+++ b/chromium/media/gpu/windows/supported_profile_helpers.h
@@ -5,47 +5,35 @@
#ifndef MEDIA_GPU_WINDOWS_SUPPORTED_PROFILE_HELPERS_H_
#define MEDIA_GPU_WINDOWS_SUPPORTED_PROFILE_HELPERS_H_
-#include <d3d11_1.h>
-#include <wrl/client.h>
-#include <memory>
-#include <utility>
-#include <vector>
-
+#include "base/containers/flat_map.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "media/base/video_codecs.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/windows/d3d11_com_defs.h"
-#include "ui/gfx/geometry/rect.h"
-
+#include "ui/gfx/geometry/size.h"
namespace media {
-using ResolutionPair = std::pair<gfx::Size, gfx::Size>;
-
-bool IsLegacyGPU(ID3D11Device* device);
-
-// Returns true if a ID3D11VideoDecoder can be created for |resolution_to_test|
-// on the given |video_device|.
-bool IsResolutionSupportedForDevice(const gfx::Size& resolution_to_test,
- const GUID& decoder_guid,
- ID3D11VideoDevice* video_device,
- DXGI_FORMAT format);
-
-ResolutionPair GetMaxResolutionsForGUIDs(
- const gfx::Size& default_max,
- ID3D11VideoDevice* video_device,
- const std::vector<GUID>& valid_guids,
- const std::vector<gfx::Size>& resolutions_to_test,
- DXGI_FORMAT format = DXGI_FORMAT_NV12);
-
-// TODO(dalecurtis): This function should be changed to use return values.
+struct SupportedResolutionRange {
+ gfx::Size min_resolution;
+ gfx::Size max_landscape_resolution;
+ gfx::Size max_portrait_resolution;
+};
+
+using SupportedResolutionRangeMap =
+ base::flat_map<VideoCodecProfile, SupportedResolutionRange>;
+
+// Enumerates the extent of hardware decoding support for H.264, VP8, VP9, and
+// AV1. If a codec is supported, its minimum and maximum supported resolutions
+// are returned under the appropriate VideoCodecProfile entry.
+//
+// Notes:
+// - VP8 and AV1 are only tested if their base::Feature entries are enabled.
+// - Only baseline, main, and high H.264 profiles are supported.
MEDIA_GPU_EXPORT
-void GetResolutionsForDecoders(std::vector<GUID> h264_guids,
- ComD3D11Device device,
- const gpu::GpuDriverBugWorkarounds& workarounds,
- ResolutionPair* h264_resolutions,
- ResolutionPair* vp8_resolutions,
- ResolutionPair* vp9_0_resolutions,
- ResolutionPair* vp9_2_resolutions);
+SupportedResolutionRangeMap GetSupportedD3D11VideoDecoderResolutions(
+ ComD3D11Device device,
+ const gpu::GpuDriverBugWorkarounds& workarounds);
} // namespace media
diff --git a/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc b/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
index 67aeb7d45b8..0d5da5b6e47 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
+++ b/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
@@ -15,6 +15,7 @@
#include "media/base/media_switches.h"
#include "media/base/test_helpers.h"
#include "media/base/win/d3d11_mocks.h"
+#include "media/gpu/windows/av1_guids.h"
#include "testing/gtest/include/gtest/gtest.h"
using ::testing::_;
@@ -31,25 +32,28 @@ using ::testing::WithArgs;
return; \
} while (0)
-HRESULT SetIfSizeLessThan(D3D11_VIDEO_DECODER_DESC* desc, UINT* count) {
- *count = 1;
- return S_OK;
-}
+namespace {
+
+using PciId = std::pair<uint16_t, uint16_t>;
+constexpr PciId kLegacyIntelGpu = {0x8086, 0x102};
+constexpr PciId kRecentIntelGpu = {0x8086, 0x100};
+constexpr PciId kLegacyAmdGpu = {0x1022, 0x130f};
+constexpr PciId kRecentAmdGpu = {0x1022, 0x130e};
+
+constexpr gfx::Size kMinResolution(64, 64);
+constexpr gfx::Size kFullHd(1920, 1088);
+constexpr gfx::Size kSquare4k(4096, 4096);
+constexpr gfx::Size kSquare8k(8192, 8192);
+
+} // namespace
namespace media {
+constexpr VideoCodecProfile kSupportedH264Profiles[] = {
+ H264PROFILE_BASELINE, H264PROFILE_MAIN, H264PROFILE_HIGH};
+
class SupportedResolutionResolverTest : public ::testing::Test {
public:
- const std::pair<uint16_t, uint16_t> LegacyIntelGPU = {0x8086, 0x102};
- const std::pair<uint16_t, uint16_t> RecentIntelGPU = {0x8086, 0x100};
- const std::pair<uint16_t, uint16_t> LegacyAMDGPU = {0x1022, 0x130f};
- const std::pair<uint16_t, uint16_t> RecentAMDGPU = {0x1022, 0x130e};
-
- const ResolutionPair ten_eighty = {{1920, 1080}, {1080, 1920}};
- const ResolutionPair zero = {{0, 0}, {0, 0}};
- const ResolutionPair tall4k = {{4096, 2304}, {2304, 4096}};
- const ResolutionPair eightKsquare = {{8192, 8192}, {8192, 8192}};
-
void SetUp() override {
gpu_workarounds_.disable_dxgi_zero_copy_video = false;
mock_d3d11_device_ = CreateD3D11Mock<NiceMock<D3D11DeviceMock>>();
@@ -68,11 +72,11 @@ class SupportedResolutionResolverTest : public ::testing::Test {
ON_CALL(*mock_dxgi_device_.Get(), GetAdapter(_))
.WillByDefault(SetComPointeeAndReturnOk<0>(mock_dxgi_adapter_.Get()));
- SetGPUProfile(RecentIntelGPU);
- SetMaxResolutionForGUID(D3D11_DECODER_PROFILE_H264_VLD_NOFGT, {4096, 4096});
+ SetGpuProfile(kRecentIntelGpu);
+ SetMaxResolution(D3D11_DECODER_PROFILE_H264_VLD_NOFGT, kSquare4k);
}
- void SetMaxResolutionForGUID(const GUID& g, const gfx::Size& max_res) {
+ void SetMaxResolution(const GUID& g, const gfx::Size& max_res) {
max_size_for_guids_[g] = max_res;
ON_CALL(*mock_d3d11_video_device_.Get(), GetVideoDecoderConfigCount(_, _))
.WillByDefault(
@@ -110,7 +114,7 @@ class SupportedResolutionResolverTest : public ::testing::Test {
})));
}
- void SetGPUProfile(std::pair<uint16_t, uint16_t> vendor_and_gpu) {
+ void SetGpuProfile(std::pair<uint16_t, uint16_t> vendor_and_gpu) {
mock_adapter_desc_.DeviceId = static_cast<UINT>(vendor_and_gpu.second);
mock_adapter_desc_.VendorId = static_cast<UINT>(vendor_and_gpu.first);
@@ -119,6 +123,39 @@ class SupportedResolutionResolverTest : public ::testing::Test {
DoAll(SetArgPointee<0>(mock_adapter_desc_), Return(S_OK)));
}
+ void AssertDefaultSupport(
+ const SupportedResolutionRangeMap& supported_resolutions,
+ size_t expected_size = 3u) {
+ ASSERT_EQ(expected_size, supported_resolutions.size());
+ for (const auto profile : kSupportedH264Profiles) {
+ auto it = supported_resolutions.find(profile);
+ ASSERT_NE(it, supported_resolutions.end());
+ EXPECT_EQ(kMinResolution, it->second.min_resolution);
+ EXPECT_EQ(kFullHd, it->second.max_landscape_resolution);
+ EXPECT_EQ(gfx::Size(), it->second.max_portrait_resolution);
+ }
+ }
+
+ void TestDecoderSupport(const GUID& decoder,
+ VideoCodecProfile profile,
+ const gfx::Size& max_res = kSquare4k,
+ const gfx::Size& max_landscape_res = kSquare4k,
+ const gfx::Size& max_portrait_res = kSquare4k) {
+ EnableDecoders({decoder});
+ SetMaxResolution(decoder, max_res);
+
+ const auto supported_resolutions = GetSupportedD3D11VideoDecoderResolutions(
+ mock_d3d11_device_, gpu_workarounds_);
+ AssertDefaultSupport(supported_resolutions,
+ base::size(kSupportedH264Profiles) + 1);
+
+ auto it = supported_resolutions.find(profile);
+ ASSERT_NE(it, supported_resolutions.end());
+ EXPECT_EQ(kMinResolution, it->second.min_resolution);
+ EXPECT_EQ(max_landscape_res, it->second.max_landscape_resolution);
+ EXPECT_EQ(max_portrait_res, it->second.max_portrait_resolution);
+ }
+
Microsoft::WRL::ComPtr<D3D11DeviceMock> mock_d3d11_device_;
Microsoft::WRL::ComPtr<DXGIAdapterMock> mock_dxgi_adapter_;
Microsoft::WRL::ComPtr<DXGIDeviceMock> mock_dxgi_device_;
@@ -131,144 +168,128 @@ class SupportedResolutionResolverTest : public ::testing::Test {
return memcmp(&a, &b, sizeof(GUID)) < 0;
}
};
- std::map<GUID, gfx::Size, GUIDComparison> max_size_for_guids_;
+ base::flat_map<GUID, gfx::Size, GUIDComparison> max_size_for_guids_;
};
-TEST_F(SupportedResolutionResolverTest, NoDeviceAllDefault) {
+TEST_F(SupportedResolutionResolverTest, HasH264SupportByDefault) {
DONT_RUN_ON_WIN_7();
+ AssertDefaultSupport(
+ GetSupportedD3D11VideoDecoderResolutions(nullptr, gpu_workarounds_));
- ResolutionPair h264_res_expected = {{1, 2}, {3, 4}};
- ResolutionPair h264_res = {{1, 2}, {3, 4}};
- ResolutionPair vp8_res;
- ResolutionPair vp9_0_res;
- ResolutionPair vp9_2_res;
- GetResolutionsForDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT}, nullptr,
- gpu_workarounds_, &h264_res, &vp8_res, &vp9_0_res,
- &vp9_2_res);
-
- ASSERT_EQ(h264_res, h264_res_expected);
- ASSERT_EQ(vp8_res, zero);
- ASSERT_EQ(vp9_0_res, zero);
- ASSERT_EQ(vp9_0_res, zero);
-}
-
-TEST_F(SupportedResolutionResolverTest, LegacyGPUAllDefault) {
- DONT_RUN_ON_WIN_7();
+ SetGpuProfile(kLegacyIntelGpu);
+ AssertDefaultSupport(GetSupportedD3D11VideoDecoderResolutions(
+ mock_d3d11_device_, gpu_workarounds_));
- SetGPUProfile(LegacyIntelGPU);
-
- ResolutionPair h264_res_expected = {{1, 2}, {3, 4}};
- ResolutionPair h264_res = {{1, 2}, {3, 4}};
- ResolutionPair vp8_res;
- ResolutionPair vp9_0_res;
- ResolutionPair vp9_2_res;
- GetResolutionsForDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT},
- mock_d3d11_device_, gpu_workarounds_, &h264_res,
- &vp8_res, &vp9_0_res, &vp9_2_res);
-
- ASSERT_EQ(h264_res, h264_res_expected);
- ASSERT_EQ(vp8_res, zero);
- ASSERT_EQ(vp9_2_res, zero);
- ASSERT_EQ(vp9_0_res, zero);
+ SetGpuProfile(kLegacyAmdGpu);
+ AssertDefaultSupport(GetSupportedD3D11VideoDecoderResolutions(
+ mock_d3d11_device_, gpu_workarounds_));
}
TEST_F(SupportedResolutionResolverTest, WorkaroundsDisableVpx) {
DONT_RUN_ON_WIN_7();
- gpu_workarounds_.disable_dxgi_zero_copy_video = true;
- EnableDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT});
+ gpu_workarounds_.disable_accelerated_vp8_decode = true;
+ gpu_workarounds_.disable_accelerated_vp9_decode = true;
+ EnableDecoders({D3D11_DECODER_PROFILE_VP8_VLD,
+ D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0,
+ D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2});
- ResolutionPair h264_res;
- ResolutionPair vp8_res;
- ResolutionPair vp9_0_res;
- ResolutionPair vp9_2_res;
- GetResolutionsForDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT},
- mock_d3d11_device_, gpu_workarounds_, &h264_res,
- &vp8_res, &vp9_0_res, &vp9_2_res);
+ AssertDefaultSupport(GetSupportedD3D11VideoDecoderResolutions(
+ mock_d3d11_device_, gpu_workarounds_));
+}
- ASSERT_EQ(h264_res, tall4k);
+TEST_F(SupportedResolutionResolverTest, H264Supports4k) {
+ DONT_RUN_ON_WIN_7();
- ASSERT_EQ(vp8_res, zero);
- ASSERT_EQ(vp9_0_res, zero);
- ASSERT_EQ(vp9_2_res, zero);
+ EnableDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT});
+ const auto supported_resolutions = GetSupportedD3D11VideoDecoderResolutions(
+ mock_d3d11_device_, gpu_workarounds_);
+
+ ASSERT_EQ(3u, supported_resolutions.size());
+ for (const auto profile : kSupportedH264Profiles) {
+ auto it = supported_resolutions.find(profile);
+ ASSERT_NE(it, supported_resolutions.end());
+ EXPECT_EQ(kMinResolution, it->second.min_resolution);
+ EXPECT_EQ(kSquare4k, it->second.max_landscape_resolution);
+ EXPECT_EQ(kSquare4k, it->second.max_portrait_resolution);
+ }
}
-TEST_F(SupportedResolutionResolverTest, VP8_Supports4k) {
+TEST_F(SupportedResolutionResolverTest, VP8Supports4k) {
DONT_RUN_ON_WIN_7();
base::test::ScopedFeatureList scoped_feature_list;
scoped_feature_list.InitAndEnableFeature(kMediaFoundationVP8Decoding);
+ TestDecoderSupport(D3D11_DECODER_PROFILE_VP8_VLD, VP8PROFILE_ANY);
+}
- EnableDecoders(
- {D3D11_DECODER_PROFILE_H264_VLD_NOFGT, D3D11_DECODER_PROFILE_VP8_VLD});
- SetMaxResolutionForGUID(D3D11_DECODER_PROFILE_VP8_VLD, {4096, 4096});
-
- ResolutionPair h264_res;
- ResolutionPair vp8_res;
- ResolutionPair vp9_0_res;
- ResolutionPair vp9_2_res;
- GetResolutionsForDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT},
- mock_d3d11_device_, gpu_workarounds_, &h264_res,
- &vp8_res, &vp9_0_res, &vp9_2_res);
-
- ASSERT_EQ(h264_res, tall4k);
-
- ASSERT_EQ(vp8_res, tall4k);
-
- ASSERT_EQ(vp9_0_res, zero);
+TEST_F(SupportedResolutionResolverTest, VP9Profile0Supports8k) {
+ DONT_RUN_ON_WIN_7();
+ TestDecoderSupport(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0,
+ VP9PROFILE_PROFILE0, kSquare8k, kSquare8k, kSquare8k);
+}
- ASSERT_EQ(vp9_2_res, zero);
+TEST_F(SupportedResolutionResolverTest, VP9Profile2Supports8k) {
+ DONT_RUN_ON_WIN_7();
+ TestDecoderSupport(D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2,
+ VP9PROFILE_PROFILE2, kSquare8k, kSquare8k, kSquare8k);
}
-TEST_F(SupportedResolutionResolverTest, VP9_0Supports8k) {
+TEST_F(SupportedResolutionResolverTest, MultipleCodecs) {
DONT_RUN_ON_WIN_7();
+ SetGpuProfile(kRecentAmdGpu);
+
+ // H.264 and VP9.0 are the most common supported codecs.
EnableDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT,
D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0});
- SetMaxResolutionForGUID(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0, {8192, 8192});
-
- ResolutionPair h264_res;
- ResolutionPair vp8_res;
- ResolutionPair vp9_0_res;
- ResolutionPair vp9_2_res;
- GetResolutionsForDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT},
- mock_d3d11_device_, gpu_workarounds_, &h264_res,
- &vp8_res, &vp9_0_res, &vp9_2_res);
-
- ASSERT_EQ(h264_res, tall4k);
-
- ASSERT_EQ(vp8_res, zero);
-
- ASSERT_EQ(vp9_0_res, eightKsquare);
+ SetMaxResolution(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0, kSquare8k);
+
+ const auto supported_resolutions = GetSupportedD3D11VideoDecoderResolutions(
+ mock_d3d11_device_, gpu_workarounds_);
+
+ ASSERT_EQ(base::size(kSupportedH264Profiles) + 1,
+ supported_resolutions.size());
+ for (const auto profile : kSupportedH264Profiles) {
+ auto it = supported_resolutions.find(profile);
+ ASSERT_NE(it, supported_resolutions.end());
+ EXPECT_EQ(kMinResolution, it->second.min_resolution);
+ EXPECT_EQ(kSquare4k, it->second.max_landscape_resolution);
+ EXPECT_EQ(kSquare4k, it->second.max_portrait_resolution);
+ }
- ASSERT_EQ(vp9_2_res, zero);
+ auto it = supported_resolutions.find(VP9PROFILE_PROFILE0);
+ ASSERT_NE(it, supported_resolutions.end());
+ EXPECT_EQ(kMinResolution, it->second.min_resolution);
+ EXPECT_EQ(kSquare8k, it->second.max_landscape_resolution);
+ EXPECT_EQ(kSquare8k, it->second.max_portrait_resolution);
}
-TEST_F(SupportedResolutionResolverTest, BothVP9ProfilesSupported) {
+TEST_F(SupportedResolutionResolverTest, AV1ProfileMainSupports8k) {
DONT_RUN_ON_WIN_7();
- EnableDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT,
- D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0,
- D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2});
- SetMaxResolutionForGUID(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0, {8192, 8192});
- SetMaxResolutionForGUID(D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2,
- {8192, 8192});
-
- ResolutionPair h264_res;
- ResolutionPair vp8_res;
- ResolutionPair vp9_0_res;
- ResolutionPair vp9_2_res;
- GetResolutionsForDecoders({D3D11_DECODER_PROFILE_H264_VLD_NOFGT},
- mock_d3d11_device_, gpu_workarounds_, &h264_res,
- &vp8_res, &vp9_0_res, &vp9_2_res);
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kMediaFoundationAV1Decoding);
+ TestDecoderSupport(DXVA_ModeAV1_VLD_Profile0, AV1PROFILE_PROFILE_MAIN,
+ kSquare8k, kSquare8k, kSquare8k);
+}
- ASSERT_EQ(h264_res, tall4k);
+TEST_F(SupportedResolutionResolverTest, AV1ProfileHighSupports8k) {
+ DONT_RUN_ON_WIN_7();
- ASSERT_EQ(vp8_res, zero);
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kMediaFoundationAV1Decoding);
+ TestDecoderSupport(DXVA_ModeAV1_VLD_Profile1, AV1PROFILE_PROFILE_HIGH,
+ kSquare8k, kSquare8k, kSquare8k);
+}
- ASSERT_EQ(vp9_0_res, eightKsquare);
+TEST_F(SupportedResolutionResolverTest, AV1ProfileProSupports8k) {
+ DONT_RUN_ON_WIN_7();
- ASSERT_EQ(vp9_2_res, eightKsquare);
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kMediaFoundationAV1Decoding);
+ TestDecoderSupport(DXVA_ModeAV1_VLD_Profile2, AV1PROFILE_PROFILE_PRO,
+ kSquare8k, kSquare8k, kSquare8k);
}
} // namespace media
diff --git a/chromium/media/media_options.gni b/chromium/media/media_options.gni
index ef8fbe4de4a..c66e189ebb0 100644
--- a/chromium/media/media_options.gni
+++ b/chromium/media/media_options.gni
@@ -29,7 +29,6 @@ media_subcomponent_deps = [
"//media/muxers",
"//media/renderers",
"//media/video",
- "//media/webcodecs",
]
if (is_fuchsia) {
diff --git a/chromium/media/mojo/clients/BUILD.gn b/chromium/media/mojo/clients/BUILD.gn
index 68fd43a30e6..6f470bae8ab 100644
--- a/chromium/media/mojo/clients/BUILD.gn
+++ b/chromium/media/mojo/clients/BUILD.gn
@@ -27,6 +27,8 @@ jumbo_source_set("clients") {
# TODO(liberato): can we avoid this?
"//content/test/*",
+
+ "//third_party/blink/renderer/modules/webcodecs",
]
sources = [
diff --git a/chromium/media/mojo/clients/mojo_cdm.cc b/chromium/media/mojo/clients/mojo_cdm.cc
index acc9d1ca9de..df477ee2584 100644
--- a/chromium/media/mojo/clients/mojo_cdm.cc
+++ b/chromium/media/mojo/clients/mojo_cdm.cc
@@ -23,7 +23,6 @@
#include "media/mojo/mojom/decryptor.mojom.h"
#include "services/service_manager/public/cpp/connect.h"
#include "services/service_manager/public/mojom/interface_provider.mojom.h"
-#include "url/origin.h"
namespace media {
@@ -39,7 +38,6 @@ void RecordConnectionError(bool connection_error_happened) {
// static
void MojoCdm::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
mojo::PendingRemote<mojom::ContentDecryptionModule> remote_cdm,
const SessionMessageCB& session_message_cb,
@@ -55,8 +53,7 @@ void MojoCdm::Create(
auto promise = std::make_unique<CdmInitializedPromise>(
std::move(cdm_created_cb), mojo_cdm);
- mojo_cdm->InitializeCdm(key_system, security_origin, cdm_config,
- std::move(promise));
+ mojo_cdm->InitializeCdm(key_system, cdm_config, std::move(promise));
}
MojoCdm::MojoCdm(mojo::PendingRemote<mojom::ContentDecryptionModule> remote_cdm,
@@ -103,7 +100,6 @@ MojoCdm::~MojoCdm() {
// error handler can't be invoked and callbacks won't be dispatched.
void MojoCdm::InitializeCdm(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
std::unique_ptr<CdmInitializedPromise> promise) {
DVLOG(1) << __func__ << ": " << key_system;
@@ -127,7 +123,7 @@ void MojoCdm::InitializeCdm(const std::string& key_system,
pending_init_promise_ = std::move(promise);
remote_cdm_->Initialize(
- key_system, security_origin, cdm_config,
+ key_system, cdm_config,
base::BindOnce(&MojoCdm::OnCdmInitialized, base::Unretained(this)));
}
diff --git a/chromium/media/mojo/clients/mojo_cdm.h b/chromium/media/mojo/clients/mojo_cdm.h
index 45f0fcb09f1..bee79ff2cf1 100644
--- a/chromium/media/mojo/clients/mojo_cdm.h
+++ b/chromium/media/mojo/clients/mojo_cdm.h
@@ -29,10 +29,6 @@ namespace base {
class SingleThreadTaskRunner;
}
-namespace url {
-class Origin;
-}
-
namespace media {
class MojoDecryptor;
@@ -48,7 +44,6 @@ class MojoCdm : public ContentDecryptionModule,
static void Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
mojo::PendingRemote<mojom::ContentDecryptionModule> remote_cdm,
const SessionMessageCB& session_message_cb,
@@ -94,7 +89,6 @@ class MojoCdm : public ContentDecryptionModule,
~MojoCdm() final;
void InitializeCdm(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
std::unique_ptr<CdmInitializedPromise> promise);
diff --git a/chromium/media/mojo/clients/mojo_cdm_factory.cc b/chromium/media/mojo/clients/mojo_cdm_factory.cc
index d606e735eb1..2d04068fb22 100644
--- a/chromium/media/mojo/clients/mojo_cdm_factory.cc
+++ b/chromium/media/mojo/clients/mojo_cdm_factory.cc
@@ -15,7 +15,6 @@
#include "media/mojo/clients/mojo_cdm.h"
#include "media/mojo/mojom/interface_factory.mojom.h"
#include "mojo/public/cpp/bindings/interface_request.h"
-#include "url/origin.h"
namespace media {
@@ -29,7 +28,6 @@ MojoCdmFactory::~MojoCdmFactory() = default;
void MojoCdmFactory::Create(
const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
@@ -38,13 +36,6 @@ void MojoCdmFactory::Create(
CdmCreatedCB cdm_created_cb) {
DVLOG(2) << __func__ << ": " << key_system;
- if (security_origin.opaque()) {
- base::ThreadTaskRunnerHandle::Get()->PostTask(
- FROM_HERE,
- base::BindOnce(std::move(cdm_created_cb), nullptr, "Invalid origin."));
- return;
- }
-
// If AesDecryptor can be used, always use it here in the local process.
// Note: We should not run AesDecryptor in the browser process except for
// testing. See http://crbug.com/441957.
@@ -63,9 +54,8 @@ void MojoCdmFactory::Create(
interface_factory_->CreateCdm(
key_system, cdm_pending_remote.InitWithNewPipeAndPassReceiver());
- MojoCdm::Create(key_system, security_origin, cdm_config,
- std::move(cdm_pending_remote), session_message_cb,
- session_closed_cb, session_keys_change_cb,
+ MojoCdm::Create(key_system, cdm_config, std::move(cdm_pending_remote),
+ session_message_cb, session_closed_cb, session_keys_change_cb,
session_expiration_update_cb, std::move(cdm_created_cb));
}
diff --git a/chromium/media/mojo/clients/mojo_cdm_factory.h b/chromium/media/mojo/clients/mojo_cdm_factory.h
index c3b8d055428..0d987bc51dd 100644
--- a/chromium/media/mojo/clients/mojo_cdm_factory.h
+++ b/chromium/media/mojo/clients/mojo_cdm_factory.h
@@ -21,7 +21,6 @@ class MojoCdmFactory : public CdmFactory {
// CdmFactory implementation.
void Create(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
const SessionMessageCB& session_message_cb,
const SessionClosedCB& session_closed_cb,
diff --git a/chromium/media/mojo/clients/mojo_cdm_unittest.cc b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
index f7b402b4f54..99fd9646162 100644
--- a/chromium/media/mojo/clients/mojo_cdm_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
@@ -24,7 +24,6 @@
#include "mojo/public/cpp/bindings/receiver.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "url/gurl.h"
-#include "url/origin.h"
using ::testing::_;
using ::testing::DoAll;
@@ -48,7 +47,6 @@ namespace media {
namespace {
const char kClearKeyKeySystem[] = "org.w3.clearkey";
-const char kTestSecurityOrigin[] = "https://www.test.com";
// Random key ID used to create a session.
const uint8_t kKeyId[] = {
@@ -97,8 +95,8 @@ class MojoCdmTest : public ::testing::Test {
}
}
- MojoCdm::Create(key_system, url::Origin::Create(GURL(kTestSecurityOrigin)),
- CdmConfig(), cdm_receiver_.BindNewPipeAndPassRemote(),
+ MojoCdm::Create(key_system, CdmConfig(),
+ cdm_receiver_.BindNewPipeAndPassRemote(),
base::Bind(&MockCdmClient::OnSessionMessage,
base::Unretained(&cdm_client_)),
base::Bind(&MockCdmClient::OnSessionClosed,
@@ -125,8 +123,6 @@ class MojoCdmTest : public ::testing::Test {
mojo_cdm_ = cdm;
remote_cdm_ = cdm_factory_.GetCreatedCdm();
EXPECT_EQ(kClearKeyKeySystem, remote_cdm_->GetKeySystem());
- EXPECT_EQ(kTestSecurityOrigin,
- remote_cdm_->GetSecurityOrigin().Serialize());
}
void ForceConnectionError() {
diff --git a/chromium/media/mojo/clients/mojo_renderer.cc b/chromium/media/mojo/clients/mojo_renderer.cc
index b6d1a3d7777..aa60a7d2a76 100644
--- a/chromium/media/mojo/clients/mojo_renderer.cc
+++ b/chromium/media/mojo/clients/mojo_renderer.cc
@@ -10,6 +10,7 @@
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/single_thread_task_runner.h"
+#include "media/base/cdm_context.h"
#include "media/base/media_resource.h"
#include "media/base/pipeline_status.h"
#include "media/base/renderer_client.h"
diff --git a/chromium/media/mojo/clients/mojo_renderer_unittest.cc b/chromium/media/mojo/clients/mojo_renderer_unittest.cc
index 5925bc6aa88..f59e1be83bd 100644
--- a/chromium/media/mojo/clients/mojo_renderer_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_unittest.cc
@@ -33,7 +33,6 @@
#include "mojo/public/cpp/bindings/self_owned_receiver.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "url/gurl.h"
-#include "url/origin.h"
using ::base::test::RunCallback;
using ::base::test::RunOnceCallback;
@@ -184,7 +183,6 @@ class MojoRendererTest : public ::testing::Test {
void CreateCdm() {
cdm_receiver_.Bind(cdm_remote_.BindNewPipeAndPassReceiver());
cdm_remote_->Initialize(kClearKeyKeySystem,
- url::Origin::Create(GURL("https://www.test.com")),
CdmConfig(),
base::BindOnce(&MojoRendererTest::OnCdmCreated,
base::Unretained(this)));
diff --git a/chromium/media/mojo/clients/mojo_renderer_wrapper.cc b/chromium/media/mojo/clients/mojo_renderer_wrapper.cc
index f23fade4203..2971244b9fa 100644
--- a/chromium/media/mojo/clients/mojo_renderer_wrapper.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_wrapper.cc
@@ -14,9 +14,9 @@ MojoRendererWrapper::MojoRendererWrapper(
MojoRendererWrapper::~MojoRendererWrapper() = default;
-void MojoRendererWrapper::Initialize(media::MediaResource* media_resource,
- media::RendererClient* client,
- media::PipelineStatusCallback init_cb) {
+void MojoRendererWrapper::Initialize(MediaResource* media_resource,
+ RendererClient* client,
+ PipelineStatusCallback init_cb) {
mojo_renderer_->Initialize(media_resource, client, std::move(init_cb));
}
@@ -36,8 +36,8 @@ void MojoRendererWrapper::SetVolume(float volume) {
mojo_renderer_->SetVolume(volume);
}
-void MojoRendererWrapper::SetCdm(media::CdmContext* cdm_context,
- media::CdmAttachedCB cdm_attached_cb) {
+void MojoRendererWrapper::SetCdm(CdmContext* cdm_context,
+ CdmAttachedCB cdm_attached_cb) {
mojo_renderer_->SetCdm(cdm_context, std::move(cdm_attached_cb));
}
diff --git a/chromium/media/mojo/clients/mojo_renderer_wrapper.h b/chromium/media/mojo/clients/mojo_renderer_wrapper.h
index 552a8f0513f..891b4ff0de7 100644
--- a/chromium/media/mojo/clients/mojo_renderer_wrapper.h
+++ b/chromium/media/mojo/clients/mojo_renderer_wrapper.h
@@ -13,8 +13,8 @@
namespace media {
// Simple wrapper around a MojoRenderer.
-// Provides a default behavior for forwarding all media::Renderer calls to a
-// media::Renderer instance in a different process, through |mojo_renderer_|.
+// Provides a default behavior for forwarding all Renderer calls to a
+// Renderer instance in a different process, through |mojo_renderer_|.
// Used as a base class to reduce boiler plate code for derived types, which can
// override only the methods they need to specialize.
class MojoRendererWrapper : public Renderer {
@@ -24,7 +24,7 @@ class MojoRendererWrapper : public Renderer {
// Renderer implementation.
void Initialize(MediaResource* media_resource,
- media::RendererClient* client,
+ RendererClient* client,
PipelineStatusCallback init_cb) override;
void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
diff --git a/chromium/media/mojo/mojom/BUILD.gn b/chromium/media/mojo/mojom/BUILD.gn
index f86298e18d2..c7cb64d1dca 100644
--- a/chromium/media/mojo/mojom/BUILD.gn
+++ b/chromium/media/mojo/mojom/BUILD.gn
@@ -62,6 +62,7 @@ mojom("mojom") {
"//gpu/ipc/common:interfaces",
"//media/learning/mojo/public/mojom",
"//mojo/public/mojom/base",
+ "//services/network/public/mojom",
"//services/service_manager/public/mojom",
"//ui/gfx/geometry/mojom",
"//ui/gfx/mojom",
@@ -79,11 +80,212 @@ mojom("mojom") {
enabled_features = [ "enable_cast_renderer" ]
}
+ shared_typemaps = [
+ {
+ types = [
+ {
+ mojom = "media.mojom.VideoFrameMetadata"
+ cpp = "::media::VideoFrameMetadata"
+ },
+ ]
+ traits_headers = [ "video_frame_metadata_mojom_traits.h" ]
+ traits_public_deps = [ ":shared_mojom_traits" ]
+ },
+ ]
+
+ cpp_typemaps = [
+ {
+ types = [
+ {
+ mojom = "media.mojom.VideoRotation"
+ cpp = "::media::VideoRotation"
+ },
+ ]
+ traits_headers = [ "media_types_enum_mojom_traits.h" ]
+ },
+ {
+ types = [
+ {
+ mojom = "media.mojom.PipelineStatistics"
+ cpp = "::media::PipelineStatistics"
+ },
+ {
+ mojom = "media.mojom.PipelineDecoderInfo"
+ cpp = "::media::PipelineDecoderInfo"
+ },
+ ]
+ traits_headers = [ "pipeline_status_mojom_traits.h" ]
+ },
+ {
+ types = [
+ {
+ mojom = "media.mojom.Status"
+ cpp = "::media::Status"
+ },
+ ]
+ traits_headers = [ "status_mojom_traits.h" ]
+ traits_sources = [ "status_mojom_traits.cc" ]
+ },
+ {
+ types = [
+ {
+ mojom = "media.mojom.VideoColorSpace.PrimaryID"
+ cpp = "::media::VideoColorSpace::PrimaryID"
+ },
+ {
+ mojom = "media.mojom.VideoColorSpace.TransferID:"
+ cpp = "::media::VideoColorSpace::TransferID"
+ },
+ {
+ mojom = "media.mojom.VideoColorSpace.MatrixID:"
+ cpp = "::media::VideoColorSpace::MatrixID"
+ },
+ {
+ mojom = "media.mojom.VideoColorSpace.RangeID:"
+ cpp = "::gfx::ColorSpace::RangeID"
+ },
+ {
+ mojom = "media.mojom.VideoColorSpace:"
+ cpp = "::media::VideoColorSpace"
+ },
+ ]
+ traits_headers = [ "video_color_space_mojom_traits.h" ]
+ },
+ {
+ types = [
+ {
+ mojom = "media.mojom.AudioCodec"
+ cpp = "::media::AudioCodec"
+ },
+ {
+ mojom = "media.mojom.AudioCodecProfile"
+ cpp = "::media::AudioCodecProfile"
+ },
+ {
+ mojom = "media.mojom.BufferingState"
+ cpp = "::media::BufferingState"
+ },
+ {
+ mojom = "media.mojom.BufferingStateChangeReason"
+ cpp = "::media::BufferingStateChangeReason"
+ },
+ {
+ mojom = "media.mojom.ChannelLayout"
+ cpp = "::media::ChannelLayout"
+ },
+ {
+ mojom = "media.mojom.DecodeStatus"
+ cpp = "::media::DecodeStatus"
+ },
+ {
+ mojom = "media.mojom.EncryptionScheme"
+ cpp = "::media::EncryptionScheme"
+ },
+ {
+ mojom = "media.mojom.MediaContainerName"
+ cpp = "::media::container_names::MediaContainerName"
+ },
+ {
+ mojom = "media.mojom.MediaLogRecord"
+ cpp = "::media::MediaLogRecord"
+ },
+ {
+ mojom = "media.mojom.OutputDeviceStatus"
+ cpp = "::media::OutputDeviceStatus"
+ },
+ {
+ mojom = "media.mojom.PipelineStatus"
+ cpp = "::media::PipelineStatus"
+ },
+ {
+ mojom = "media.mojom.SampleFormat"
+ cpp = "::media::SampleFormat"
+ },
+ {
+ mojom = "media.mojom.SubsampleEntry"
+ cpp = "::media::SubsampleEntry"
+ },
+ {
+ mojom = "media.mojom.VideoCodec"
+ cpp = "::media::VideoCodec"
+ },
+ {
+ mojom = "media.mojom.VideoCodecProfile"
+ cpp = "::media::VideoCodecProfile"
+ },
+ {
+ mojom = "media.mojom.VideoPixelFormat"
+ cpp = "::media::VideoPixelFormat"
+ },
+ {
+ mojom = "media.mojom.VideoTransformation"
+ cpp = "::media::VideoTransformation"
+ },
+ {
+ mojom = "media.mojom.WaitingReason"
+ cpp = "::media::WaitingReason"
+ },
+ {
+ mojom = "media.mojom.WatchTimeKey"
+ cpp = "::media::WatchTimeKey"
+ },
+ {
+ mojom = "media.mojom.MediaStatusState"
+ cpp = "::media::MediaStatus::State"
+ },
+ {
+ mojom = "media.mojom.StatusCode"
+ cpp = "::media::StatusCode"
+ },
+ ]
+ traits_headers = [
+ "video_transformation_mojom_traits.h",
+ "//media/base/ipc/media_param_traits_macros.h",
+ ]
+ traits_sources = [ "video_transformation_mojom_traits.cc" ]
+ },
+ {
+ types = [
+ {
+ mojom = "media.mojom.VideoFrame"
+ cpp = "::scoped_refptr<::media::VideoFrame>"
+ nullable_is_same_type = true
+ },
+ ]
+ traits_headers = [ "video_frame_mojom_traits.h" ]
+ traits_sources = [ "video_frame_mojom_traits.cc" ]
+ traits_public_deps = [
+ "//media/mojo/common:mojo_shared_buffer_video_frame",
+ "//ui/gfx/geometry/mojom",
+ ]
+ },
+ ]
+
+ cpp_typemaps += shared_typemaps
+ blink_cpp_typemaps = shared_typemaps
+
export_class_attribute_blink = "BLINK_PLATFORM_EXPORT"
export_define_blink = "BLINK_PLATFORM_IMPLEMENTATION=1"
export_header_blink = "third_party/blink/public/platform/web_common.h"
}
+source_set("shared_mojom_traits") {
+ sources = [
+ "video_frame_metadata_mojom_traits.cc",
+ "video_frame_metadata_mojom_traits.h",
+ ]
+
+ public_deps = [
+ ":mojom_shared",
+ "//gpu/ipc/common:common",
+ "//gpu/ipc/common:mojom_traits",
+ "//media",
+ "//media/base/ipc:ipc",
+ "//mojo/public/mojom/base",
+ "//ui/gfx/geometry/mojom:mojom_traits",
+ ]
+}
+
mojom("remoting_common") {
sources = [ "remoting_common.mojom" ]
}
@@ -97,7 +299,10 @@ mojom("mirror_service_remoting") {
mojom("remoting") {
sources = [ "remoting.mojom" ]
- public_deps = [ ":remoting_common" ]
+ public_deps = [
+ ":mojom",
+ ":remoting_common",
+ ]
}
mojom("test_interfaces") {
@@ -114,6 +319,7 @@ source_set("unit_tests") {
"cdm_key_information_mojom_traits_unittest.cc",
"video_decoder_config_mojom_traits_unittest.cc",
"video_encode_accelerator_mojom_traits_unittest.cc",
+ "video_frame_metadata_mojom_traits_unittest.cc",
"video_frame_mojom_traits_unittest.cc",
]
diff --git a/chromium/media/mojo/mojom/content_decryption_module.mojom b/chromium/media/mojo/mojom/content_decryption_module.mojom
index ca8d3080472..6cf18916b66 100644
--- a/chromium/media/mojo/mojom/content_decryption_module.mojom
+++ b/chromium/media/mojo/mojom/content_decryption_module.mojom
@@ -5,7 +5,6 @@
module media.mojom;
import "media/mojo/mojom/decryptor.mojom";
-import "url/mojom/origin.mojom";
import "url/mojom/url.mojom";
// See media::EmeInitDataType.
@@ -79,9 +78,7 @@ interface ContentDecryptionModule {
// will be zero. Upon success, |cdm_id| will be non-zero and will later be
// used to locate the CDM at the remote side. |decryptor| is the remote
// Decryptor.
- Initialize(string key_system,
- url.mojom.Origin security_origin,
- CdmConfig cdm_config)
+ Initialize(string key_system, CdmConfig cdm_config)
=> (CdmPromiseResult result, int32 cdm_id,
pending_remote<Decryptor>? decryptor);
diff --git a/chromium/media/mojo/mojom/frame_interface_factory.mojom b/chromium/media/mojo/mojom/frame_interface_factory.mojom
index 49a153bd05f..6b56a3e06ca 100644
--- a/chromium/media/mojo/mojom/frame_interface_factory.mojom
+++ b/chromium/media/mojo/mojom/frame_interface_factory.mojom
@@ -7,6 +7,7 @@ module media.mojom;
import "media/mojo/mojom/cdm_storage.mojom";
import "media/mojo/mojom/provision_fetcher.mojom";
import "mojo/public/mojom/base/generic_pending_receiver.mojom";
+import "url/mojom/origin.mojom";
// A factory for acquiring media mojo interfaces that are bound to a
// RenderFrameHost.
@@ -18,6 +19,10 @@ interface FrameInterfaceFactory {
// CDM storage available.
CreateCdmStorage(pending_receiver<CdmStorage> cdm_storage);
+ // Gets the origin of the frame associated with the CDM.
+ [Sync]
+ GetCdmOrigin() => (url.mojom.Origin cdm_origin);
+
// Binds a generic media frame-bound interface. This is to allow //content
// embedders to provide additional interfaces.
BindEmbedderReceiver(mojo_base.mojom.GenericPendingReceiver receiver);
diff --git a/chromium/media/mojo/mojom/media_types.mojom b/chromium/media/mojo/mojom/media_types.mojom
index 6b083bfe691..2696addb60e 100644
--- a/chromium/media/mojo/mojom/media_types.mojom
+++ b/chromium/media/mojo/mojom/media_types.mojom
@@ -8,6 +8,7 @@ import "gpu/ipc/common/mailbox_holder.mojom";
import "gpu/ipc/common/vulkan_ycbcr_info.mojom";
import "mojo/public/mojom/base/time.mojom";
import "mojo/public/mojom/base/values.mojom";
+import "mojo/public/mojom/base/unguessable_token.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";
import "ui/gfx/mojom/buffer_types.mojom";
import "ui/gfx/mojom/color_space.mojom";
@@ -65,8 +66,12 @@ enum VideoCodecProfile;
enum VideoPixelFormat;
// See media/base/video_transformation.h for descriptions.
-[Native]
-enum VideoRotation;
+enum VideoRotation {
+ kVideoRotation0,
+ kVideoRotation90,
+ kVideoRotation180,
+ kVideoRotation270,
+};
// See media/base/video_transformation.h for descriptions.
struct VideoTransformation {
@@ -255,6 +260,80 @@ struct AudioDataS16 {
array<int16> data;
};
+// See media/base/video_frame_metadata.h for a description of fields.
+// TODO(crbug.com/657632): Remove |has_*| values and use nullable types.
+struct VideoFrameMetadata {
+ bool allow_overlay;
+
+ mojo_base.mojom.TimeTicks? capture_begin_time;
+ mojo_base.mojom.TimeTicks? capture_end_time;
+
+ bool has_capture_counter;
+ int32 capture_counter;
+
+ gfx.mojom.Rect? capture_update_rect;
+
+ bool copy_required;
+
+ bool end_of_stream;
+
+ mojo_base.mojom.TimeDelta? frame_duration;
+
+ bool has_frame_rate;
+ double frame_rate;
+
+ bool interactive_content;
+
+ mojo_base.mojom.TimeTicks? reference_time;
+
+ bool has_resource_utilization;
+ double resource_utilization;
+
+ bool read_lock_fences_enabled;
+
+ bool has_rotation;
+ VideoRotation rotation;
+
+ bool texture_owner;
+
+ bool wants_promotion_hint;
+
+ bool protected_video;
+
+ bool hw_protected;
+
+ mojo_base.mojom.UnguessableToken? overlay_plane_id;
+
+ bool power_efficient;
+
+ bool has_device_scale_factor;
+ double device_scale_factor;
+
+ bool has_page_scale_factor;
+ double page_scale_factor;
+
+ bool has_root_scroll_offset_x;
+ double root_scroll_offset_x;
+
+ bool has_root_scroll_offset_y;
+ double root_scroll_offset_y;
+
+ bool has_top_controls_visible_height;
+ double top_controls_visible_height;
+
+ mojo_base.mojom.TimeTicks? decode_begin_time;
+ mojo_base.mojom.TimeTicks? decode_end_time;
+
+ mojo_base.mojom.TimeDelta? processing_time;
+
+ bool has_rtp_timestamp;
+ double rtp_timestamp;
+
+ mojo_base.mojom.TimeTicks? receive_time;
+
+ mojo_base.mojom.TimeDelta? wallclock_frame_duration;
+};
+
// This defines a mojo transport format for media::VideoFrame.
struct VideoFrame {
// Format of the frame.
@@ -276,7 +355,7 @@ struct VideoFrame {
VideoFrameData data;
// Extra properties associated with the VideoFrame.
- mojo_base.mojom.DictionaryValue metadata;
+ VideoFrameMetadata metadata;
gfx.mojom.ColorSpace color_space;
HDRMetadata? hdr_metadata;
diff --git a/chromium/media/mojo/mojom/media_types.typemap b/chromium/media/mojo/mojom/media_types.typemap
deleted file mode 100644
index 46c27d067cf..00000000000
--- a/chromium/media/mojo/mojom/media_types.typemap
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/mojom/media_types.mojom"
-
-public_headers = [
- "//media/base/audio_codecs.h",
- "//media/base/buffering_state.h",
- "//media/base/channel_layout.h",
- "//media/base/container_names.h",
- "//media/base/decode_status.h",
- "//media/base/decrypt_config.h",
- "//media/base/encryption_pattern.h",
- "//media/base/encryption_scheme.h",
- "//media/base/hdr_metadata.h",
- "//media/base/media_log_record.h",
- "//media/base/media_status.h",
- "//media/base/output_device_info.h",
- "//media/base/pipeline_status.h",
- "//media/base/sample_format.h",
- "//media/base/subsample_entry.h",
- "//media/base/video_codecs.h",
- "//media/base/video_transformation.h",
- "//media/base/video_types.h",
- "//media/base/waiting.h",
- "//media/base/watch_time_keys.h",
- "//media/base/status.h",
- "//media/base/status_codes.h",
-]
-
-traits_headers = [
- "//media/base/ipc/media_param_traits_macros.h",
- "//media/mojo/mojom/video_transformation_mojom_traits.h",
-]
-
-public_deps = [
- "//media",
- "//media/base/ipc",
-]
-
-sources = [
- "//media/mojo/mojom/video_transformation_mojom_traits.cc",
- "//media/mojo/mojom/video_transformation_mojom_traits.h",
-]
-
-type_mappings = [
- "media.mojom.AudioCodec=::media::AudioCodec",
- "media.mojom.AudioCodecProfile=::media::AudioCodecProfile",
- "media.mojom.BufferingState=::media::BufferingState",
- "media.mojom.BufferingStateChangeReason=::media::BufferingStateChangeReason",
- "media.mojom.ChannelLayout=::media::ChannelLayout",
- "media.mojom.DecodeStatus=::media::DecodeStatus",
- "media.mojom.EncryptionScheme=::media::EncryptionScheme",
- "media.mojom.MediaContainerName=::media::container_names::MediaContainerName",
- "media.mojom.MediaLogRecord=::media::MediaLogRecord",
- "media.mojom.OutputDeviceStatus=::media::OutputDeviceStatus",
- "media.mojom.PipelineStatus=::media::PipelineStatus",
- "media.mojom.SampleFormat=::media::SampleFormat",
- "media.mojom.SubsampleEntry=::media::SubsampleEntry",
- "media.mojom.VideoCodec=::media::VideoCodec",
- "media.mojom.VideoCodecProfile=::media::VideoCodecProfile",
- "media.mojom.VideoPixelFormat=::media::VideoPixelFormat",
- "media.mojom.VideoRotation=::media::VideoRotation",
- "media.mojom.VideoTransformation=::media::VideoTransformation",
- "media.mojom.WaitingReason=::media::WaitingReason",
- "media.mojom.WatchTimeKey=::media::WatchTimeKey",
- "media.mojom.MediaStatusState=::media::MediaStatus::State",
- "media.mojom.StatusCode=::media::StatusCode",
-]
diff --git a/chromium/media/mojo/mojom/media_types_enum_mojom_traits.h b/chromium/media/mojo/mojom/media_types_enum_mojom_traits.h
new file mode 100644
index 00000000000..d048fd01e4d
--- /dev/null
+++ b/chromium/media/mojo/mojom/media_types_enum_mojom_traits.h
@@ -0,0 +1,63 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_MOJO_MOJOM_MEDIA_TYPES_ENUM_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_MEDIA_TYPES_ENUM_MOJOM_TRAITS_H_
+
+#include "base/notreached.h"
+#include "media/base/video_transformation.h"
+#include "media/mojo/mojom/media_types.mojom-shared.h"
+
+// Most enums have automatically generated traits, in media_types.mojom.h, due
+// to their [native] attribute. This file defines traits for enums that are used
+// in files that cannot directly include media_types.mojom.h.
+
+namespace mojo {
+
+template <>
+struct EnumTraits<media::mojom::VideoRotation, ::media::VideoRotation> {
+ static media::mojom::VideoRotation ToMojom(::media::VideoRotation input) {
+ switch (input) {
+ case ::media::VideoRotation::VIDEO_ROTATION_0:
+ return media::mojom::VideoRotation::kVideoRotation0;
+ case ::media::VideoRotation::VIDEO_ROTATION_90:
+ return media::mojom::VideoRotation::kVideoRotation90;
+ case ::media::VideoRotation::VIDEO_ROTATION_180:
+ return media::mojom::VideoRotation::kVideoRotation180;
+ case ::media::VideoRotation::VIDEO_ROTATION_270:
+ return media::mojom::VideoRotation::kVideoRotation270;
+ }
+
+ NOTREACHED();
+ return static_cast<media::mojom::VideoRotation>(input);
+ }
+
+ // Returning false results in deserialization failure and causes the
+ // message pipe receiving it to be disconnected.
+ static bool FromMojom(media::mojom::VideoRotation input,
+ media::VideoRotation* output) {
+ switch (input) {
+ case media::mojom::VideoRotation::kVideoRotation0:
+ *output = ::media::VideoRotation::VIDEO_ROTATION_0;
+ return true;
+ case media::mojom::VideoRotation::kVideoRotation90:
+ *output = ::media::VideoRotation::VIDEO_ROTATION_90;
+ return true;
+ case media::mojom::VideoRotation::kVideoRotation180:
+ *output = ::media::VideoRotation::VIDEO_ROTATION_180;
+ return true;
+ case media::mojom::VideoRotation::kVideoRotation270:
+ *output = ::media::VideoRotation::VIDEO_ROTATION_270;
+ return true;
+ }
+
+ NOTREACHED();
+ *output = static_cast<::media::VideoRotation>(input);
+ return true;
+ }
+};
+
+} // namespace mojo
+
+#endif // MEDIA_MOJO_MOJOM_MEDIA_TYPES_ENUM_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/mojom/pipeline_status.typemap b/chromium/media/mojo/mojom/pipeline_status.typemap
deleted file mode 100644
index be73386b42f..00000000000
--- a/chromium/media/mojo/mojom/pipeline_status.typemap
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/mojom/media_types.mojom"
-public_headers = [ "//media/base/pipeline_status.h" ]
-traits_headers = [ "//media/mojo/mojom/pipeline_status_mojom_traits.h" ]
-type_mappings = [
- "media.mojom.PipelineStatistics=::media::PipelineStatistics",
- "media.mojom.PipelineDecoderInfo=::media::PipelineDecoderInfo",
-]
diff --git a/chromium/media/mojo/mojom/remoting.mojom b/chromium/media/mojo/mojom/remoting.mojom
index 6146cbd70d4..860eb94e022 100644
--- a/chromium/media/mojo/mojom/remoting.mojom
+++ b/chromium/media/mojo/mojom/remoting.mojom
@@ -4,7 +4,9 @@
module media.mojom;
+import "media/mojo/mojom/media_types.mojom";
import "media/mojo/mojom/remoting_common.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
interface RemoterFactory {
// Create a new Remoter associated with the given RemotingSource and bind it
@@ -95,3 +97,59 @@ interface RemotingSource {
// have caused remoting to end.
OnStopped(RemotingStopReason reason);
};
+
+// Interface that is implemented by the host of RemotingSink and
+// RemotingDataStreamReceiver. Remotee implementation would be implemented in
+// the browser process in order to receive serialized RPC messages and frame
+// data from the sender.
+interface Remotee {
+ // Used by RemotingSink to notify Remotee that it is ready for remoting.
+ OnRemotingSinkReady(pending_remote<RemotingSink> sink);
+
+ // Used by the RemotingSink to send a serialized RPC message to sender side.
+ // |message| is a serialized protobuf from src/media/remoting/proto.
+ SendMessageToSource(array<uint8> message);
+
+ // Initialize the data pipe for RemotingDataStreamReceiver to allow Remotee to
+ // send frame data to it.
+ // Remoting media could be audio-only media, video-only media, or media has
+ // both audio and video. So, at least one of audio stream or video stream
+ // should be passed.
+ StartDataStreams(
+ pending_remote<RemotingDataStreamReceiver>? audio_stream,
+ pending_remote<RemotingDataStreamReceiver>? video_stream);
+
+ // Used by RemotingSink to notify Remotee that FlushUntil is happening in
+ // order to not send NACK for the frames that are ignored. The implementation
+ // should also forward the |{audio/video}_frame_count| to FlushUntil() of
+ // {audio|video} streams which are RemotingDataStreamReceiver implementations.
+ OnFlushUntil(uint32 audio_frame_count, uint32 video_frame_count);
+
+ // Used by RemotingSink to notify Remotee that VideoNaturalSizeChange is
+ // happening.
+ OnVideoNaturalSizeChange(gfx.mojom.Size size);
+};
+
+// Interface that is used to receive messages from the sender.
+interface RemotingSink {
+ // Used by the Remotee to send a serialized RPC message to the sink.
+ // |message| is a serialized protobuf from src/media/remoting/proto.
+ OnMessageFromSource(array<uint8> message);
+};
+
+// Interface that is implemented by either an audio or a video demuxer stream at
+// the receiver side to receive frame data. Passed to a Remotee which will send
+// it frame data.
+interface RemotingDataStreamReceiver {
+ // Used by the Remotee implementation to bind a data pipe to
+ // RemotingDataStreamReceiver.
+ InitializeDataPipe(handle<data_pipe_consumer> data_pipe);
+
+ // Used by the Remotee implementation to send frame data to
+ // RemotingDataStreamReceiver.
+ ReceiveFrame(uint32 frame_count, DecoderBuffer buffer);
+
+ // Used by the Remotee implementation to flush frames until the given frame
+ // count.
+ FlushUntil(uint32 frame_count);
+};
diff --git a/chromium/media/mojo/mojom/speech_recognition_service.mojom b/chromium/media/mojo/mojom/speech_recognition_service.mojom
index 6835c22c5ce..34dc344a8a7 100644
--- a/chromium/media/mojo/mojom/speech_recognition_service.mojom
+++ b/chromium/media/mojo/mojom/speech_recognition_service.mojom
@@ -5,15 +5,18 @@
module media.mojom;
import "media/mojo/mojom/media_types.mojom";
+import "services/network/public/mojom/url_loader_factory.mojom";
// The main interface a client uses to interact with a speech recognition
// service process. Every renderer can own one or more
// Remote<SpeechRecognitionContext>, with the receiver bound through the
-// BrowserInterfaceBroker.
+// BrowserInterfaceBroker. Returns a flag indicating whether multichannel
+// audio is supported by the speech recognition service.
interface SpeechRecognitionContext {
// Bind the recognizers to the speech recognition service.
BindRecognizer(pending_receiver<SpeechRecognitionRecognizer> receiver,
- pending_remote<SpeechRecognitionRecognizerClient> client);
+ pending_remote<SpeechRecognitionRecognizerClient> client)
+ => (bool is_multichannel_supported);
};
// The main interface to a speech secognition service process.
@@ -22,6 +25,23 @@ interface SpeechRecognitionContext {
interface SpeechRecognitionService {
// Bind the context to a new instance of the speech recognition.
BindContext(pending_receiver<SpeechRecognitionContext> context);
+
+ // Sets the URL loader factory used to create network requests.
+ SetUrlLoaderFactory(
+ pending_remote<network.mojom.URLLoaderFactory> url_loader_factory);
+
+ // Binds the speech recognition service client used by the speech
+ // recognition service to send messages back to the client.
+ BindSpeechRecognitionServiceClient(
+ pending_remote<SpeechRecognitionServiceClient> client);
+};
+
+// The interface used to send messages from the speech recognition service
+// back to the consumer of the service.
+interface SpeechRecognitionServiceClient {
+ // Executed when the network service crashes, prompting the client to
+ // reset the URL loader factory.
+ OnNetworkServiceDisconnect();
};
// The interface used to pass raw audio from the renderer to the speech
@@ -46,5 +66,9 @@ interface SpeechRecognitionRecognizerClient {
// renderer.
struct SpeechRecognitionResult {
string transcription;
+
+ // A flag indicating whether the result is final. If true, the result is
+ // locked in and the next result returned will not overlap with the previous
+ // final result.
bool is_final;
};
diff --git a/chromium/media/mojo/mojom/status.typemap b/chromium/media/mojo/mojom/status.typemap
deleted file mode 100644
index 431610801b0..00000000000
--- a/chromium/media/mojo/mojom/status.typemap
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/mojom/media_types.mojom"
-
-public_headers = [
- "//media/base/status.h",
- "//media/base/status_codes.h",
-]
-
-traits_headers = [ "//media/mojo/mojom/status_mojom_traits.h" ]
-
-sources = [ "//media/mojo/mojom/status_mojom_traits.cc" ]
-
-public_deps = [
- "//base",
- "//media",
-]
-
-deps = [ "//media/base/ipc" ]
-
-# See media_types.typemap for enum mappings.
-type_mappings = [ "media.mojom.Status=::media::Status" ]
diff --git a/chromium/media/mojo/mojom/traits_test_service.mojom b/chromium/media/mojo/mojom/traits_test_service.mojom
index 734581a6220..8e02156cb1f 100644
--- a/chromium/media/mojo/mojom/traits_test_service.mojom
+++ b/chromium/media/mojo/mojom/traits_test_service.mojom
@@ -10,3 +10,10 @@ interface TraitsTestService {
[Sync]
EchoVideoFrame(VideoFrame? f) => (VideoFrame? pass);
};
+
+// Test service to help with verifying VideoFrameMetadata traits.
+interface VideoFrameMetadataTraitsTestService {
+ // Serializes and deserializes VideoFrameMedata.
+ [Sync]
+ EchoVideoFrameMetadata(VideoFrameMetadata vfm) => (VideoFrameMetadata pass);
+};
diff --git a/chromium/media/mojo/mojom/typemaps.gni b/chromium/media/mojo/mojom/typemaps.gni
index 44e4010102d..1bd18e7fc3d 100644
--- a/chromium/media/mojo/mojom/typemaps.gni
+++ b/chromium/media/mojo/mojom/typemaps.gni
@@ -13,15 +13,10 @@ typemaps = [
"//media/mojo/mojom/demuxer_stream.typemap",
"//media/mojo/mojom/encryption_pattern.typemap",
"//media/mojo/mojom/hdr_metadata.typemap",
- "//media/mojo/mojom/media_types.typemap",
- "//media/mojo/mojom/pipeline_status.typemap",
- "//media/mojo/mojom/video_color_space.typemap",
"//media/mojo/mojom/video_decoder.typemap",
"//media/mojo/mojom/video_decoder_config.typemap",
"//media/mojo/mojom/video_encode_accelerator.typemap",
"//media/mojo/mojom/video_encoder_info.typemap",
- "//media/mojo/mojom/video_frame.typemap",
- "//media/mojo/mojom/status.typemap",
]
if (enable_media_drm_storage) {
diff --git a/chromium/media/mojo/mojom/video_color_space.typemap b/chromium/media/mojo/mojom/video_color_space.typemap
deleted file mode 100644
index 12be2cb1cbf..00000000000
--- a/chromium/media/mojo/mojom/video_color_space.typemap
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/mojom/media_types.mojom"
-public_headers = [
- "//media/base/video_color_space.h",
- "//ui/gfx/color_space.h",
-]
-traits_headers = [ "//media/mojo/mojom/video_color_space_mojom_traits.h" ]
-type_mappings = [
- "media.mojom.VideoColorSpace.PrimaryID=::media::VideoColorSpace::PrimaryID",
- "media.mojom.VideoColorSpace.TransferID=::media::VideoColorSpace::TransferID",
- "media.mojom.VideoColorSpace.MatrixID=::media::VideoColorSpace::MatrixID",
- "media.mojom.VideoColorSpace.RangeID=::gfx::ColorSpace::RangeID",
- "media.mojom.VideoColorSpace=::media::VideoColorSpace",
-]
diff --git a/chromium/media/mojo/mojom/video_frame.typemap b/chromium/media/mojo/mojom/video_frame.typemap
deleted file mode 100644
index d465abcc3cc..00000000000
--- a/chromium/media/mojo/mojom/video_frame.typemap
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/mojom/media_types.mojom"
-
-# Additional headers required by any code which would depend on the mojom
-# definition of media.mojom.VideoFrame now that the typemap is applied. Any
-# headers required for the native target type definition should be listed here.
-public_headers = [
- "//base/memory/ref_counted.h",
- "//media/base/video_frame.h",
-]
-
-# Headers which contain the relevant StructTraits specialization(s) for any
-# type mappings described by this file.
-traits_headers = [ "//media/mojo/mojom/video_frame_mojom_traits.h" ]
-
-sources = [
- "video_frame_mojom_traits.cc",
-]
-
-# Target dependencies exposed by the public_headers and traits_headers.
-public_deps = [
- "//base",
- "//media",
-]
-
-deps = [
- "//gpu/ipc/common:common",
- "//gpu/ipc/common:mojom_traits",
- "//media/base/ipc",
- "//media/mojo/common:mojo_shared_buffer_video_frame",
- "//ui/gfx/geometry/mojom:mojom_traits",
-]
-
-type_mappings = [ "media.mojom.VideoFrame=::scoped_refptr<::media::VideoFrame>[nullable_is_same_type]" ]
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc
new file mode 100644
index 00000000000..0fe7306b64c
--- /dev/null
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc
@@ -0,0 +1,86 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/mojom/video_frame_metadata_mojom_traits.h"
+
+#include <utility>
+
+#include "base/bind_helpers.h"
+#include "base/logging.h"
+#include "build/build_config.h"
+#include "mojo/public/cpp/base/time_mojom_traits.h"
+#include "mojo/public/cpp/base/unguessable_token_mojom_traits.h"
+
+namespace mojo {
+
+// Deserializes has_field and field into a base::Optional.
+#define DESERIALIZE_INTO_OPT(field) \
+ if (input.has_##field()) \
+ output->field = input.field()
+
+#define READ_AND_ASSIGN_OPT(type, field, FieldInCamelCase) \
+ base::Optional<type> field; \
+ if (!input.Read##FieldInCamelCase(&field)) \
+ return false; \
+ \
+ output->field = field
+
+// static
+bool StructTraits<media::mojom::VideoFrameMetadataDataView,
+ media::VideoFrameMetadata>::
+ Read(media::mojom::VideoFrameMetadataDataView input,
+ media::VideoFrameMetadata* output) {
+ // int.
+ DESERIALIZE_INTO_OPT(capture_counter);
+
+ // bool.
+ output->allow_overlay = input.allow_overlay();
+ output->copy_required = input.copy_required();
+ output->end_of_stream = input.end_of_stream();
+ output->texture_owner = input.texture_owner();
+ output->wants_promotion_hint = input.wants_promotion_hint();
+ output->protected_video = input.protected_video();
+ output->hw_protected = input.hw_protected();
+ output->power_efficient = input.power_efficient();
+ output->read_lock_fences_enabled = input.read_lock_fences_enabled();
+ output->interactive_content = input.interactive_content();
+
+ // double.
+ DESERIALIZE_INTO_OPT(device_scale_factor);
+ DESERIALIZE_INTO_OPT(page_scale_factor);
+ DESERIALIZE_INTO_OPT(root_scroll_offset_x);
+ DESERIALIZE_INTO_OPT(root_scroll_offset_y);
+ DESERIALIZE_INTO_OPT(top_controls_visible_height);
+ DESERIALIZE_INTO_OPT(resource_utilization);
+ DESERIALIZE_INTO_OPT(frame_rate);
+ DESERIALIZE_INTO_OPT(rtp_timestamp);
+
+ if (input.has_rotation()) {
+ media::VideoRotation rotation;
+ if (!input.ReadRotation(&rotation))
+ return false;
+
+ output->rotation = rotation;
+ }
+
+ READ_AND_ASSIGN_OPT(base::UnguessableToken, overlay_plane_id, OverlayPlaneId);
+
+ READ_AND_ASSIGN_OPT(gfx::Rect, capture_update_rect, CaptureUpdateRect);
+
+ READ_AND_ASSIGN_OPT(base::TimeTicks, receive_time, ReceiveTime);
+ READ_AND_ASSIGN_OPT(base::TimeTicks, capture_begin_time, CaptureBeginTime);
+ READ_AND_ASSIGN_OPT(base::TimeTicks, capture_end_time, CaptureEndTime);
+ READ_AND_ASSIGN_OPT(base::TimeTicks, decode_begin_time, DecodeBeginTime);
+ READ_AND_ASSIGN_OPT(base::TimeTicks, decode_end_time, DecodeEndTime);
+ READ_AND_ASSIGN_OPT(base::TimeTicks, reference_time, ReferenceTime);
+
+ READ_AND_ASSIGN_OPT(base::TimeDelta, processing_time, ProcessingTime);
+ READ_AND_ASSIGN_OPT(base::TimeDelta, frame_duration, FrameDuration);
+ READ_AND_ASSIGN_OPT(base::TimeDelta, wallclock_frame_duration,
+ WallclockFrameDuration);
+
+ return true;
+}
+
+} // namespace mojo \ No newline at end of file
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h
new file mode 100644
index 00000000000..8b07ef38e1e
--- /dev/null
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h
@@ -0,0 +1,148 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_MOJO_MOJOM_VIDEO_FRAME_METADATA_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_VIDEO_FRAME_METADATA_MOJOM_TRAITS_H_
+
+#include "base/memory/ref_counted.h"
+#include "base/optional.h"
+#include "media/base/ipc/media_param_traits_macros.h"
+#include "media/base/video_frame_metadata.h"
+#include "media/mojo/mojom/media_types.mojom-shared.h"
+#include "media/mojo/mojom/media_types_enum_mojom_traits.h"
+#include "mojo/public/cpp/bindings/struct_traits.h"
+#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
+
+namespace mojo {
+
+// Creates a has_foo() and a foo() to serialize a foo base::Optional<>.
+#define GENERATE_OPT_SERIALIZATION(type, field, default_value) \
+ static bool has_##field(const media::VideoFrameMetadata& input) { \
+ return input.field.has_value(); \
+ } \
+ \
+ static type field(const media::VideoFrameMetadata& input) { \
+ return input.field.value_or(default_value); \
+ }
+
+template <>
+struct StructTraits<media::mojom::VideoFrameMetadataDataView,
+ media::VideoFrameMetadata> {
+ static bool allow_overlay(const media::VideoFrameMetadata& input) {
+ return input.allow_overlay;
+ }
+
+ static bool copy_required(const media::VideoFrameMetadata& input) {
+ return input.copy_required;
+ }
+
+ static bool end_of_stream(const media::VideoFrameMetadata& input) {
+ return input.end_of_stream;
+ }
+
+ static bool texture_owner(const media::VideoFrameMetadata& input) {
+ return input.texture_owner;
+ }
+
+ static bool wants_promotion_hint(const media::VideoFrameMetadata& input) {
+ return input.wants_promotion_hint;
+ }
+
+ static bool protected_video(const media::VideoFrameMetadata& input) {
+ return input.protected_video;
+ }
+
+ static bool hw_protected(const media::VideoFrameMetadata& input) {
+ return input.hw_protected;
+ }
+
+ static bool power_efficient(const media::VideoFrameMetadata& input) {
+ return input.power_efficient;
+ }
+
+ static bool read_lock_fences_enabled(const media::VideoFrameMetadata& input) {
+ return input.read_lock_fences_enabled;
+ }
+
+ static bool interactive_content(const media::VideoFrameMetadata& input) {
+ return input.interactive_content;
+ }
+
+ GENERATE_OPT_SERIALIZATION(int, capture_counter, 0)
+
+ GENERATE_OPT_SERIALIZATION(media::VideoRotation,
+ rotation,
+ media::VideoRotation::VIDEO_ROTATION_0)
+
+ GENERATE_OPT_SERIALIZATION(double, device_scale_factor, 0.0)
+ GENERATE_OPT_SERIALIZATION(double, page_scale_factor, 0.0)
+ GENERATE_OPT_SERIALIZATION(double, root_scroll_offset_x, 0.0)
+ GENERATE_OPT_SERIALIZATION(double, root_scroll_offset_y, 0.0)
+ GENERATE_OPT_SERIALIZATION(double, top_controls_visible_height, 0.0)
+ GENERATE_OPT_SERIALIZATION(double, resource_utilization, 0.0)
+ GENERATE_OPT_SERIALIZATION(double, frame_rate, 0.0)
+ GENERATE_OPT_SERIALIZATION(double, rtp_timestamp, 0.0)
+
+ static base::Optional<gfx::Rect> capture_update_rect(
+ const media::VideoFrameMetadata& input) {
+ return input.capture_update_rect;
+ }
+
+ static base::Optional<base::UnguessableToken> overlay_plane_id(
+ const media::VideoFrameMetadata& input) {
+ return input.overlay_plane_id;
+ }
+
+ static base::Optional<base::TimeTicks> receive_time(
+ const media::VideoFrameMetadata& input) {
+ return input.receive_time;
+ }
+
+ static base::Optional<base::TimeTicks> capture_begin_time(
+ const media::VideoFrameMetadata& input) {
+ return input.capture_begin_time;
+ }
+
+ static base::Optional<base::TimeTicks> capture_end_time(
+ const media::VideoFrameMetadata& input) {
+ return input.capture_end_time;
+ }
+
+ static base::Optional<base::TimeTicks> decode_begin_time(
+ const media::VideoFrameMetadata& input) {
+ return input.decode_begin_time;
+ }
+
+ static base::Optional<base::TimeTicks> decode_end_time(
+ const media::VideoFrameMetadata& input) {
+ return input.decode_end_time;
+ }
+
+ static base::Optional<base::TimeTicks> reference_time(
+ const media::VideoFrameMetadata& input) {
+ return input.reference_time;
+ }
+
+ static base::Optional<base::TimeDelta> processing_time(
+ const media::VideoFrameMetadata& input) {
+ return input.processing_time;
+ }
+
+ static base::Optional<base::TimeDelta> frame_duration(
+ const media::VideoFrameMetadata& input) {
+ return input.frame_duration;
+ }
+
+ static base::Optional<base::TimeDelta> wallclock_frame_duration(
+ const media::VideoFrameMetadata& input) {
+ return input.wallclock_frame_duration;
+ }
+
+ static bool Read(media::mojom::VideoFrameMetadataDataView input,
+ media::VideoFrameMetadata* output);
+};
+
+} // namespace mojo
+
+#endif // MEDIA_MOJO_MOJOM_VIDEO_FRAME_METADATA_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
new file mode 100644
index 00000000000..4eaa51cc06e
--- /dev/null
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
@@ -0,0 +1,193 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/mojom/video_frame_metadata_mojom_traits.h"
+
+#include "base/bind_helpers.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/test/task_environment.h"
+#include "build/build_config.h"
+#include "media/mojo/mojom/traits_test_service.mojom.h"
+#include "mojo/public/cpp/bindings/receiver_set.h"
+#include "mojo/public/cpp/bindings/remote.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/geometry/rect.h"
+
+namespace media {
+
+namespace {
+
+class VideoFrameMetadataStructTraitsTest
+ : public testing::Test,
+ public media::mojom::VideoFrameMetadataTraitsTestService {
+ public:
+ VideoFrameMetadataStructTraitsTest() = default;
+
+ protected:
+ mojo::Remote<mojom::VideoFrameMetadataTraitsTestService>
+ GetTraitsTestRemote() {
+ mojo::Remote<mojom::VideoFrameMetadataTraitsTestService> remote;
+ traits_test_receivers_.Add(this, remote.BindNewPipeAndPassReceiver());
+ return remote;
+ }
+
+ bool RoundTrip(const VideoFrameMetadata& in, VideoFrameMetadata* out) {
+ mojo::Remote<mojom::VideoFrameMetadataTraitsTestService> remote =
+ GetTraitsTestRemote();
+ return remote->EchoVideoFrameMetadata(in, out);
+ }
+
+ private:
+ void EchoVideoFrameMetadata(
+ const VideoFrameMetadata& vfm,
+ EchoVideoFrameMetadataCallback callback) override {
+ std::move(callback).Run(vfm);
+ }
+
+ base::test::TaskEnvironment task_environment_;
+ mojo::ReceiverSet<VideoFrameMetadataTraitsTestService> traits_test_receivers_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoFrameMetadataStructTraitsTest);
+};
+
+} // namespace
+
+TEST_F(VideoFrameMetadataStructTraitsTest, EmptyMetadata) {
+ VideoFrameMetadata metadata_in;
+ VideoFrameMetadata metadata_out;
+
+ ASSERT_TRUE(RoundTrip(metadata_in, &metadata_out));
+
+ EXPECT_FALSE(metadata_out.capture_counter.has_value());
+ EXPECT_FALSE(metadata_out.capture_update_rect.has_value());
+ EXPECT_FALSE(metadata_out.rotation.has_value());
+ EXPECT_FALSE(metadata_out.allow_overlay);
+ EXPECT_FALSE(metadata_out.copy_required);
+ EXPECT_FALSE(metadata_out.end_of_stream);
+ EXPECT_FALSE(metadata_out.texture_owner);
+ EXPECT_FALSE(metadata_out.wants_promotion_hint);
+ EXPECT_FALSE(metadata_out.protected_video);
+ EXPECT_FALSE(metadata_out.hw_protected);
+ EXPECT_FALSE(metadata_out.power_efficient);
+ EXPECT_FALSE(metadata_out.read_lock_fences_enabled);
+ EXPECT_FALSE(metadata_out.interactive_content);
+ EXPECT_FALSE(metadata_out.overlay_plane_id.has_value());
+ EXPECT_FALSE(metadata_out.device_scale_factor.has_value());
+ EXPECT_FALSE(metadata_out.page_scale_factor.has_value());
+ EXPECT_FALSE(metadata_out.root_scroll_offset_x.has_value());
+ EXPECT_FALSE(metadata_out.root_scroll_offset_y.has_value());
+ EXPECT_FALSE(metadata_out.top_controls_visible_height.has_value());
+ EXPECT_FALSE(metadata_out.resource_utilization.has_value());
+ EXPECT_FALSE(metadata_out.frame_rate.has_value());
+ EXPECT_FALSE(metadata_out.rtp_timestamp.has_value());
+ EXPECT_FALSE(metadata_out.receive_time.has_value());
+ EXPECT_FALSE(metadata_out.capture_begin_time.has_value());
+ EXPECT_FALSE(metadata_out.capture_end_time.has_value());
+ EXPECT_FALSE(metadata_out.decode_begin_time.has_value());
+ EXPECT_FALSE(metadata_out.decode_end_time.has_value());
+ EXPECT_FALSE(metadata_out.reference_time.has_value());
+ EXPECT_FALSE(metadata_out.processing_time.has_value());
+ EXPECT_FALSE(metadata_out.frame_duration.has_value());
+ EXPECT_FALSE(metadata_out.wallclock_frame_duration.has_value());
+}
+
+TEST_F(VideoFrameMetadataStructTraitsTest, ValidMetadata) {
+ // Assign a non-default, distinct (when possible), value to all fields, and
+ // make sure values are preserved across serialization.
+ VideoFrameMetadata metadata_in;
+
+ // ints
+ metadata_in.capture_counter = 123;
+
+ // gfx::Rects
+ metadata_in.capture_update_rect = gfx::Rect(12, 34, 360, 480);
+
+ // media::VideoRotations
+ metadata_in.rotation = media::VideoRotation::VIDEO_ROTATION_90;
+
+ // bools
+ metadata_in.allow_overlay = true;
+ metadata_in.copy_required = true;
+ metadata_in.end_of_stream = true;
+ metadata_in.texture_owner = true;
+ metadata_in.wants_promotion_hint = true;
+ metadata_in.protected_video = true;
+ metadata_in.hw_protected = true;
+ metadata_in.power_efficient = true;
+ metadata_in.read_lock_fences_enabled = true;
+ metadata_in.interactive_content = true;
+
+ // base::UnguessableTokens
+ metadata_in.overlay_plane_id = base::UnguessableToken::Create();
+
+ // doubles
+ metadata_in.device_scale_factor = 2.0;
+ metadata_in.page_scale_factor = 2.1;
+ metadata_in.root_scroll_offset_x = 100.2;
+ metadata_in.root_scroll_offset_y = 200.1;
+ metadata_in.top_controls_visible_height = 25.5;
+ metadata_in.resource_utilization = 95.8;
+ metadata_in.frame_rate = 29.94;
+ metadata_in.rtp_timestamp = 1.0;
+
+ // base::TimeTicks
+ base::TimeTicks now = base::TimeTicks::Now();
+ metadata_in.receive_time = now + base::TimeDelta::FromMilliseconds(10);
+ metadata_in.capture_begin_time = now + base::TimeDelta::FromMilliseconds(20);
+ metadata_in.capture_end_time = now + base::TimeDelta::FromMilliseconds(30);
+ metadata_in.decode_begin_time = now + base::TimeDelta::FromMilliseconds(40);
+ metadata_in.decode_end_time = now + base::TimeDelta::FromMilliseconds(50);
+ metadata_in.reference_time = now + base::TimeDelta::FromMilliseconds(60);
+
+ // base::TimeDeltas
+ metadata_in.processing_time = base::TimeDelta::FromMilliseconds(500);
+ metadata_in.frame_duration = base::TimeDelta::FromMilliseconds(16);
+ metadata_in.wallclock_frame_duration = base::TimeDelta::FromMilliseconds(17);
+
+ VideoFrameMetadata metadata_out;
+
+ ASSERT_TRUE(RoundTrip(metadata_in, &metadata_out));
+
+ EXPECT_EQ(metadata_in.capture_counter, metadata_out.capture_counter);
+ EXPECT_EQ(metadata_in.capture_update_rect, metadata_out.capture_update_rect);
+ EXPECT_EQ(metadata_in.rotation, metadata_out.rotation);
+ EXPECT_EQ(metadata_in.allow_overlay, metadata_out.allow_overlay);
+ EXPECT_EQ(metadata_in.copy_required, metadata_out.copy_required);
+ EXPECT_EQ(metadata_in.end_of_stream, metadata_out.end_of_stream);
+ EXPECT_EQ(metadata_in.texture_owner, metadata_out.texture_owner);
+ EXPECT_EQ(metadata_in.wants_promotion_hint,
+ metadata_out.wants_promotion_hint);
+ EXPECT_EQ(metadata_in.protected_video, metadata_out.protected_video);
+ EXPECT_EQ(metadata_in.hw_protected, metadata_out.hw_protected);
+ EXPECT_EQ(metadata_in.power_efficient, metadata_out.power_efficient);
+ EXPECT_EQ(metadata_in.read_lock_fences_enabled,
+ metadata_out.read_lock_fences_enabled);
+ EXPECT_EQ(metadata_in.interactive_content, metadata_out.interactive_content);
+ EXPECT_EQ(metadata_in.overlay_plane_id, metadata_out.overlay_plane_id);
+ EXPECT_EQ(metadata_in.device_scale_factor, metadata_out.device_scale_factor);
+ EXPECT_EQ(metadata_in.page_scale_factor, metadata_out.page_scale_factor);
+ EXPECT_EQ(metadata_in.root_scroll_offset_x,
+ metadata_out.root_scroll_offset_x);
+ EXPECT_EQ(metadata_in.root_scroll_offset_y,
+ metadata_out.root_scroll_offset_y);
+ EXPECT_EQ(metadata_in.top_controls_visible_height,
+ metadata_out.top_controls_visible_height);
+ EXPECT_EQ(metadata_in.resource_utilization,
+ metadata_out.resource_utilization);
+ EXPECT_EQ(metadata_in.frame_rate, metadata_out.frame_rate);
+ EXPECT_EQ(metadata_in.rtp_timestamp, metadata_out.rtp_timestamp);
+ EXPECT_EQ(metadata_in.receive_time, metadata_out.receive_time);
+ EXPECT_EQ(metadata_in.capture_begin_time, metadata_out.capture_begin_time);
+ EXPECT_EQ(metadata_in.capture_end_time, metadata_out.capture_end_time);
+ EXPECT_EQ(metadata_in.decode_begin_time, metadata_out.decode_begin_time);
+ EXPECT_EQ(metadata_in.decode_end_time, metadata_out.decode_end_time);
+ EXPECT_EQ(metadata_in.reference_time, metadata_out.reference_time);
+ EXPECT_EQ(metadata_in.processing_time, metadata_out.processing_time);
+ EXPECT_EQ(metadata_in.frame_duration, metadata_out.frame_duration);
+ EXPECT_EQ(metadata_in.wallclock_frame_duration,
+ metadata_out.wallclock_frame_duration);
+}
+
+} // namespace media
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
index b2c93afe712..0f07949191d 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
@@ -15,8 +15,8 @@
#include "media/base/format_utils.h"
#include "media/mojo/common/mojo_shared_buffer_video_frame.h"
#include "media/mojo/mojom/hdr_metadata_mojom_traits.h"
+#include "media/mojo/mojom/video_frame_metadata_mojom_traits.h"
#include "mojo/public/cpp/base/time_mojom_traits.h"
-#include "mojo/public/cpp/base/values_mojom_traits.h"
#include "mojo/public/cpp/system/handle.h"
#include "ui/gfx/mojom/buffer_types_mojom_traits.h"
#include "ui/gfx/mojom/color_space_mojom_traits.h"
@@ -31,7 +31,7 @@ namespace {
media::mojom::VideoFrameDataPtr MakeVideoFrameData(
const media::VideoFrame* input) {
- if (input->metadata()->IsTrue(media::VideoFrameMetadata::END_OF_STREAM)) {
+ if (input->metadata()->end_of_stream) {
return media::mojom::VideoFrameData::NewEosData(
media::mojom::EosVideoFrameData::New());
}
@@ -275,11 +275,11 @@ bool StructTraits<media::mojom::VideoFrameDataView,
if (!frame)
return false;
- base::Value metadata;
+ media::VideoFrameMetadata metadata;
if (!input.ReadMetadata(&metadata))
return false;
- frame->metadata()->MergeInternalValuesFrom(metadata);
+ frame->set_metadata(metadata);
gfx::ColorSpace color_space;
if (!input.ReadColorSpace(&color_space))
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits.h b/chromium/media/mojo/mojom/video_frame_mojom_traits.h
index c064e76b79a..461bd9c6d3b 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits.h
@@ -75,9 +75,11 @@ struct StructTraits<media::mojom::VideoFrameDataView,
static media::mojom::VideoFrameDataPtr data(
const scoped_refptr<media::VideoFrame>& input);
- static const base::Value& metadata(
+ // TODO(https://crbug.com/1096727): Change VideoFrame::Metadata() to return a
+ // const &.
+ static const media::VideoFrameMetadata& metadata(
const scoped_refptr<media::VideoFrame>& input) {
- return input->metadata()->GetInternalValues();
+ return *(input->metadata());
}
static bool Read(media::mojom::VideoFrameDataView input,
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
index 329fbcb7cc2..aaf290e2ddb 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
@@ -77,7 +77,7 @@ TEST_F(VideoFrameStructTraitsTest, EOS) {
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
- EXPECT_TRUE(frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_TRUE(frame->metadata()->end_of_stream);
}
TEST_F(VideoFrameStructTraitsTest, MojoSharedBufferVideoFrame) {
@@ -86,15 +86,12 @@ TEST_F(VideoFrameStructTraitsTest, MojoSharedBufferVideoFrame) {
scoped_refptr<VideoFrame> frame =
MojoSharedBufferVideoFrame::CreateDefaultForTesting(
format, gfx::Size(100, 100), base::TimeDelta::FromSeconds(100));
- frame->metadata()->SetDouble(VideoFrameMetadata::FRAME_RATE, 42.0);
+ frame->metadata()->frame_rate = 42.0;
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
- EXPECT_FALSE(frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
- double frame_rate = 0.0;
- EXPECT_TRUE(frame->metadata()->GetDouble(VideoFrameMetadata::FRAME_RATE,
- &frame_rate));
- EXPECT_EQ(frame_rate, 42.0);
+ EXPECT_FALSE(frame->metadata()->end_of_stream);
+ EXPECT_EQ(*frame->metadata()->frame_rate, 42.0);
EXPECT_EQ(frame->coded_size(), gfx::Size(100, 100));
EXPECT_EQ(frame->timestamp(), base::TimeDelta::FromSeconds(100));
@@ -129,7 +126,7 @@ TEST_F(VideoFrameStructTraitsTest, DmabufVideoFrame) {
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
- EXPECT_FALSE(frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_FALSE(frame->metadata()->end_of_stream);
EXPECT_EQ(frame->format(), PIXEL_FORMAT_NV12);
EXPECT_EQ(frame->coded_size(), gfx::Size(1280, 720));
EXPECT_EQ(frame->visible_rect(), gfx::Rect(0, 0, 1280, 720));
@@ -151,7 +148,7 @@ TEST_F(VideoFrameStructTraitsTest, MailboxVideoFrame) {
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
- EXPECT_FALSE(frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_FALSE(frame->metadata()->end_of_stream);
EXPECT_EQ(frame->format(), PIXEL_FORMAT_ARGB);
EXPECT_EQ(frame->coded_size(), gfx::Size(100, 100));
EXPECT_EQ(frame->visible_rect(), gfx::Rect(10, 10, 80, 80));
@@ -185,7 +182,7 @@ TEST_F(VideoFrameStructTraitsTest, GpuMemoryBufferVideoFrame) {
ASSERT_TRUE(frame);
ASSERT_EQ(frame->storage_type(), VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
EXPECT_TRUE(frame->HasGpuMemoryBuffer());
- EXPECT_FALSE(frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ EXPECT_FALSE(frame->metadata()->end_of_stream);
EXPECT_EQ(frame->format(), PIXEL_FORMAT_NV12);
EXPECT_EQ(frame->coded_size(), coded_size);
EXPECT_EQ(frame->visible_rect(), visible_rect);
diff --git a/chromium/media/mojo/mojom/video_transformation_mojom_traits.h b/chromium/media/mojo/mojom/video_transformation_mojom_traits.h
index f788f6fc6a0..11ddd8126c9 100644
--- a/chromium/media/mojo/mojom/video_transformation_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_transformation_mojom_traits.h
@@ -8,6 +8,7 @@
#include "media/base/ipc/media_param_traits.h"
#include "media/base/video_transformation.h"
#include "media/mojo/mojom/media_types.mojom.h"
+#include "media/mojo/mojom/media_types_enum_mojom_traits.h"
namespace mojo {
diff --git a/chromium/media/mojo/services/BUILD.gn b/chromium/media/mojo/services/BUILD.gn
index 3f39a35e113..f91fa57a6bf 100644
--- a/chromium/media/mojo/services/BUILD.gn
+++ b/chromium/media/mojo/services/BUILD.gn
@@ -56,6 +56,8 @@ jumbo_component("services") {
"mojo_video_encode_accelerator_provider.h",
"mojo_video_encode_accelerator_service.cc",
"mojo_video_encode_accelerator_service.h",
+ "playback_events_recorder.cc",
+ "playback_events_recorder.h",
"test_mojo_media_client.cc",
"test_mojo_media_client.h",
"video_decode_perf_history.cc",
@@ -109,10 +111,6 @@ jumbo_component("services") {
]
}
- if (is_fuchsia) {
- deps += [ "//media/fuchsia/metrics" ]
- }
-
if (enable_media_drm_storage) {
sources += [
"mojo_media_drm_storage.cc",
@@ -155,6 +153,7 @@ source_set("unit_tests") {
"mojo_audio_output_stream_provider_unittest.cc",
"mojo_audio_output_stream_unittest.cc",
"mojo_video_encode_accelerator_service_unittest.cc",
+ "playback_events_recorder_test.cc",
"test_helpers.cc",
"test_helpers.h",
"video_decode_perf_history_unittest.cc",
diff --git a/chromium/media/mojo/services/cdm_service_unittest.cc b/chromium/media/mojo/services/cdm_service_unittest.cc
index 02aec3d22e6..b98189d7b2b 100644
--- a/chromium/media/mojo/services/cdm_service_unittest.cc
+++ b/chromium/media/mojo/services/cdm_service_unittest.cc
@@ -17,7 +17,6 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "url/gurl.h"
-#include "url/origin.h"
namespace media {
@@ -33,7 +32,6 @@ MATCHER_P(MatchesResult, success, "") {
const char kClearKeyKeySystem[] = "org.w3.clearkey";
const char kInvalidKeySystem[] = "invalid.key.system";
-const char kSecurityOrigin[] = "https://foo.com";
class MockCdmServiceClient : public media::CdmService::Client {
public:
@@ -99,10 +97,9 @@ class CdmServiceTest : public testing::Test {
&CdmServiceTest::CdmConnectionClosed, base::Unretained(this)));
EXPECT_CALL(*this, OnCdmInitialized(MatchesResult(expected_result), _, _))
.WillOnce(InvokeWithoutArgs(&run_loop, &base::RunLoop::Quit));
- cdm_remote_->Initialize(
- key_system, url::Origin::Create(GURL(kSecurityOrigin)), CdmConfig(),
- base::BindOnce(&CdmServiceTest::OnCdmInitialized,
- base::Unretained(this)));
+ cdm_remote_->Initialize(key_system, CdmConfig(),
+ base::BindOnce(&CdmServiceTest::OnCdmInitialized,
+ base::Unretained(this)));
run_loop.Run();
}
diff --git a/chromium/media/mojo/services/deferred_destroy_unique_receiver_set.h b/chromium/media/mojo/services/deferred_destroy_unique_receiver_set.h
index 5e974fcf489..e7801c90625 100644
--- a/chromium/media/mojo/services/deferred_destroy_unique_receiver_set.h
+++ b/chromium/media/mojo/services/deferred_destroy_unique_receiver_set.h
@@ -11,6 +11,7 @@
#include <memory>
#include "base/bind.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/memory/weak_ptr.h"
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.cc b/chromium/media/mojo/services/gpu_mojo_media_client.cc
index 293591767dc..16a146b921c 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.cc
@@ -243,7 +243,7 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
// requiring the provider to set |is_vulkan| in the ImageRecord.
auto frame_info_helper =
FrameInfoHelper::Create(gpu_task_runner_, std::move(get_stub_cb));
- video_decoder = std::make_unique<MediaCodecVideoDecoder>(
+ video_decoder = MediaCodecVideoDecoder::Create(
gpu_preferences_, gpu_feature_info_, media_log->Clone(),
DeviceInfo::GetInstance(),
CodecAllocator::GetInstance(gpu_task_runner_),
@@ -269,7 +269,7 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
command_buffer_id->route_id));
video_decoder = ChromeosVideoDecoderFactory::Create(
task_runner, std::move(frame_pool), std::move(frame_converter),
- gpu_memory_buffer_factory_);
+ media_log->Clone());
} else {
video_decoder = VdaVideoDecoder::Create(
task_runner, gpu_task_runner_, media_log->Clone(),
diff --git a/chromium/media/mojo/services/media_metrics_provider.cc b/chromium/media/mojo/services/media_metrics_provider.cc
index e0920fce86b..3b641080dde 100644
--- a/chromium/media/mojo/services/media_metrics_provider.cc
+++ b/chromium/media/mojo/services/media_metrics_provider.cc
@@ -11,6 +11,7 @@
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "build/build_config.h"
+#include "build/chromecast_buildflags.h"
#include "media/learning/mojo/mojo_learning_task_controller_service.h"
#include "media/mojo/services/video_decode_stats_recorder.h"
#include "media/mojo/services/watch_time_recorder.h"
@@ -22,9 +23,9 @@
#include "media/filters/decrypting_video_decoder.h"
#endif // !defined(OS_ANDROID)
-#if defined(OS_FUCHSIA)
-#include "media/fuchsia/metrics/fuchsia_playback_events_recorder.h"
-#endif // defined(OS_FUCHSIA)
+#if defined(OS_FUCHSIA) || (BUILDFLAG(IS_CHROMECAST) && defined(OS_ANDROID))
+#include "media/mojo/services/playback_events_recorder.h"
+#endif
namespace media {
@@ -50,7 +51,7 @@ MediaMetricsProvider::MediaMetricsProvider(
source_id_(source_id),
origin_(origin),
save_cb_(std::move(save_cb)),
- learning_session_cb_(learning_session_cb),
+ learning_session_cb_(std::move(learning_session_cb)),
record_playback_cb_(std::move(record_playback_cb)),
uma_info_(is_incognito == BrowsingMode::kIncognito) {}
@@ -89,37 +90,31 @@ std::string MediaMetricsProvider::GetUMANameForAVStream(
const PipelineInfo& player_info) {
constexpr char kPipelineUmaPrefix[] = "Media.PipelineStatus.AudioVideo.";
std::string uma_name = kPipelineUmaPrefix;
- if (player_info.video_codec == kCodecVP8) {
+ if (player_info.video_codec == kCodecVP8)
uma_name += "VP8.";
- } else if (player_info.video_codec == kCodecVP9) {
+ else if (player_info.video_codec == kCodecVP9)
uma_name += "VP9.";
- } else if (player_info.video_codec == kCodecH264) {
+ else if (player_info.video_codec == kCodecH264)
uma_name += "H264.";
- } else if (player_info.video_codec == kCodecAV1) {
+ else if (player_info.video_codec == kCodecAV1)
uma_name += "AV1.";
- } else {
+ else
return uma_name + "Other";
- }
#if !defined(OS_ANDROID)
if (player_info.video_pipeline_info.decoder_name ==
- media::DecryptingVideoDecoder::kDecoderName) {
+ DecryptingVideoDecoder::kDecoderName) {
return uma_name + "DVD";
}
#endif
- if (player_info.video_pipeline_info.has_decrypting_demuxer_stream) {
+ if (player_info.video_pipeline_info.has_decrypting_demuxer_stream)
uma_name += "DDS.";
- }
// Note that HW essentially means 'platform' anyway. MediaCodec has been
// reported as HW forever, regardless of the underlying platform
// implementation.
- if (player_info.video_pipeline_info.is_platform_decoder) {
- uma_name += "HW";
- } else {
- uma_name += "SW";
- }
+ uma_name += player_info.video_pipeline_info.is_platform_decoder ? "HW" : "SW";
return uma_name;
}
@@ -127,22 +122,22 @@ void MediaMetricsProvider::ReportPipelineUMA() {
if (uma_info_.has_video && uma_info_.has_audio) {
base::UmaHistogramExactLinear(GetUMANameForAVStream(uma_info_),
uma_info_.last_pipeline_status,
- media::PIPELINE_STATUS_MAX + 1);
+ PIPELINE_STATUS_MAX + 1);
} else if (uma_info_.has_audio) {
base::UmaHistogramExactLinear("Media.PipelineStatus.AudioOnly",
uma_info_.last_pipeline_status,
- media::PIPELINE_STATUS_MAX + 1);
+ PIPELINE_STATUS_MAX + 1);
} else if (uma_info_.has_video) {
base::UmaHistogramExactLinear("Media.PipelineStatus.VideoOnly",
uma_info_.last_pipeline_status,
- media::PIPELINE_STATUS_MAX + 1);
+ PIPELINE_STATUS_MAX + 1);
} else {
// Note: This metric can be recorded as a result of normal operation with
// Media Source Extensions. If a site creates a MediaSource object but never
// creates a source buffer or appends data, PIPELINE_OK will be recorded.
base::UmaHistogramExactLinear("Media.PipelineStatus.Unsupported",
uma_info_.last_pipeline_status,
- media::PIPELINE_STATUS_MAX + 1);
+ PIPELINE_STATUS_MAX + 1);
}
// Report whether video decoder fallback happened, but only if a video decoder
@@ -154,15 +149,13 @@ void MediaMetricsProvider::ReportPipelineUMA() {
// Report whether this player ever saw a playback event. Used to measure the
// effectiveness of efforts to reduce loaded-but-never-used players.
- if (uma_info_.has_reached_have_enough) {
+ if (uma_info_.has_reached_have_enough)
base::UmaHistogramBoolean("Media.HasEverPlayed", uma_info_.has_ever_played);
- }
// Report whether an encrypted playback is in incognito window, excluding
// never-used players.
- if (uma_info_.is_eme && uma_info_.has_ever_played) {
+ if (uma_info_.is_eme && uma_info_.has_ever_played)
base::UmaHistogramBoolean("Media.EME.IsIncognito", uma_info_.is_incognito);
- }
}
// static
@@ -178,7 +171,8 @@ void MediaMetricsProvider::Create(
mojo::MakeSelfOwnedReceiver(
std::make_unique<MediaMetricsProvider>(
is_incognito, is_top_frame, get_source_id_cb.Run(),
- get_origin_cb.Run(), std::move(save_cb), learning_session_cb,
+ get_origin_cb.Run(), std::move(save_cb),
+ std::move(learning_session_cb),
std::move(get_record_playback_cb).Run()),
std::move(receiver));
}
@@ -296,15 +290,14 @@ void MediaMetricsProvider::AcquireVideoDecodeStatsRecorder(
void MediaMetricsProvider::AcquirePlaybackEventsRecorder(
mojo::PendingReceiver<mojom::PlaybackEventsRecorder> receiver) {
-#if defined(OS_FUCHSIA)
- FuchsiaPlaybackEventsRecorder::Create(std::move(receiver));
+#if defined(OS_FUCHSIA) || (BUILDFLAG(IS_CHROMECAST) && defined(OS_ANDROID))
+ PlaybackEventsRecorder::Create(std::move(receiver));
#endif
}
void MediaMetricsProvider::AcquireLearningTaskController(
const std::string& taskName,
- mojo::PendingReceiver<media::learning::mojom::LearningTaskController>
- receiver) {
+ mojo::PendingReceiver<learning::mojom::LearningTaskController> receiver) {
learning::LearningSession* session = learning_session_cb_.Run();
if (!session) {
DVLOG(3) << __func__ << " Ignoring request, unable to get LearningSession.";
diff --git a/chromium/media/mojo/services/media_metrics_provider.h b/chromium/media/mojo/services/media_metrics_provider.h
index c9857f2c771..ef0ce4b523a 100644
--- a/chromium/media/mojo/services/media_metrics_provider.h
+++ b/chromium/media/mojo/services/media_metrics_provider.h
@@ -125,8 +125,8 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
mojo::PendingReceiver<mojom::PlaybackEventsRecorder> receiver) override;
void AcquireLearningTaskController(
const std::string& taskName,
- mojo::PendingReceiver<media::learning::mojom::LearningTaskController>
- receiver) override;
+ mojo::PendingReceiver<learning::mojom::LearningTaskController> receiver)
+ override;
void ReportPipelineUMA();
std::string GetUMANameForAVStream(const PipelineInfo& player_info);
diff --git a/chromium/media/mojo/services/media_service_unittest.cc b/chromium/media/mojo/services/media_service_unittest.cc
index ef5f39687a0..085efa69c07 100644
--- a/chromium/media/mojo/services/media_service_unittest.cc
+++ b/chromium/media/mojo/services/media_service_unittest.cc
@@ -11,8 +11,8 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/task/post_task.h"
#include "base/test/task_environment.h"
+#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/base/cdm_config.h"
#include "media/base/mock_filters.h"
@@ -35,7 +35,6 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "url/gurl.h"
-#include "url/origin.h"
namespace media {
@@ -59,8 +58,6 @@ const char kClearKeyKeySystem[] = "org.w3.clearkey";
const char kInvalidKeySystem[] = "invalid.key.system";
#endif
-const char kSecurityOrigin[] = "https://foo.com";
-
class MockRendererClient : public mojom::RendererClient {
public:
MockRendererClient() = default;
@@ -90,7 +87,8 @@ class MockRendererClient : public mojom::RendererClient {
};
ACTION_P(QuitLoop, run_loop) {
- base::PostTask(FROM_HERE, run_loop->QuitClosure());
+ base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE,
+ run_loop->QuitClosure());
}
// Tests MediaService using TestMojoMediaClient, which supports CDM creation
@@ -139,8 +137,7 @@ class MediaServiceTest : public testing::Test {
// cdm_id" out and then call DoAll.
EXPECT_CALL(*this, OnCdmInitialized(MatchesResult(expected_result), _, _))
.WillOnce(WithArg<1>(DoAll(SaveArg<0>(&cdm_id), QuitLoop(&run_loop))));
- cdm_->Initialize(key_system, url::Origin::Create(GURL(kSecurityOrigin)),
- CdmConfig(),
+ cdm_->Initialize(key_system, CdmConfig(),
base::BindOnce(&MediaServiceTest::OnCdmInitialized,
base::Unretained(this)));
run_loop.Run();
diff --git a/chromium/media/mojo/services/mojo_audio_decoder_service.h b/chromium/media/mojo/services/mojo_audio_decoder_service.h
index c3ec39b1c1e..b51beb83992 100644
--- a/chromium/media/mojo/services/mojo_audio_decoder_service.h
+++ b/chromium/media/mojo/services/mojo_audio_decoder_service.h
@@ -20,7 +20,6 @@
namespace media {
-class CdmContextRef;
class MojoCdmServiceContext;
class MojoDecoderBufferReader;
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
index 04fa66ef823..4bc9f34f0a3 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
@@ -7,15 +7,16 @@
#include <utility>
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/run_loop.h"
#include "base/test/mock_callback.h"
#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/audio/audio_output_delegate.h"
#include "media/base/audio_parameters.h"
-#include "mojo/core/embedder/embedder.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/remote.h"
+#include "mojo/public/cpp/system/functions.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -75,7 +76,7 @@ std::unique_ptr<AudioOutputDelegate> CreateFakeDelegate(
TEST(MojoAudioOutputStreamProviderTest, AcquireTwice_BadMessage) {
base::test::SingleThreadTaskEnvironment task_environment;
bool got_bad_message = false;
- mojo::core::SetDefaultProcessErrorCallback(
+ mojo::SetDefaultProcessErrorHandler(
base::BindRepeating([](bool* got_bad_message,
const std::string& s) { *got_bad_message = true; },
&got_bad_message));
@@ -104,15 +105,14 @@ TEST(MojoAudioOutputStreamProviderTest, AcquireTwice_BadMessage) {
EXPECT_TRUE(got_bad_message);
Mock::VerifyAndClear(&deleter);
- mojo::core::SetDefaultProcessErrorCallback(
- mojo::core::ProcessErrorCallback());
+ mojo::SetDefaultProcessErrorHandler(base::NullCallback());
}
TEST(MojoAudioOutputStreamProviderTest,
Bitstream_BadMessageOnNonAndoirdPlatforms) {
base::test::SingleThreadTaskEnvironment task_environment;
bool got_bad_message = false;
- mojo::core::SetDefaultProcessErrorCallback(
+ mojo::SetDefaultProcessErrorHandler(
base::BindRepeating([](bool* got_bad_message,
const std::string& s) { *got_bad_message = true; },
&got_bad_message));
@@ -145,8 +145,7 @@ TEST(MojoAudioOutputStreamProviderTest,
EXPECT_TRUE(got_bad_message);
Mock::VerifyAndClear(&deleter);
#endif
- mojo::core::SetDefaultProcessErrorCallback(
- mojo::core::ProcessErrorCallback());
+ mojo::SetDefaultProcessErrorHandler(base::NullCallback());
}
} // namespace media
diff --git a/chromium/media/mojo/services/mojo_cdm_helper.cc b/chromium/media/mojo/services/mojo_cdm_helper.cc
index 2d5761e44e5..ac7c20ea8e5 100644
--- a/chromium/media/mojo/services/mojo_cdm_helper.cc
+++ b/chromium/media/mojo/services/mojo_cdm_helper.cc
@@ -4,6 +4,7 @@
#include "media/mojo/services/mojo_cdm_helper.h"
+#include "base/macros.h"
#include "base/stl_util.h"
#include "media/base/cdm_context.h"
#include "media/cdm/cdm_helpers.h"
@@ -37,6 +38,15 @@ cdm::FileIO* MojoCdmHelper::CreateCdmFileIO(cdm::FileIOClient* client) {
return cdm_file_io;
}
+url::Origin MojoCdmHelper::GetCdmOrigin() {
+ url::Origin cdm_origin;
+ // Since the CDM is created asynchronously, by the time this function is
+ // called, the render frame host in the browser process may already be gone.
+ // It's safe to ignore the error since the origin is used for crash reporting.
+ ignore_result(frame_interfaces_->GetCdmOrigin(&cdm_origin));
+ return cdm_origin;
+}
+
cdm::Buffer* MojoCdmHelper::CreateCdmBuffer(size_t capacity) {
return GetAllocator()->CreateCdmBuffer(capacity);
}
diff --git a/chromium/media/mojo/services/mojo_cdm_helper.h b/chromium/media/mojo/services/mojo_cdm_helper.h
index 25bf7d13790..0676ec9f1e6 100644
--- a/chromium/media/mojo/services/mojo_cdm_helper.h
+++ b/chromium/media/mojo/services/mojo_cdm_helper.h
@@ -35,6 +35,7 @@ class MEDIA_MOJO_EXPORT MojoCdmHelper final : public CdmAuxiliaryHelper,
// CdmAuxiliaryHelper implementation.
void SetFileReadCB(FileReadCB file_read_cb) final;
cdm::FileIO* CreateCdmFileIO(cdm::FileIOClient* client) final;
+ url::Origin GetCdmOrigin() final;
cdm::Buffer* CreateCdmBuffer(size_t capacity) final;
std::unique_ptr<VideoFrameImpl> CreateCdmVideoFrame() final;
void QueryStatus(QueryStatusCB callback) final;
diff --git a/chromium/media/mojo/services/mojo_cdm_helper_unittest.cc b/chromium/media/mojo/services/mojo_cdm_helper_unittest.cc
index 1b52d254b27..b41e3160d72 100644
--- a/chromium/media/mojo/services/mojo_cdm_helper_unittest.cc
+++ b/chromium/media/mojo/services/mojo_cdm_helper_unittest.cc
@@ -15,6 +15,7 @@
#include "mojo/public/cpp/bindings/self_owned_receiver.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "url/origin.h"
using Status = cdm::FileIOClient::Status;
@@ -73,6 +74,7 @@ class TestFrameInterfaceFactory : public mojom::FrameInterfaceFactory {
mojo::MakeSelfOwnedReceiver(std::make_unique<MockCdmStorage>(),
std::move(receiver));
}
+ void GetCdmOrigin(GetCdmOriginCallback callback) override {}
void BindEmbedderReceiver(mojo::GenericPendingReceiver) override {}
};
diff --git a/chromium/media/mojo/services/mojo_cdm_service.cc b/chromium/media/mojo/services/mojo_cdm_service.cc
index 16e60b82eb9..ba20eea6c0f 100644
--- a/chromium/media/mojo/services/mojo_cdm_service.cc
+++ b/chromium/media/mojo/services/mojo_cdm_service.cc
@@ -19,7 +19,6 @@
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
-#include "url/origin.h"
namespace media {
@@ -58,7 +57,6 @@ void MojoCdmService::SetClient(
}
void MojoCdmService::Initialize(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
InitializeCallback callback) {
DVLOG(1) << __func__ << ": " << key_system;
@@ -68,7 +66,7 @@ void MojoCdmService::Initialize(const std::string& key_system,
auto weak_this = weak_factory_.GetWeakPtr();
cdm_factory_->Create(
- key_system, security_origin, cdm_config,
+ key_system, cdm_config,
base::Bind(&MojoCdmService::OnSessionMessage, weak_this),
base::Bind(&MojoCdmService::OnSessionClosed, weak_this),
base::Bind(&MojoCdmService::OnSessionKeysChange, weak_this),
diff --git a/chromium/media/mojo/services/mojo_cdm_service.h b/chromium/media/mojo/services/mojo_cdm_service.h
index 20903a51b13..c38939f3966 100644
--- a/chromium/media/mojo/services/mojo_cdm_service.h
+++ b/chromium/media/mojo/services/mojo_cdm_service.h
@@ -47,7 +47,6 @@ class MEDIA_MOJO_EXPORT MojoCdmService : public mojom::ContentDecryptionModule {
mojo::PendingAssociatedRemote<mojom::ContentDecryptionModuleClient>
client) final;
void Initialize(const std::string& key_system,
- const url::Origin& security_origin,
const CdmConfig& cdm_config,
InitializeCallback callback) final;
void SetServerCertificate(const std::vector<uint8_t>& certificate_data,
diff --git a/chromium/media/mojo/services/mojo_media_client.h b/chromium/media/mojo/services/mojo_media_client.h
index 03263ee3c63..0888eb397d1 100644
--- a/chromium/media/mojo/services/mojo_media_client.h
+++ b/chromium/media/mojo/services/mojo_media_client.h
@@ -42,6 +42,10 @@ using SupportedVideoDecoderConfigMap =
base::flat_map<VideoDecoderImplementation,
std::vector<SupportedVideoDecoderConfig>>;
+// Provides a way for MediaService to create concrete (e.g. platform specific)
+// media components’ implementations. When MediaService is created, a
+// MojoMediaClient must be passed in so that MediaService knows how to create
+// the media components.
class MEDIA_MOJO_EXPORT MojoMediaClient {
public:
// Called before the host application is scheduled to quit.
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.cc b/chromium/media/mojo/services/mojo_video_decoder_service.cc
index bd3b8202578..c35d0b555c2 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.cc
@@ -337,7 +337,7 @@ void MojoVideoDecoderService::OnDecoderOutput(scoped_refptr<VideoFrame> frame) {
// All MojoVideoDecoder-based decoders are hardware decoders. If you're the
// first to implement an out-of-process decoder that is not power efficent,
// you can remove this DCHECK.
- DCHECK(frame->metadata()->IsTrue(VideoFrameMetadata::POWER_EFFICIENT));
+ DCHECK(frame->metadata()->power_efficient);
base::Optional<base::UnguessableToken> release_token;
if (frame->HasReleaseMailboxCB() && video_frame_handle_releaser_) {
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.h b/chromium/media/mojo/services/mojo_video_decoder_service.h
index 871b9aea43a..1285874f083 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.h
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.h
@@ -26,7 +26,6 @@
namespace media {
-class CdmContextRef;
class DecoderBuffer;
class MojoCdmServiceContext;
class MojoDecoderBufferReader;
diff --git a/chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder.cc b/chromium/media/mojo/services/playback_events_recorder.cc
index d883b08b38a..6080dca225a 100644
--- a/chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder.cc
+++ b/chromium/media/mojo/services/playback_events_recorder.cc
@@ -2,9 +2,11 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/fuchsia/metrics/fuchsia_playback_events_recorder.h"
+#include "media/mojo/services/playback_events_recorder.h"
#include "base/metrics/user_metrics.h"
+#include "base/strings/strcat.h"
+#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"
#include "mojo/public/cpp/bindings/self_owned_receiver.h"
@@ -15,8 +17,9 @@ namespace {
void RecordEventWithValueAt(const char* name,
int64_t value,
base::TimeTicks time) {
- base::RecordComputedActionAt(
- base::StringPrintf("WebEngine.Media.%s:%ld", name, value), time);
+ base::RecordComputedActionAt(base::StrCat({"WebEngine.Media.", name, ":",
+ base::NumberToString(value)}),
+ time);
}
void RecordEventWithValue(const char* name, int64_t value) {
@@ -28,10 +31,10 @@ constexpr base::TimeDelta kBitrateReportPeriod =
} // namespace
-FuchsiaPlaybackEventsRecorder::BitrateEstimator::BitrateEstimator() {}
-FuchsiaPlaybackEventsRecorder::BitrateEstimator::~BitrateEstimator() {}
+PlaybackEventsRecorder::BitrateEstimator::BitrateEstimator() {}
+PlaybackEventsRecorder::BitrateEstimator::~BitrateEstimator() {}
-void FuchsiaPlaybackEventsRecorder::BitrateEstimator::Update(
+void PlaybackEventsRecorder::BitrateEstimator::Update(
const PipelineStatistics& stats) {
base::TimeTicks now = base::TimeTicks::Now();
@@ -63,38 +66,38 @@ void FuchsiaPlaybackEventsRecorder::BitrateEstimator::Update(
last_stats_time_ = now;
}
-void FuchsiaPlaybackEventsRecorder::BitrateEstimator::OnPause() {
+void PlaybackEventsRecorder::BitrateEstimator::OnPause() {
last_stats_ = {};
}
// static
-void FuchsiaPlaybackEventsRecorder::Create(
+void PlaybackEventsRecorder::Create(
mojo::PendingReceiver<mojom::PlaybackEventsRecorder> receiver) {
- mojo::MakeSelfOwnedReceiver(std::make_unique<FuchsiaPlaybackEventsRecorder>(),
+ mojo::MakeSelfOwnedReceiver(std::make_unique<PlaybackEventsRecorder>(),
std::move(receiver));
}
-FuchsiaPlaybackEventsRecorder::FuchsiaPlaybackEventsRecorder() = default;
-FuchsiaPlaybackEventsRecorder::~FuchsiaPlaybackEventsRecorder() = default;
+PlaybackEventsRecorder::PlaybackEventsRecorder() = default;
+PlaybackEventsRecorder::~PlaybackEventsRecorder() = default;
-void FuchsiaPlaybackEventsRecorder::OnPlaying() {
+void PlaybackEventsRecorder::OnPlaying() {
base::RecordComputedAction("WebEngine.Media.Playing");
}
-void FuchsiaPlaybackEventsRecorder::OnPaused() {
+void PlaybackEventsRecorder::OnPaused() {
base::RecordComputedAction("WebEngine.Media.Pause");
bitrate_estimator_.OnPause();
}
-void FuchsiaPlaybackEventsRecorder::OnSeeking() {
+void PlaybackEventsRecorder::OnSeeking() {
buffering_state_ = BufferingState::kInitialBuffering;
}
-void FuchsiaPlaybackEventsRecorder::OnEnded() {
+void PlaybackEventsRecorder::OnEnded() {
base::RecordComputedAction("WebEngine.Media.Ended");
}
-void FuchsiaPlaybackEventsRecorder::OnBuffering() {
+void PlaybackEventsRecorder::OnBuffering() {
DCHECK(buffering_state_ == BufferingState::kBuffered);
buffering_start_time_ = base::TimeTicks::Now();
@@ -103,7 +106,7 @@ void FuchsiaPlaybackEventsRecorder::OnBuffering() {
bitrate_estimator_.OnPause();
}
-void FuchsiaPlaybackEventsRecorder::OnBufferingComplete() {
+void PlaybackEventsRecorder::OnBufferingComplete() {
auto now = base::TimeTicks::Now();
if (buffering_state_ == BufferingState::kBuffering) {
@@ -121,17 +124,17 @@ void FuchsiaPlaybackEventsRecorder::OnBufferingComplete() {
last_buffering_end_time_ = now;
}
-void FuchsiaPlaybackEventsRecorder::OnError(PipelineStatus status) {
+void PlaybackEventsRecorder::OnError(PipelineStatus status) {
RecordEventWithValue("Error", status);
}
-void FuchsiaPlaybackEventsRecorder::OnNaturalSizeChanged(
- const gfx::Size& size) {
+void PlaybackEventsRecorder::OnNaturalSizeChanged(const gfx::Size& size) {
+ int encoded_video_resolution = (size.width() << 16) | size.height();
base::RecordComputedAction(base::StringPrintf(
- "WebEngine.Media.VideoResolution:%dx%d", size.width(), size.height()));
+ "WebEngine.Media.VideoResolution:%d", encoded_video_resolution));
}
-void FuchsiaPlaybackEventsRecorder::OnPipelineStatistics(
+void PlaybackEventsRecorder::OnPipelineStatistics(
const PipelineStatistics& stats) {
bitrate_estimator_.Update(stats);
}
diff --git a/chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder.h b/chromium/media/mojo/services/playback_events_recorder.h
index 2686bafec9e..25b5e7a3c51 100644
--- a/chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder.h
+++ b/chromium/media/mojo/services/playback_events_recorder.h
@@ -2,26 +2,27 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_FUCHSIA_METRICS_FUCHSIA_PLAYBACK_EVENTS_RECORDER_H_
-#define MEDIA_FUCHSIA_METRICS_FUCHSIA_PLAYBACK_EVENTS_RECORDER_H_
+#ifndef MEDIA_MOJO_SERVICES_PLAYBACK_EVENTS_RECORDER_H_
+#define MEDIA_MOJO_SERVICES_PLAYBACK_EVENTS_RECORDER_H_
#include "media/mojo/mojom/playback_events_recorder.mojom.h"
+#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/remote.h"
namespace media {
-class FuchsiaPlaybackEventsRecorder : public mojom::PlaybackEventsRecorder {
+class MEDIA_MOJO_EXPORT PlaybackEventsRecorder : public mojom::PlaybackEventsRecorder {
public:
static void Create(
mojo::PendingReceiver<mojom::PlaybackEventsRecorder> receiver);
- FuchsiaPlaybackEventsRecorder();
- ~FuchsiaPlaybackEventsRecorder() final;
+ PlaybackEventsRecorder();
+ ~PlaybackEventsRecorder() final;
- FuchsiaPlaybackEventsRecorder(const FuchsiaPlaybackEventsRecorder&) = delete;
- FuchsiaPlaybackEventsRecorder& operator=(
- const FuchsiaPlaybackEventsRecorder&) = delete;
+ PlaybackEventsRecorder(const PlaybackEventsRecorder&) = delete;
+ PlaybackEventsRecorder& operator=(const PlaybackEventsRecorder&) =
+ delete;
// mojom::PlaybackEventsRecorder implementation.
void OnPlaying() final;
@@ -67,4 +68,4 @@ class FuchsiaPlaybackEventsRecorder : public mojom::PlaybackEventsRecorder {
} // namespace media
-#endif // MEDIA_FUCHSIA_METRICS_FUCHSIA_PLAYBACK_EVENTS_RECORDER_H_ \ No newline at end of file
+#endif // MEDIA_MOJO_SERVICES_PLAYBACK_EVENTS_RECORDER_H_
diff --git a/chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder_test.cc b/chromium/media/mojo/services/playback_events_recorder_test.cc
index a18704575a2..5d41dc2c48c 100644
--- a/chromium/media/fuchsia/metrics/fuchsia_playback_events_recorder_test.cc
+++ b/chromium/media/mojo/services/playback_events_recorder_test.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/fuchsia/metrics/fuchsia_playback_events_recorder.h"
+#include "media/mojo/services/playback_events_recorder.h"
#include "base/metrics/user_metrics.h"
#include "base/test/simple_test_tick_clock.h"
@@ -13,9 +13,9 @@ namespace media {
constexpr base::TimeDelta kSecond = base::TimeDelta::FromSeconds(1);
-class FuchsiaPlaybackEventsRecorderTest : public testing::Test {
+class PlaybackEventsRecorderTest : public testing::Test {
public:
- FuchsiaPlaybackEventsRecorderTest()
+ PlaybackEventsRecorderTest()
: task_environment_(base::test::TaskEnvironment::MainThreadType::IO,
base::test::TaskEnvironment::TimeSource::MOCK_TIME) {
time_base_ = base::TimeTicks::Now();
@@ -23,11 +23,11 @@ class FuchsiaPlaybackEventsRecorderTest : public testing::Test {
base::SetRecordActionTaskRunner(
task_environment_.GetMainThreadTaskRunner());
action_callback_ = base::BindRepeating(
- &FuchsiaPlaybackEventsRecorderTest::OnAction, base::Unretained(this));
+ &PlaybackEventsRecorderTest::OnAction, base::Unretained(this));
base::AddActionCallback(action_callback_);
}
- ~FuchsiaPlaybackEventsRecorderTest() override {
+ ~PlaybackEventsRecorderTest() override {
base::RemoveActionCallback(action_callback_);
}
@@ -61,24 +61,25 @@ class FuchsiaPlaybackEventsRecorderTest : public testing::Test {
base::TimeTicks time_base_;
base::ActionCallback action_callback_;
- FuchsiaPlaybackEventsRecorder recorder_;
+ PlaybackEventsRecorder recorder_;
std::vector<Event> recorded_events_;
};
-TEST_F(FuchsiaPlaybackEventsRecorderTest, PlayPause) {
+TEST_F(PlaybackEventsRecorderTest, PlayPause) {
recorder_.OnNaturalSizeChanged(gfx::Size(640, 480));
recorder_.OnPlaying();
task_environment_.AdvanceClock(2 * kSecond);
recorder_.OnPaused();
ExpectEvents({
- {time_base_, "WebEngine.Media.VideoResolution:640x480"},
+ // VideoResolution value should be encoded as (640 << 16) + 480.
+ {time_base_, "WebEngine.Media.VideoResolution:41943520"},
{time_base_, "WebEngine.Media.Playing"},
{time_base_ + 2 * kSecond, "WebEngine.Media.Pause"},
});
}
-TEST_F(FuchsiaPlaybackEventsRecorderTest, Error) {
+TEST_F(PlaybackEventsRecorderTest, Error) {
recorder_.OnPlaying();
task_environment_.AdvanceClock(2 * kSecond);
recorder_.OnError(PIPELINE_ERROR_DECODE);
@@ -89,7 +90,7 @@ TEST_F(FuchsiaPlaybackEventsRecorderTest, Error) {
});
}
-TEST_F(FuchsiaPlaybackEventsRecorderTest, Buffering) {
+TEST_F(PlaybackEventsRecorderTest, Buffering) {
recorder_.OnPlaying();
recorder_.OnBufferingComplete();
task_environment_.AdvanceClock(2 * kSecond);
@@ -105,7 +106,7 @@ TEST_F(FuchsiaPlaybackEventsRecorderTest, Buffering) {
});
}
-TEST_F(FuchsiaPlaybackEventsRecorderTest, Bitrate) {
+TEST_F(PlaybackEventsRecorderTest, Bitrate) {
recorder_.OnPlaying();
recorder_.OnBufferingComplete();
@@ -127,7 +128,7 @@ TEST_F(FuchsiaPlaybackEventsRecorderTest, Bitrate) {
});
}
-TEST_F(FuchsiaPlaybackEventsRecorderTest, BitrateAfterPause) {
+TEST_F(PlaybackEventsRecorderTest, BitrateAfterPause) {
recorder_.OnPlaying();
recorder_.OnBufferingComplete();
@@ -163,7 +164,7 @@ TEST_F(FuchsiaPlaybackEventsRecorderTest, BitrateAfterPause) {
});
}
-TEST_F(FuchsiaPlaybackEventsRecorderTest, BitrateAfterBuffering) {
+TEST_F(PlaybackEventsRecorderTest, BitrateAfterBuffering) {
recorder_.OnPlaying();
recorder_.OnBufferingComplete();
@@ -199,4 +200,4 @@ TEST_F(FuchsiaPlaybackEventsRecorderTest, BitrateAfterBuffering) {
{time_base_ + 16 * kSecond, "WebEngine.Media.VideoBitrate:80"},
});
}
-} // namespace media \ No newline at end of file
+} // namespace media
diff --git a/chromium/media/muxers/webm_muxer.cc b/chromium/media/muxers/webm_muxer.cc
index d0b3f4a6339..c257bc59fd1 100644
--- a/chromium/media/muxers/webm_muxer.cc
+++ b/chromium/media/muxers/webm_muxer.cc
@@ -8,6 +8,7 @@
#include <memory>
#include "base/bind.h"
+#include "base/logging.h"
#include "media/base/audio_parameters.h"
#include "media/base/limits.h"
#include "media/base/video_frame.h"
@@ -137,12 +138,9 @@ base::Optional<mkvmuxer::Colour> ColorFromColorSpace(
WebmMuxer::VideoParameters::VideoParameters(
scoped_refptr<media::VideoFrame> frame)
: visible_rect_size(frame->visible_rect().size()),
- frame_rate(0.0),
+ frame_rate(frame->metadata()->frame_rate.value_or(0.0)),
codec(kUnknownVideoCodec),
- color_space(frame->ColorSpace()) {
- ignore_result(frame->metadata()->GetDouble(VideoFrameMetadata::FRAME_RATE,
- &frame_rate));
-}
+ color_space(frame->ColorSpace()) {}
WebmMuxer::VideoParameters::VideoParameters(
gfx::Size visible_rect_size,
diff --git a/chromium/media/parsers/vp8_bool_decoder.cc b/chromium/media/parsers/vp8_bool_decoder.cc
index 17607732f69..fb715e5ea29 100644
--- a/chromium/media/parsers/vp8_bool_decoder.cc
+++ b/chromium/media/parsers/vp8_bool_decoder.cc
@@ -82,15 +82,14 @@ static const unsigned char kVp8Norm[256] = {
};
Vp8BoolDecoder::Vp8BoolDecoder()
- : user_buffer_(NULL),
- user_buffer_end_(NULL),
+ : user_buffer_(nullptr),
+ user_buffer_end_(nullptr),
value_(0),
count_(-8),
- range_(255) {
-}
+ range_(255) {}
bool Vp8BoolDecoder::Initialize(const uint8_t* data, size_t size) {
- if (data == NULL || size == 0)
+ if (data == nullptr || size == 0)
return false;
user_buffer_start_ = data;
user_buffer_ = data;
@@ -102,7 +101,7 @@ bool Vp8BoolDecoder::Initialize(const uint8_t* data, size_t size) {
}
void Vp8BoolDecoder::FillDecoder() {
- DCHECK(user_buffer_ != NULL);
+ DCHECK(user_buffer_ != nullptr);
int shift = VP8_BD_VALUE_BIT - CHAR_BIT - (count_ + CHAR_BIT);
size_t bytes_left = user_buffer_end_ - user_buffer_;
size_t bits_left = bytes_left * CHAR_BIT;
diff --git a/chromium/media/parsers/vp8_bool_decoder.h b/chromium/media/parsers/vp8_bool_decoder.h
index 0f407cfbcc9..db3daade283 100644
--- a/chromium/media/parsers/vp8_bool_decoder.h
+++ b/chromium/media/parsers/vp8_bool_decoder.h
@@ -47,7 +47,6 @@
#include <stdint.h>
#include <sys/types.h>
-#include "base/logging.h"
#include "base/macros.h"
#include "media/parsers/media_parsers_export.h"
diff --git a/chromium/media/parsers/vp8_parser.cc b/chromium/media/parsers/vp8_parser.cc
index b52c59fd070..8559540395c 100644
--- a/chromium/media/parsers/vp8_parser.cc
+++ b/chromium/media/parsers/vp8_parser.cc
@@ -9,6 +9,7 @@
#include <cstring>
+#include "base/check_op.h"
#include "base/logging.h"
namespace media {
diff --git a/chromium/media/remoting/BUILD.gn b/chromium/media/remoting/BUILD.gn
index f302a0571b0..b01ab5ffd6b 100644
--- a/chromium/media/remoting/BUILD.gn
+++ b/chromium/media/remoting/BUILD.gn
@@ -31,7 +31,7 @@ source_set("rpc") {
public_deps = [ ":media_remoting_proto" ]
}
-source_set("remoting") {
+source_set("remoting_sender") {
sources = [
"courier_renderer_factory.cc",
"courier_renderer_factory.h",
@@ -64,16 +64,48 @@ source_set("remoting") {
}
}
+source_set("remoting_constants") {
+ sources = [ "remoting_constants.h" ]
+}
+
+source_set("remoting_renderer") {
+ sources = [
+ "receiver.cc",
+ "receiver.h",
+ "receiver_controller.cc",
+ "receiver_controller.h",
+ "remoting_renderer_factory.cc",
+ "remoting_renderer_factory.h",
+ "stream_provider.cc",
+ "stream_provider.h",
+ ]
+
+ deps = [
+ ":remoting_constants",
+ ":rpc",
+ "//media/mojo/common:common",
+ "//media/mojo/mojom:remoting",
+ ]
+}
+
source_set("media_remoting_tests") {
testonly = true
sources = [
"fake_remoter.cc",
"fake_remoter.h",
+ "mock_receiver_controller.cc",
+ "mock_receiver_controller.h",
+ "receiver_unittest.cc",
"renderer_controller_unittest.cc",
+ "stream_provider_unittest.cc",
+ "test_utils.cc",
+ "test_utils.h",
]
deps = [
- ":remoting",
+ ":remoting_renderer",
+ ":remoting_sender",
+ ":rpc",
"//base",
"//base/test:test_support",
"//media:test_support",
@@ -94,16 +126,13 @@ source_set("media_remoting_tests") {
"fake_media_resource.h",
"integration_test.cc",
"proto_utils_unittest.cc",
- "receiver.cc",
- "receiver.h",
"rpc_broker_unittest.cc",
- "stream_provider.cc",
- "stream_provider.h",
]
deps += [
":rpc",
"//media/test:pipeline_integration_test_base",
+ "//services/service_manager/public/cpp:cpp",
"//ui/gfx:test_support",
"//ui/gfx/geometry",
]
diff --git a/chromium/media/remoting/courier_renderer.cc b/chromium/media/remoting/courier_renderer.cc
index d820952f0ca..dfd4b8a1e2a 100644
--- a/chromium/media/remoting/courier_renderer.cc
+++ b/chromium/media/remoting/courier_renderer.cc
@@ -74,7 +74,6 @@ CourierRenderer::CourierRenderer(
remote_renderer_handle_(RpcBroker::kInvalidHandle),
video_renderer_sink_(video_renderer_sink),
clock_(base::DefaultTickClock::GetInstance()) {
- VLOG(2) << __func__;
// Note: The constructor is running on the main thread, but will be destroyed
// on the media thread. Therefore, all weak pointers must be dereferenced on
// the media thread.
@@ -85,7 +84,6 @@ CourierRenderer::CourierRenderer(
}
CourierRenderer::~CourierRenderer() {
- VLOG(2) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
// Post task on main thread to unregister message receiver.
@@ -102,7 +100,6 @@ CourierRenderer::~CourierRenderer() {
void CourierRenderer::Initialize(MediaResource* media_resource,
RendererClient* client,
PipelineStatusCallback init_cb) {
- VLOG(2) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(media_resource);
DCHECK(client);
@@ -150,19 +147,10 @@ void CourierRenderer::Initialize(MediaResource* media_resource,
rpc_broker_)));
}
-void CourierRenderer::SetCdm(CdmContext* cdm_context,
- CdmAttachedCB cdm_attached_cb) {
- DCHECK(media_task_runner_->BelongsToCurrentThread());
-
- // Media remoting doesn't support encrypted content.
- NOTIMPLEMENTED();
-}
-
void CourierRenderer::SetLatencyHint(
base::Optional<base::TimeDelta> latency_hint) {}
void CourierRenderer::Flush(base::OnceClosure flush_cb) {
- VLOG(2) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(!flush_cb_);
@@ -188,14 +176,13 @@ void CourierRenderer::Flush(base::OnceClosure flush_cb) {
(video_demuxer_stream_adapter_ && !flush_video_count.has_value()) ||
(audio_demuxer_stream_adapter_ && video_demuxer_stream_adapter_ &&
flush_audio_count.has_value() != flush_video_count.has_value())) {
- VLOG(1) << "Ignoring flush request while under flushing operation";
return;
}
flush_cb_ = std::move(flush_cb);
// Issues RPC_R_FLUSHUNTIL RPC message.
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_renderer_handle_);
rpc->set_proc(pb::RpcMessage::RPC_R_FLUSHUNTIL);
pb::RendererFlushUntil* message = rpc->mutable_renderer_flushuntil_rpc();
@@ -204,15 +191,10 @@ void CourierRenderer::Flush(base::OnceClosure flush_cb) {
if (flush_video_count.has_value())
message->set_video_count(*flush_video_count);
message->set_callback_handle(rpc_handle_);
- VLOG(2) << __func__ << ": Sending RPC_R_FLUSHUNTIL to " << rpc->handle()
- << " with audio_count=" << message->audio_count()
- << ", video_count=" << message->video_count()
- << ", callback_handle=" << message->callback_handle();
SendRpcToRemote(std::move(rpc));
}
void CourierRenderer::StartPlayingFrom(base::TimeDelta time) {
- VLOG(2) << __func__ << ": " << time.InMicroseconds();
DCHECK(media_task_runner_->BelongsToCurrentThread());
if (state_ != STATE_PLAYING) {
@@ -221,12 +203,10 @@ void CourierRenderer::StartPlayingFrom(base::TimeDelta time) {
}
// Issues RPC_R_STARTPLAYINGFROM RPC message.
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_renderer_handle_);
rpc->set_proc(pb::RpcMessage::RPC_R_STARTPLAYINGFROM);
rpc->set_integer64_value(time.InMicroseconds());
- VLOG(2) << __func__ << ": Sending RPC_R_STARTPLAYINGFROM to " << rpc->handle()
- << " with time_usec=" << rpc->integer64_value();
SendRpcToRemote(std::move(rpc));
{
@@ -237,7 +217,6 @@ void CourierRenderer::StartPlayingFrom(base::TimeDelta time) {
}
void CourierRenderer::SetPlaybackRate(double playback_rate) {
- VLOG(2) << __func__ << ": " << playback_rate;
DCHECK(media_task_runner_->BelongsToCurrentThread());
if (state_ != STATE_FLUSHING && state_ != STATE_PLAYING) {
@@ -246,19 +225,16 @@ void CourierRenderer::SetPlaybackRate(double playback_rate) {
}
// Issues RPC_R_SETPLAYBACKRATE RPC message.
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_renderer_handle_);
rpc->set_proc(pb::RpcMessage::RPC_R_SETPLAYBACKRATE);
rpc->set_double_value(playback_rate);
- VLOG(2) << __func__ << ": Sending RPC_R_SETPLAYBACKRATE to " << rpc->handle()
- << " with rate=" << rpc->double_value();
SendRpcToRemote(std::move(rpc));
playback_rate_ = playback_rate;
ResetMeasurements();
}
void CourierRenderer::SetVolume(float volume) {
- VLOG(2) << __func__ << ": " << volume;
DCHECK(media_task_runner_->BelongsToCurrentThread());
if (state_ != STATE_FLUSHING && state_ != STATE_PLAYING) {
@@ -267,12 +243,10 @@ void CourierRenderer::SetVolume(float volume) {
}
// Issues RPC_R_SETVOLUME RPC message.
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_renderer_handle_);
rpc->set_proc(pb::RpcMessage::RPC_R_SETVOLUME);
rpc->set_double_value(volume);
- VLOG(2) << __func__ << ": Sending RPC_R_SETVOLUME to " << rpc->handle()
- << " with volume=" << rpc->double_value();
SendRpcToRemote(std::move(rpc));
}
@@ -314,7 +288,6 @@ void CourierRenderer::OnDataPipeCreated(
mojo::ScopedDataPipeProducerHandle video_handle,
int audio_rpc_handle,
int video_rpc_handle) {
- VLOG(2) << __func__;
DCHECK(media_task_runner_->BelongsToCurrentThread());
if (state_ == STATE_ERROR)
@@ -332,7 +305,6 @@ void CourierRenderer::OnDataPipeCreated(
// Create audio demuxer stream adapter if audio is available.
if (audio_demuxer_stream && audio.is_valid() && audio_handle.is_valid() &&
audio_rpc_handle != RpcBroker::kInvalidHandle) {
- VLOG(2) << "Initialize audio";
audio_demuxer_stream_adapter_.reset(new DemuxerStreamAdapter(
main_task_runner_, media_task_runner_, "audio", audio_demuxer_stream,
rpc_broker_, audio_rpc_handle, std::move(audio),
@@ -344,7 +316,6 @@ void CourierRenderer::OnDataPipeCreated(
// Create video demuxer stream adapter if video is available.
if (video_demuxer_stream && video.is_valid() && video_handle.is_valid() &&
video_rpc_handle != RpcBroker::kInvalidHandle) {
- VLOG(2) << "Initialize video";
video_demuxer_stream_adapter_.reset(new DemuxerStreamAdapter(
main_task_runner_, media_task_runner_, "video", video_demuxer_stream,
rpc_broker_, video_rpc_handle, std::move(video),
@@ -360,13 +331,27 @@ void CourierRenderer::OnDataPipeCreated(
}
state_ = STATE_ACQUIRING;
+
+ // Issues RPC_ACQUIRE_DEMUXER RPC message.
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(RpcBroker::kAcquireDemuxerHandle);
+ rpc->set_proc(pb::RpcMessage::RPC_ACQUIRE_DEMUXER);
+ pb::AcquireDemuxer* message = rpc->mutable_acquire_demuxer_rpc();
+ message->set_audio_demuxer_handle(
+ audio_demuxer_stream_adapter_
+ ? audio_demuxer_stream_adapter_->rpc_handle()
+ : RpcBroker::kInvalidHandle);
+ message->set_video_demuxer_handle(
+ video_demuxer_stream_adapter_
+ ? video_demuxer_stream_adapter_->rpc_handle()
+ : RpcBroker::kInvalidHandle);
+ SendRpcToRemote(std::move(rpc));
+
// Issues RPC_ACQUIRE_RENDERER RPC message.
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
- rpc->set_handle(RpcBroker::kAcquireHandle);
+ rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(RpcBroker::kAcquireRendererHandle);
rpc->set_proc(pb::RpcMessage::RPC_ACQUIRE_RENDERER);
rpc->set_integer_value(rpc_handle_);
- VLOG(2) << __func__ << ": Sending RPC_ACQUIRE_RENDERER to " << rpc->handle()
- << " with rpc_handle=" << rpc->integer_value();
SendRpcToRemote(std::move(rpc));
}
@@ -403,11 +388,9 @@ void CourierRenderer::OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message) {
OnBufferingStateChange(std::move(message));
break;
case pb::RpcMessage::RPC_RC_ONENDED:
- VLOG(2) << __func__ << ": Received RPC_RC_ONENDED.";
client_->OnEnded();
break;
case pb::RpcMessage::RPC_RC_ONERROR:
- VLOG(2) << __func__ << ": Received RPC_RC_ONERROR.";
OnFatalError(RECEIVER_PIPELINE_ERROR);
break;
case pb::RpcMessage::RPC_RC_ONAUDIOCONFIGCHANGE:
@@ -426,12 +409,11 @@ void CourierRenderer::OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message) {
OnStatisticsUpdate(std::move(message));
break;
case pb::RpcMessage::RPC_RC_ONWAITINGFORDECRYPTIONKEY:
- VLOG(2) << __func__ << ": Received RPC_RC_ONWAITINGFORDECRYPTIONKEY.";
client_->OnWaiting(WaitingReason::kNoDecryptionKey);
break;
default:
- VLOG(1) << "Unknown RPC: " << message->proc();
+ DVLOG(1) << "Unknown RPC: " << message->proc();
}
}
@@ -449,19 +431,15 @@ void CourierRenderer::AcquireRendererDone(
DCHECK(message);
remote_renderer_handle_ = message->integer_value();
- VLOG(2) << __func__
- << ": Received RPC_ACQUIRE_RENDERER_DONE with remote_renderer_handle="
- << remote_renderer_handle_;
if (state_ != STATE_ACQUIRING || init_workflow_done_callback_.is_null()) {
- LOG(WARNING) << "Unexpected acquire renderer done RPC.";
OnFatalError(PEERS_OUT_OF_SYNC);
return;
}
state_ = STATE_INITIALIZING;
// Issues RPC_R_INITIALIZE RPC message to initialize renderer.
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_renderer_handle_);
rpc->set_proc(pb::RpcMessage::RPC_R_INITIALIZE);
pb::RendererInitialize* init = rpc->mutable_renderer_initialize_rpc();
@@ -475,11 +453,6 @@ void CourierRenderer::AcquireRendererDone(
? video_demuxer_stream_adapter_->rpc_handle()
: RpcBroker::kInvalidHandle);
init->set_callback_handle(rpc_handle_);
- VLOG(2) << __func__ << ": Sending RPC_R_INITIALIZE to " << rpc->handle()
- << " with client_handle=" << init->client_handle()
- << ", audio_demuxer_handle=" << init->audio_demuxer_handle()
- << ", video_demuxer_handle=" << init->video_demuxer_handle()
- << ", callback_handle=" << init->callback_handle();
SendRpcToRemote(std::move(rpc));
}
@@ -489,11 +462,7 @@ void CourierRenderer::InitializeCallback(
DCHECK(message);
const bool success = message->boolean_value();
- VLOG(2) << __func__
- << ": Received RPC_R_INITIALIZE_CALLBACK with success=" << success;
-
if (state_ != STATE_INITIALIZING || init_workflow_done_callback_.is_null()) {
- LOG(WARNING) << "Unexpected initialize callback RPC.";
OnFatalError(PEERS_OUT_OF_SYNC);
return;
}
@@ -511,10 +480,8 @@ void CourierRenderer::InitializeCallback(
void CourierRenderer::FlushUntilCallback() {
DCHECK(media_task_runner_->BelongsToCurrentThread());
- VLOG(2) << __func__ << ": Received RPC_R_FLUSHUNTIL_CALLBACK";
if (state_ != STATE_FLUSHING || !flush_cb_) {
- LOG(WARNING) << "Unexpected flushuntil callback RPC.";
OnFatalError(PEERS_OUT_OF_SYNC);
return;
}
@@ -531,9 +498,6 @@ void CourierRenderer::FlushUntilCallback() {
void CourierRenderer::SetCdmCallback(std::unique_ptr<pb::RpcMessage> message) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(message);
- VLOG(2) << __func__ << ": Received RPC_R_SETCDM_CALLBACK with cdm_id="
- << message->renderer_set_cdm_rpc().cdm_id() << ", callback_handle="
- << message->renderer_set_cdm_rpc().callback_handle();
// TODO(erickung): add implementation once Remote CDM implementation is done.
NOTIMPLEMENTED();
}
@@ -543,7 +507,6 @@ void CourierRenderer::OnTimeUpdate(std::unique_ptr<pb::RpcMessage> message) {
DCHECK(message);
// Shutdown remoting session if receiving malformed RPC message.
if (!message->has_rendererclient_ontimeupdate_rpc()) {
- VLOG(1) << __func__ << " missing required RPC message";
OnFatalError(RPC_INVALID);
return;
}
@@ -551,9 +514,6 @@ void CourierRenderer::OnTimeUpdate(std::unique_ptr<pb::RpcMessage> message) {
message->rendererclient_ontimeupdate_rpc().time_usec();
const int64_t max_time_usec =
message->rendererclient_ontimeupdate_rpc().max_time_usec();
- VLOG(2) << __func__
- << ": Received RPC_RC_ONTIMEUPDATE with time_usec=" << time_usec
- << ", max_time_usec=" << max_time_usec;
// Ignores invalid time, such as negative value, or time larger than max value
// (usually the time stamp that all streams are pushed into AV pipeline).
if (time_usec < 0 || max_time_usec < 0 || time_usec > max_time_usec)
@@ -575,12 +535,9 @@ void CourierRenderer::OnBufferingStateChange(
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(message);
if (!message->has_rendererclient_onbufferingstatechange_rpc()) {
- VLOG(1) << __func__ << " missing required RPC message";
OnFatalError(RPC_INVALID);
return;
}
- VLOG(2) << __func__ << ": Received RPC_RC_ONBUFFERINGSTATECHANGE with state="
- << message->rendererclient_onbufferingstatechange_rpc().state();
base::Optional<BufferingState> state = ToMediaBufferingState(
message->rendererclient_onbufferingstatechange_rpc().state());
BufferingStateChangeReason reason = BUFFERING_CHANGE_REASON_UNKNOWN;
@@ -605,7 +562,6 @@ void CourierRenderer::OnAudioConfigChange(
DCHECK(message);
// Shutdown remoting session if receiving malformed RPC message.
if (!message->has_rendererclient_onaudioconfigchange_rpc()) {
- VLOG(1) << __func__ << " missing required RPC message";
OnFatalError(RPC_INVALID);
return;
}
@@ -618,8 +574,6 @@ void CourierRenderer::OnAudioConfigChange(
ConvertProtoToAudioDecoderConfig(pb_audio_config, &out_audio_config);
DCHECK(out_audio_config.IsValidConfig());
- VLOG(2) << __func__ << ": Received RPC_RC_ONAUDIOCONFIGCHANGE with config:"
- << out_audio_config.AsHumanReadableString();
client_->OnAudioConfigChange(out_audio_config);
}
@@ -629,7 +583,6 @@ void CourierRenderer::OnVideoConfigChange(
DCHECK(message);
// Shutdown remoting session if receiving malformed RPC message.
if (!message->has_rendererclient_onvideoconfigchange_rpc()) {
- VLOG(1) << __func__ << " missing required RPC message";
OnFatalError(RPC_INVALID);
return;
}
@@ -642,8 +595,6 @@ void CourierRenderer::OnVideoConfigChange(
ConvertProtoToVideoDecoderConfig(pb_video_config, &out_video_config);
DCHECK(out_video_config.IsValidConfig());
- VLOG(2) << __func__ << ": Received RPC_RC_ONVIDEOCONFIGCHANGE with config:"
- << out_video_config.AsHumanReadableString();
client_->OnVideoConfigChange(out_video_config);
}
@@ -653,14 +604,11 @@ void CourierRenderer::OnVideoNaturalSizeChange(
DCHECK(message);
// Shutdown remoting session if receiving malformed RPC message.
if (!message->has_rendererclient_onvideonatualsizechange_rpc()) {
- VLOG(1) << __func__ << " missing required RPC message";
OnFatalError(RPC_INVALID);
return;
}
const auto& size_change =
message->rendererclient_onvideonatualsizechange_rpc();
- VLOG(2) << __func__ << ": Received RPC_RC_ONVIDEONATURALSIZECHANGE with size="
- << size_change.width() << 'x' << size_change.height();
if (size_change.width() <= 0 || size_change.height() <= 0)
return;
client_->OnVideoNaturalSizeChange(
@@ -672,8 +620,6 @@ void CourierRenderer::OnVideoOpacityChange(
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(message);
const bool opaque = message->boolean_value();
- VLOG(2) << __func__
- << ": Received RPC_RC_ONVIDEOOPACITYCHANGE with opaque=" << opaque;
client_->OnVideoOpacityChange(opaque);
}
@@ -683,7 +629,6 @@ void CourierRenderer::OnStatisticsUpdate(
DCHECK(message);
// Shutdown remoting session if receiving malformed RPC message.
if (!message->has_rendererclient_onstatisticsupdate_rpc()) {
- VLOG(1) << __func__ << " missing required RPC message";
OnFatalError(RPC_INVALID);
return;
}
@@ -691,15 +636,6 @@ void CourierRenderer::OnStatisticsUpdate(
ConvertProtoToPipelineStatistics(
message->rendererclient_onstatisticsupdate_rpc(), &stats);
// Note: Each field in |stats| is a delta, not the aggregate amount.
- VLOG(2) << __func__
- << ": Received RPC_RC_ONSTATISTICSUPDATE with audio_bytes_decoded="
- << stats.audio_bytes_decoded
- << ", video_bytes_decoded=" << stats.video_bytes_decoded
- << ", video_frames_decoded=" << stats.video_frames_decoded
- << ", video_frames_dropped=" << stats.video_frames_dropped
- << ", audio_memory_usage=" << stats.audio_memory_usage
- << ", video_memory_usage=" << stats.video_memory_usage;
-
if (stats.audio_bytes_decoded > 0 || stats.video_frames_decoded > 0 ||
stats.video_frames_dropped > 0) {
metrics_recorder_.OnEvidenceOfPlayoutAtReceiver();
@@ -712,8 +648,6 @@ void CourierRenderer::OnFatalError(StopTrigger stop_trigger) {
DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK_NE(UNKNOWN_STOP_TRIGGER, stop_trigger);
- VLOG(2) << __func__ << " with StopTrigger " << stop_trigger;
-
// If this is the first error, notify the controller. It is expected the
// controller will cause this renderer to shut down shortly.
if (state_ != STATE_ERROR) {
@@ -761,9 +695,6 @@ void CourierRenderer::OnMediaTimeUpdated() {
playback_rate_;
if ((media_duration - update_duration).magnitude() >=
kMediaPlaybackDelayThreshold) {
- VLOG(1) << "Irregular playback detected: Media playback delayed."
- << " media_duration = " << media_duration
- << " update_duration = " << update_duration;
++times_playback_delayed_;
if (times_playback_delayed_ == kPlaybackDelayCountThreshold)
OnFatalError(PACING_TOO_SLOWLY);
@@ -807,9 +738,6 @@ void CourierRenderer::UpdateVideoStatsQueue(int video_frames_decoded,
if (sum_video_frames_decoded_ &&
sum_video_frames_dropped_ * 100 >
sum_video_frames_decoded_ * kMaxNumVideoFramesDroppedPercentage) {
- VLOG(1) << "Irregular playback detected: Too many video frames dropped."
- << " video_frames_decoded= " << sum_video_frames_decoded_
- << " video_frames_dropped= " << sum_video_frames_dropped_;
OnFatalError(FRAME_DROP_RATE_HIGH);
}
// Prune |video_stats_queue_|.
diff --git a/chromium/media/remoting/courier_renderer.h b/chromium/media/remoting/courier_renderer.h
index 75111e4174c..38e5e338a44 100644
--- a/chromium/media/remoting/courier_renderer.h
+++ b/chromium/media/remoting/courier_renderer.h
@@ -5,8 +5,6 @@
#ifndef MEDIA_REMOTING_COURIER_RENDERER_H_
#define MEDIA_REMOTING_COURIER_RENDERER_H_
-#include <stdint.h>
-
#include <memory>
#include "base/callback.h"
@@ -20,6 +18,7 @@
#include "media/base/pipeline_status.h"
#include "media/base/renderer.h"
#include "media/mojo/mojom/remoting.mojom.h"
+#include "media/remoting/media_remoting_rpc.pb.h"
#include "media/remoting/metrics.h"
#include "media/remoting/rpc_broker.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
@@ -74,7 +73,6 @@ class CourierRenderer : public Renderer {
void Initialize(MediaResource* media_resource,
RendererClient* client,
PipelineStatusCallback init_cb) final;
- void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) final;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) final;
void Flush(base::OnceClosure flush_cb) final;
void StartPlayingFrom(base::TimeDelta time) final;
diff --git a/chromium/media/remoting/courier_renderer_unittest.cc b/chromium/media/remoting/courier_renderer_unittest.cc
index a51f3dc86ef..4d13c140856 100644
--- a/chromium/media/remoting/courier_renderer_unittest.cc
+++ b/chromium/media/remoting/courier_renderer_unittest.cc
@@ -7,6 +7,7 @@
#include <memory>
#include "base/bind.h"
+#include "base/check.h"
#include "base/run_loop.h"
#include "base/test/simple_test_tick_clock.h"
#include "base/test/task_environment.h"
@@ -20,6 +21,7 @@
#include "media/remoting/proto_enum_utils.h"
#include "media/remoting/proto_utils.h"
#include "media/remoting/renderer_controller.h"
+#include "media/remoting/rpc_broker.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -146,16 +148,7 @@ class RendererClientImpl final : public RendererClient {
class CourierRendererTest : public testing::Test {
public:
- CourierRendererTest()
- : receiver_renderer_handle_(10),
- receiver_audio_demuxer_callback_handle_(11),
- receiver_video_demuxer_callback_handle_(12),
- sender_client_handle_(RpcBroker::kInvalidHandle),
- sender_renderer_callback_handle_(RpcBroker::kInvalidHandle),
- sender_audio_demuxer_handle_(RpcBroker::kInvalidHandle),
- sender_video_demuxer_handle_(RpcBroker::kInvalidHandle),
- received_audio_ds_init_cb_(false),
- received_video_ds_init_cb_(false) {}
+ CourierRendererTest() = default;
~CourierRendererTest() override = default;
// Use this function to mimic receiver to handle RPC message for renderer
@@ -165,40 +158,87 @@ class CourierRendererTest : public testing::Test {
ASSERT_TRUE(rpc->ParseFromArray(message->data(), message->size()));
switch (rpc->proc()) {
case pb::RpcMessage::RPC_ACQUIRE_RENDERER: {
+ DCHECK(rpc->has_integer_value());
+ sender_renderer_handle_ = rpc->integer_value();
// Issues RPC_ACQUIRE_RENDERER_DONE RPC message.
- std::unique_ptr<pb::RpcMessage> acquire_done(new pb::RpcMessage());
- acquire_done->set_handle(rpc->integer_value());
+ auto acquire_done = std::make_unique<pb::RpcMessage>();
+ acquire_done->set_handle(sender_renderer_handle_);
acquire_done->set_proc(pb::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
acquire_done->set_integer_value(receiver_renderer_handle_);
controller_->GetRpcBroker()->ProcessMessageFromRemote(
std::move(acquire_done));
} break;
+ case pb::RpcMessage::RPC_ACQUIRE_DEMUXER: {
+ if (!is_backward_compatible_mode_) {
+ int acquire_demuxer_handle = RpcBroker::kAcquireDemuxerHandle;
+ EXPECT_EQ(rpc->handle(), acquire_demuxer_handle);
+ sender_audio_demuxer_handle_ =
+ rpc->acquire_demuxer_rpc().audio_demuxer_handle();
+ sender_video_demuxer_handle_ =
+ rpc->acquire_demuxer_rpc().video_demuxer_handle();
+
+ // Issues audio RPC_DS_INITIALIZE RPC message.
+ if (sender_audio_demuxer_handle_ != RpcBroker::kInvalidHandle) {
+ auto ds_init = std::make_unique<pb::RpcMessage>();
+ ds_init->set_handle(sender_audio_demuxer_handle_);
+ ds_init->set_proc(pb::RpcMessage::RPC_DS_INITIALIZE);
+ ds_init->set_integer_value(receiver_audio_demuxer_callback_handle_);
+ controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ std::move(ds_init));
+ }
+
+ // Issues video RPC_DS_INITIALIZE RPC message.
+ if (sender_video_demuxer_handle_ != RpcBroker::kInvalidHandle) {
+ auto ds_init = std::make_unique<pb::RpcMessage>();
+ ds_init->set_handle(sender_video_demuxer_handle_);
+ ds_init->set_proc(pb::RpcMessage::RPC_DS_INITIALIZE);
+ ds_init->set_integer_value(receiver_video_demuxer_callback_handle_);
+ controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ std::move(ds_init));
+ }
+ }
+ } break;
case pb::RpcMessage::RPC_R_INITIALIZE: {
- EXPECT_EQ(rpc->handle(), receiver_renderer_handle_);
sender_renderer_callback_handle_ =
rpc->renderer_initialize_rpc().callback_handle();
sender_client_handle_ = rpc->renderer_initialize_rpc().client_handle();
- sender_audio_demuxer_handle_ =
- rpc->renderer_initialize_rpc().audio_demuxer_handle();
- sender_video_demuxer_handle_ =
- rpc->renderer_initialize_rpc().video_demuxer_handle();
-
- // Issues audio RPC_DS_INITIALIZE RPC message.
- if (sender_audio_demuxer_handle_ != RpcBroker::kInvalidHandle) {
- std::unique_ptr<pb::RpcMessage> ds_init(new pb::RpcMessage());
- ds_init->set_handle(sender_audio_demuxer_handle_);
- ds_init->set_proc(pb::RpcMessage::RPC_DS_INITIALIZE);
- ds_init->set_integer_value(receiver_audio_demuxer_callback_handle_);
- controller_->GetRpcBroker()->ProcessMessageFromRemote(
- std::move(ds_init));
- }
- if (sender_video_demuxer_handle_ != RpcBroker::kInvalidHandle) {
- std::unique_ptr<pb::RpcMessage> ds_init(new pb::RpcMessage());
- ds_init->set_handle(sender_video_demuxer_handle_);
- ds_init->set_proc(pb::RpcMessage::RPC_DS_INITIALIZE);
- ds_init->set_integer_value(receiver_video_demuxer_callback_handle_);
+
+ if (is_backward_compatible_mode_) {
+ EXPECT_EQ(rpc->handle(), receiver_renderer_handle_);
+
+ sender_audio_demuxer_handle_ =
+ rpc->renderer_initialize_rpc().audio_demuxer_handle();
+ sender_video_demuxer_handle_ =
+ rpc->renderer_initialize_rpc().video_demuxer_handle();
+
+ // Issues audio RPC_DS_INITIALIZE RPC message.
+ if (sender_audio_demuxer_handle_ != RpcBroker::kInvalidHandle) {
+ auto ds_init = std::make_unique<pb::RpcMessage>();
+ ds_init->set_handle(sender_audio_demuxer_handle_);
+ ds_init->set_proc(pb::RpcMessage::RPC_DS_INITIALIZE);
+ ds_init->set_integer_value(receiver_audio_demuxer_callback_handle_);
+ controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ std::move(ds_init));
+ }
+
+ // Issues video RPC_DS_INITIALIZE RPC message.
+ if (sender_video_demuxer_handle_ != RpcBroker::kInvalidHandle) {
+ auto ds_init = std::make_unique<pb::RpcMessage>();
+ ds_init->set_handle(sender_video_demuxer_handle_);
+ ds_init->set_proc(pb::RpcMessage::RPC_DS_INITIALIZE);
+ ds_init->set_integer_value(receiver_video_demuxer_callback_handle_);
+ controller_->GetRpcBroker()->ProcessMessageFromRemote(
+ std::move(ds_init));
+ }
+ } else {
+ // Issues RPC_R_INITIALIZE_CALLBACK RPC message when receiving
+ // RPC_R_INITIALIZE.
+ auto init_cb = std::make_unique<pb::RpcMessage>();
+ init_cb->set_handle(sender_renderer_callback_handle_);
+ init_cb->set_proc(pb::RpcMessage::RPC_R_INITIALIZE_CALLBACK);
+ init_cb->set_boolean_value(is_successfully_initialized_);
controller_->GetRpcBroker()->ProcessMessageFromRemote(
- std::move(ds_init));
+ std::move(init_cb));
}
} break;
case pb::RpcMessage::RPC_DS_INITIALIZE_CALLBACK: {
@@ -207,20 +247,24 @@ class CourierRendererTest : public testing::Test {
if (rpc->handle() == receiver_video_demuxer_callback_handle_)
received_video_ds_init_cb_ = true;
- // Issues RPC_R_INITIALIZE_CALLBACK RPC message when receiving
- // RPC_DS_INITIALIZE_CALLBACK on available streams.
+ // Check whether the demuxer at the receiver end is initialized.
if (received_audio_ds_init_cb_ ==
(sender_audio_demuxer_handle_ != RpcBroker::kInvalidHandle) &&
received_video_ds_init_cb_ ==
(sender_video_demuxer_handle_ != RpcBroker::kInvalidHandle)) {
- std::unique_ptr<pb::RpcMessage> init_cb(new pb::RpcMessage());
+ is_receiver_demuxer_initialized_ = true;
+ }
+
+ if (is_backward_compatible_mode_ && is_receiver_demuxer_initialized_) {
+ // Issues RPC_R_INITIALIZE_CALLBACK RPC message when receiving
+ // RPC_DS_INITIALIZE_CALLBACK on available streams.
+ auto init_cb = std::make_unique<pb::RpcMessage>();
init_cb->set_handle(sender_renderer_callback_handle_);
init_cb->set_proc(pb::RpcMessage::RPC_R_INITIALIZE_CALLBACK);
init_cb->set_boolean_value(is_successfully_initialized_);
controller_->GetRpcBroker()->ProcessMessageFromRemote(
std::move(init_cb));
}
-
} break;
case pb::RpcMessage::RPC_R_FLUSHUNTIL: {
// Issues RPC_R_FLUSHUNTIL_CALLBACK RPC message.
@@ -269,8 +313,16 @@ class CourierRendererTest : public testing::Test {
RunPendingTasks();
}
+ void InitializeRendererBackwardsCompatible() {
+ is_backward_compatible_mode_ = true;
+ InitializeRenderer();
+ }
+
bool IsRendererInitialized() const {
- return renderer_->state_ == CourierRenderer::STATE_PLAYING;
+ EXPECT_TRUE(received_audio_ds_init_cb_);
+ EXPECT_TRUE(received_video_ds_init_cb_);
+ return renderer_->state_ == CourierRenderer::STATE_PLAYING &&
+ is_receiver_demuxer_initialized_;
}
bool DidEncounterFatalError() const {
@@ -402,17 +454,24 @@ class CourierRendererTest : public testing::Test {
base::SimpleTestTickClock clock_;
// RPC handles.
- const int receiver_renderer_handle_;
- const int receiver_audio_demuxer_callback_handle_;
- const int receiver_video_demuxer_callback_handle_;
- int sender_client_handle_;
- int sender_renderer_callback_handle_;
- int sender_audio_demuxer_handle_;
- int sender_video_demuxer_handle_;
+ const int receiver_renderer_handle_{10};
+ const int receiver_audio_demuxer_callback_handle_{11};
+ const int receiver_video_demuxer_callback_handle_{12};
+ int sender_renderer_handle_;
+ int sender_client_handle_{RpcBroker::kInvalidHandle};
+ int sender_renderer_callback_handle_{RpcBroker::kInvalidHandle};
+ int sender_audio_demuxer_handle_{RpcBroker::kInvalidHandle};
+ int sender_video_demuxer_handle_{RpcBroker::kInvalidHandle};
+
+ // Indicates whether the test runs in backward-compatible mode.
+ bool is_backward_compatible_mode_ = false;
+
+ // Indicates whether the demuxer at receiver is initialized or not.
+ bool is_receiver_demuxer_initialized_ = false;
// Indicate whether RPC_DS_INITIALIZE_CALLBACK RPC messages are received.
- bool received_audio_ds_init_cb_;
- bool received_video_ds_init_cb_;
+ bool received_audio_ds_init_cb_ = false;
+ bool received_video_ds_init_cb_ = false;
// Indicates whether the test wants to simulate successful initialization in
// the renderer on the receiver side.
@@ -433,6 +492,14 @@ TEST_F(CourierRendererTest, Initialize) {
ASSERT_EQ(render_client_->status(), PIPELINE_OK);
}
+TEST_F(CourierRendererTest, InitializeBackwardCompatible) {
+ InitializeRendererBackwardsCompatible();
+ RunPendingTasks();
+
+ ASSERT_TRUE(IsRendererInitialized());
+ ASSERT_EQ(render_client_->status(), PIPELINE_OK);
+}
+
TEST_F(CourierRendererTest, InitializeFailed) {
is_successfully_initialized_ = false;
InitializeRenderer();
diff --git a/chromium/media/remoting/demuxer_stream_adapter.cc b/chromium/media/remoting/demuxer_stream_adapter.cc
index 2efbf3873e1..63b7b9d201d 100644
--- a/chromium/media/remoting/demuxer_stream_adapter.cc
+++ b/chromium/media/remoting/demuxer_stream_adapter.cc
@@ -128,7 +128,9 @@ void DemuxerStreamAdapter::OnReceivedRpc(
case pb::RpcMessage::RPC_DS_ENABLEBITSTREAMCONVERTER:
EnableBitstreamConverter();
break;
-
+ case pb::RpcMessage::RPC_DS_ONERROR:
+ OnFatalError(UNEXPECTED_FAILURE);
+ break;
default:
DEMUXER_VLOG(1) << "Unknown RPC: " << message->proc();
}
diff --git a/chromium/media/remoting/end2end_test_renderer.cc b/chromium/media/remoting/end2end_test_renderer.cc
index f08c848d0c4..932e0ab2fdc 100644
--- a/chromium/media/remoting/end2end_test_renderer.cc
+++ b/chromium/media/remoting/end2end_test_renderer.cc
@@ -4,23 +4,29 @@
#include "media/remoting/end2end_test_renderer.h"
-#include <memory>
-
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/check.h"
+#include "base/notreached.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/demuxer_stream.h"
#include "media/mojo/common/mojo_data_pipe_read_write.h"
+#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/mojo/mojom/remoting.mojom.h"
#include "media/remoting/courier_renderer.h"
#include "media/remoting/proto_utils.h"
#include "media/remoting/receiver.h"
+#include "media/remoting/receiver_controller.h"
#include "media/remoting/renderer_controller.h"
+#include "media/remoting/stream_provider.h"
+#include "media/remoting/test_utils.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
#include "mojo/public/cpp/bindings/self_owned_receiver.h"
+#include "mojo/public/cpp/system/data_pipe.h"
namespace media {
namespace remoting {
@@ -30,7 +36,8 @@ namespace {
class TestStreamSender final : public mojom::RemotingDataStreamSender {
public:
using SendFrameToSinkCallback =
- base::RepeatingCallback<void(const std::vector<uint8_t>& data,
+ base::RepeatingCallback<void(uint32_t frame_count,
+ const std::vector<uint8_t>& data,
DemuxerStream::Type type)>;
TestStreamSender(
mojo::PendingReceiver<mojom::RemotingDataStreamSender> receiver,
@@ -49,19 +56,21 @@ class TestStreamSender final : public mojom::RemotingDataStreamSender {
next_frame_data_.resize(frame_size);
data_pipe_reader_.Read(
next_frame_data_.data(), frame_size,
- base::BindOnce(&TestStreamSender::OnFrameRead, base::Unretained(this)));
+ base::BindOnce(&TestStreamSender::OnFrameRead, base::Unretained(this),
+ frame_count_++));
}
void CancelInFlightData() override { next_frame_data_.resize(0); }
private:
- void OnFrameRead(bool success) {
+ void OnFrameRead(uint32_t count, bool success) {
DCHECK(success);
if (send_frame_to_sink_cb_)
- send_frame_to_sink_cb_.Run(next_frame_data_, type_);
+ send_frame_to_sink_cb_.Run(count, next_frame_data_, type_);
next_frame_data_.resize(0);
}
+ uint32_t frame_count_ = 0;
mojo::Receiver<RemotingDataStreamSender> receiver_;
MojoDataPipeReader data_pipe_reader_;
const DemuxerStream::Type type_;
@@ -153,33 +162,201 @@ std::unique_ptr<RendererController> CreateController(
} // namespace
+class End2EndTestRenderer::TestRemotee : public mojom::Remotee {
+ public:
+ explicit TestRemotee(RendererController* controller)
+ : controller_(controller) {}
+
+ ~TestRemotee() override = default;
+
+ void OnAudioFrame(uint32_t frame_count,
+ scoped_refptr<DecoderBuffer> decoder_buffer) {
+ ::media::mojom::DecoderBufferPtr mojo_buffer =
+ audio_buffer_writer_->WriteDecoderBuffer(std::move(decoder_buffer));
+ audio_stream_->ReceiveFrame(frame_count, std::move(mojo_buffer));
+ }
+
+ void OnVideoFrame(uint32_t frame_count,
+ scoped_refptr<DecoderBuffer> decoder_buffer) {
+ ::media::mojom::DecoderBufferPtr mojo_buffer =
+ video_buffer_writer_->WriteDecoderBuffer(std::move(decoder_buffer));
+ video_stream_->ReceiveFrame(frame_count, std::move(mojo_buffer));
+ }
+
+ void BindMojoReceiver(mojo::PendingReceiver<mojom::Remotee> receiver) {
+ mojo_receiver_.Bind(std::move(receiver));
+ }
+
+ void OnMessage(const std::vector<uint8_t>& message) {
+ receiver_controller_->OnMessageFromSource(message);
+ }
+
+ // mojom::Remotee implementation
+ void OnRemotingSinkReady(
+ mojo::PendingRemote<::media::mojom::RemotingSink> sink) override {
+ receiver_controller_.Bind(std::move(sink));
+ }
+
+ void SendMessageToSource(const std::vector<uint8_t>& message) override {
+ controller_->OnMessageFromSink(message);
+ }
+
+ void StartDataStreams(
+ mojo::PendingRemote<::media::mojom::RemotingDataStreamReceiver>
+ audio_stream,
+ mojo::PendingRemote<::media::mojom::RemotingDataStreamReceiver>
+ video_stream) override {
+ if (audio_stream.is_valid()) {
+ // initialize data pipe for audio data stream receiver
+ mojo::ScopedDataPipeConsumerHandle audio_data_pipe;
+ audio_stream_.Bind(std::move(audio_stream));
+ audio_buffer_writer_ = ::media::MojoDecoderBufferWriter::Create(
+ GetDefaultDecoderBufferConverterCapacity(
+ ::media::DemuxerStream::AUDIO),
+ &audio_data_pipe);
+ audio_stream_->InitializeDataPipe(std::move(audio_data_pipe));
+ }
+
+ if (video_stream.is_valid()) {
+ // initialize data pipe for video data stream receiver
+ mojo::ScopedDataPipeConsumerHandle video_data_pipe;
+ video_stream_.Bind(std::move(video_stream));
+ video_buffer_writer_ = ::media::MojoDecoderBufferWriter::Create(
+ GetDefaultDecoderBufferConverterCapacity(
+ ::media::DemuxerStream::VIDEO),
+ &video_data_pipe);
+ video_stream_->InitializeDataPipe(std::move(video_data_pipe));
+ }
+ }
+
+ void OnFlushUntil(uint32_t audio_frame_count,
+ uint32_t video_frame_count) override {}
+
+ void OnVideoNaturalSizeChange(const gfx::Size& size) override {}
+
+ private:
+ RendererController* controller_;
+
+ std::unique_ptr<MojoDecoderBufferWriter> audio_buffer_writer_;
+ std::unique_ptr<MojoDecoderBufferWriter> video_buffer_writer_;
+
+ mojo::Remote<mojom::RemotingDataStreamReceiver> audio_stream_;
+ mojo::Remote<mojom::RemotingDataStreamReceiver> video_stream_;
+
+ mojo::Remote<mojom::RemotingSink> receiver_controller_;
+ mojo::Receiver<mojom::Remotee> mojo_receiver_{this};
+};
+
End2EndTestRenderer::End2EndTestRenderer(std::unique_ptr<Renderer> renderer)
- : receiver_rpc_broker_(
- base::BindRepeating(&End2EndTestRenderer::OnMessageFromSink,
- base::Unretained(this))),
- receiver_(new Receiver(std::move(renderer), &receiver_rpc_broker_)) {
+ : courier_renderer_initialized_(false), receiver_initialized_(false) {
+ // create sender components
controller_ = CreateController(
base::BindRepeating(&End2EndTestRenderer::SendMessageToSink,
weak_factory_.GetWeakPtr()),
base::BindRepeating(&End2EndTestRenderer::SendFrameToSink,
weak_factory_.GetWeakPtr()));
- courier_renderer_.reset(new CourierRenderer(
- base::ThreadTaskRunnerHandle::Get(), controller_->GetWeakPtr(), nullptr));
+ courier_renderer_ = std::make_unique<CourierRenderer>(
+ base::ThreadTaskRunnerHandle::Get(), controller_->GetWeakPtr(), nullptr);
+
+ // create receiver components
+ media_remotee_ = std::make_unique<TestRemotee>(controller_.get());
+
+ receiver_controller_ = ReceiverController::GetInstance();
+ ResetForTesting(receiver_controller_);
+
+ receiver_rpc_broker_ = receiver_controller_->rpc_broker();
+ receiver_renderer_handle_ = receiver_rpc_broker_->GetUniqueHandle();
+
+ receiver_rpc_broker_->RegisterMessageReceiverCallback(
+ RpcBroker::kAcquireRendererHandle,
+ base::BindRepeating(&End2EndTestRenderer::OnReceivedRpc,
+ weak_factory_.GetWeakPtr()));
+
+ receiver_ = std::make_unique<Receiver>(
+ receiver_renderer_handle_, sender_renderer_handle_, receiver_controller_,
+ base::ThreadTaskRunnerHandle::Get(), std::move(renderer),
+ base::BindOnce(&End2EndTestRenderer::OnAcquireRendererDone,
+ weak_factory_.GetWeakPtr()));
+
+ mojo::PendingRemote<media::mojom::Remotee> remotee;
+ media_remotee_->BindMojoReceiver(remotee.InitWithNewPipeAndPassReceiver());
+ receiver_controller_->Initialize(std::move(remotee));
+ stream_provider_ = std::make_unique<StreamProvider>(
+ receiver_controller_, base::ThreadTaskRunnerHandle::Get());
}
-End2EndTestRenderer::~End2EndTestRenderer() = default;
+End2EndTestRenderer::~End2EndTestRenderer() {
+ receiver_rpc_broker_->UnregisterMessageReceiverCallback(
+ RpcBroker::kAcquireRendererHandle);
+}
void End2EndTestRenderer::Initialize(MediaResource* media_resource,
RendererClient* client,
PipelineStatusCallback init_cb) {
- courier_renderer_->Initialize(media_resource, client, std::move(init_cb));
+ init_cb_ = std::move(init_cb);
+
+ stream_provider_->Initialize(
+ nullptr, base::BindOnce(&End2EndTestRenderer::InitializeReceiverRenderer,
+ weak_factory_.GetWeakPtr()));
+
+ courier_renderer_->Initialize(
+ media_resource, client,
+ base::BindOnce(&End2EndTestRenderer::OnCourierRendererInitialized,
+ weak_factory_.GetWeakPtr()));
+}
+
+void End2EndTestRenderer::InitializeReceiverRenderer(PipelineStatus status) {
+ DCHECK_EQ(PIPELINE_OK, status);
+ receiver_->Initialize(
+ stream_provider_.get(), nullptr,
+ base::BindOnce(&End2EndTestRenderer::OnReceiverInitalized,
+ weak_factory_.GetWeakPtr()));
}
-void End2EndTestRenderer::SetCdm(CdmContext* cdm_context,
- CdmAttachedCB cdc_attached_cb) {
- // TODO(xjz): Add the implementation when media remoting starts supporting
- // encrypted contents.
- NOTIMPLEMENTED() << "Media Remoting doesn't support EME for now.";
+void End2EndTestRenderer::OnCourierRendererInitialized(PipelineStatus status) {
+ DCHECK_EQ(PIPELINE_OK, status);
+ courier_renderer_initialized_ = true;
+ CompleteInitialize();
+}
+
+void End2EndTestRenderer::OnReceiverInitalized(PipelineStatus status) {
+ DCHECK_EQ(PIPELINE_OK, status);
+ receiver_initialized_ = true;
+ CompleteInitialize();
+}
+void End2EndTestRenderer::CompleteInitialize() {
+ if (!courier_renderer_initialized_ || !receiver_initialized_)
+ return;
+
+ DCHECK(init_cb_);
+ std::move(init_cb_).Run(PIPELINE_OK);
+}
+
+void End2EndTestRenderer::OnReceivedRpc(
+ std::unique_ptr<media::remoting::pb::RpcMessage> message) {
+ DCHECK(message);
+ DCHECK_EQ(message->proc(),
+ media::remoting::pb::RpcMessage::RPC_ACQUIRE_RENDERER);
+ OnAcquireRenderer(std::move(message));
+}
+
+void End2EndTestRenderer::OnAcquireRenderer(
+ std::unique_ptr<media::remoting::pb::RpcMessage> message) {
+ DCHECK(message->has_integer_value());
+ DCHECK(message->integer_value() != RpcBroker::kInvalidHandle);
+
+ if (sender_renderer_handle_ == RpcBroker::kInvalidHandle) {
+ sender_renderer_handle_ = message->integer_value();
+ receiver_->SetRemoteHandle(sender_renderer_handle_);
+ }
+}
+
+void End2EndTestRenderer::OnAcquireRendererDone(int receiver_renderer_handle) {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(sender_renderer_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
+ rpc->set_integer_value(receiver_renderer_handle);
+ receiver_rpc_broker_->SendMessageToRemote(std::move(rpc));
}
void End2EndTestRenderer::SetLatencyHint(
@@ -187,6 +364,10 @@ void End2EndTestRenderer::SetLatencyHint(
courier_renderer_->SetLatencyHint(latency_hint);
}
+void End2EndTestRenderer::SetPreservesPitch(bool preserves_pitch) {
+ courier_renderer_->SetPreservesPitch(preserves_pitch);
+}
+
void End2EndTestRenderer::Flush(base::OnceClosure flush_cb) {
courier_renderer_->Flush(std::move(flush_cb));
}
@@ -209,19 +390,21 @@ base::TimeDelta End2EndTestRenderer::GetMediaTime() {
void End2EndTestRenderer::SendMessageToSink(
const std::vector<uint8_t>& message) {
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
- if (!rpc->ParseFromArray(message.data(), message.size())) {
- VLOG(1) << __func__ << ": Received corrupted Rpc message.";
- return;
- }
- receiver_rpc_broker_.ProcessMessageFromRemote(std::move(rpc));
+ media_remotee_->OnMessage(message);
}
-void End2EndTestRenderer::SendFrameToSink(const std::vector<uint8_t>& frame,
+void End2EndTestRenderer::SendFrameToSink(uint32_t frame_count,
+ const std::vector<uint8_t>& frame,
DemuxerStream::Type type) {
scoped_refptr<DecoderBuffer> decoder_buffer =
ByteArrayToDecoderBuffer(frame.data(), frame.size());
- receiver_->OnReceivedBuffer(type, decoder_buffer);
+ if (type == DemuxerStream::Type::AUDIO) {
+ media_remotee_->OnAudioFrame(frame_count, decoder_buffer);
+ } else if (type == DemuxerStream::Type::VIDEO) {
+ media_remotee_->OnVideoFrame(frame_count, decoder_buffer);
+ } else {
+ NOTREACHED();
+ }
}
void End2EndTestRenderer::OnMessageFromSink(
diff --git a/chromium/media/remoting/end2end_test_renderer.h b/chromium/media/remoting/end2end_test_renderer.h
index 1e8e635f878..905e27f5776 100644
--- a/chromium/media/remoting/end2end_test_renderer.h
+++ b/chromium/media/remoting/end2end_test_renderer.h
@@ -5,12 +5,14 @@
#ifndef MEDIA_REMOTING_END2END_RENDERER_H_
#define MEDIA_REMOTING_END2END_RENDERER_H_
+#include <memory>
#include <vector>
#include "base/memory/weak_ptr.h"
#include "media/base/demuxer_stream.h"
#include "media/base/renderer.h"
#include "media/remoting/rpc_broker.h"
+#include "media/remoting/stream_provider.h"
namespace media {
namespace remoting {
@@ -18,6 +20,7 @@ namespace remoting {
class RendererController;
class CourierRenderer;
class Receiver;
+class ReceiverController;
// Simulates the media remoting pipeline.
class End2EndTestRenderer final : public Renderer {
@@ -29,8 +32,8 @@ class End2EndTestRenderer final : public Renderer {
void Initialize(MediaResource* media_resource,
RendererClient* client,
PipelineStatusCallback init_cb) override;
- void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void SetPreservesPitch(bool preserves_pitch) override;
void Flush(base::OnceClosure flush_cb) override;
void StartPlayingFrom(base::TimeDelta time) override;
void SetPlaybackRate(double playback_rate) override;
@@ -46,28 +49,55 @@ class End2EndTestRenderer final : public Renderer {
base::OnceClosure change_completed_cb) override;
private:
+ class TestRemotee;
+
+ void InitTestApi();
+
// Called to send RPC messages to |receiver_|.
void SendMessageToSink(const std::vector<uint8_t>& message);
// Called to send frame data to |receiver_|.
- void SendFrameToSink(const std::vector<uint8_t>& data,
+ void SendFrameToSink(uint32_t frame_count,
+ const std::vector<uint8_t>& data,
DemuxerStream::Type type);
// Called when receives RPC messages from |receiver_|.
void OnMessageFromSink(std::unique_ptr<std::vector<uint8_t>> message);
+ void InitializeReceiverRenderer(PipelineStatus status);
+ void OnCourierRendererInitialized(PipelineStatus status);
+ void OnReceiverInitalized(PipelineStatus status);
+ void CompleteInitialize();
+
+ // Callback function when RPC message is received.
+ void OnReceivedRpc(std::unique_ptr<media::remoting::pb::RpcMessage> message);
+ void OnAcquireRenderer(
+ std::unique_ptr<media::remoting::pb::RpcMessage> message);
+ void OnAcquireRendererDone(int receiver_renderer_handle);
+
+ PipelineStatusCallback init_cb_;
+
+ bool courier_renderer_initialized_;
+ bool receiver_initialized_;
+
+ // Sender components.
std::unique_ptr<RendererController> controller_;
std::unique_ptr<CourierRenderer> courier_renderer_;
- // The RpcBroker to handle the RPC messages to/from |receiver_|.
- RpcBroker receiver_rpc_broker_;
-
- // A receiver that renders media streams.
+ // Receiver components.
+ std::unique_ptr<TestRemotee> media_remotee_;
+ ReceiverController* receiver_controller_;
std::unique_ptr<Receiver> receiver_;
+ std::unique_ptr<StreamProvider> stream_provider_;
+ RpcBroker* receiver_rpc_broker_;
- base::WeakPtrFactory<End2EndTestRenderer> weak_factory_{this};
+ // Handle of |receiver_|
+ int receiver_renderer_handle_ = RpcBroker::kInvalidHandle;
+ // Handle of |courier_renderer_|, it would be sent with AcquireRenderer
+ // message.
+ int sender_renderer_handle_ = RpcBroker::kInvalidHandle;
- DISALLOW_COPY_AND_ASSIGN(End2EndTestRenderer);
+ base::WeakPtrFactory<End2EndTestRenderer> weak_factory_{this};
};
} // namespace remoting
diff --git a/chromium/media/remoting/fake_media_resource.cc b/chromium/media/remoting/fake_media_resource.cc
index e77cf87e10c..710b84a8931 100644
--- a/chromium/media/remoting/fake_media_resource.cc
+++ b/chromium/media/remoting/fake_media_resource.cc
@@ -101,13 +101,15 @@ void FakeDemuxerStream::CreateFakeFrame(size_t size,
}
FakeMediaResource::FakeMediaResource()
- : demuxer_stream_(new FakeDemuxerStream(true)) {}
+ : audio_stream_(new FakeDemuxerStream(true)),
+ video_stream_(new FakeDemuxerStream(false)) {}
FakeMediaResource::~FakeMediaResource() = default;
std::vector<DemuxerStream*> FakeMediaResource::GetAllStreams() {
std::vector<DemuxerStream*> streams;
- streams.push_back(demuxer_stream_.get());
+ streams.push_back(audio_stream_.get());
+ streams.push_back(video_stream_.get());
return streams;
}
diff --git a/chromium/media/remoting/fake_media_resource.h b/chromium/media/remoting/fake_media_resource.h
index 93b53180a05..f6391f8a033 100644
--- a/chromium/media/remoting/fake_media_resource.h
+++ b/chromium/media/remoting/fake_media_resource.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_REMOTING_FAKE_MEDIA_RESOURCE_H_
#define MEDIA_REMOTING_FAKE_MEDIA_RESOURCE_H_
+#include <memory>
+
#include "base/containers/circular_deque.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/demuxer_stream.h"
@@ -54,7 +56,8 @@ class FakeMediaResource : public MediaResource {
std::vector<DemuxerStream*> GetAllStreams() override;
private:
- std::unique_ptr<FakeDemuxerStream> demuxer_stream_;
+ std::unique_ptr<FakeDemuxerStream> audio_stream_;
+ std::unique_ptr<FakeDemuxerStream> video_stream_;
DISALLOW_COPY_AND_ASSIGN(FakeMediaResource);
};
diff --git a/chromium/media/remoting/integration_test.cc b/chromium/media/remoting/integration_test.cc
index bc0aa888909..bbac6f6a8a8 100644
--- a/chromium/media/remoting/integration_test.cc
+++ b/chromium/media/remoting/integration_test.cc
@@ -72,8 +72,7 @@ TEST_F(MediaRemotingIntegrationTest, MediaSource_ConfigChange_WebM) {
Stop();
}
-// Flaky: http://crbug.com/1043812.
-TEST_F(MediaRemotingIntegrationTest, DISABLED_SeekWhilePlaying) {
+TEST_F(MediaRemotingIntegrationTest, SeekWhilePlaying) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
base::TimeDelta duration(pipeline_->GetMediaDuration());
diff --git a/chromium/media/remoting/media_remoting_rpc.proto b/chromium/media/remoting/media_remoting_rpc.proto
index bea8599a538..a7b584f07ac 100644
--- a/chromium/media/remoting/media_remoting_rpc.proto
+++ b/chromium/media/remoting/media_remoting_rpc.proto
@@ -337,6 +337,11 @@ enum CdmSessionType {
kPersistentUsageRecord = 2;
};
+message AcquireDemuxer {
+ optional int32 audio_demuxer_handle = 1;
+ optional int32 video_demuxer_handle = 2;
+}
+
message RendererInitialize {
optional int32 client_handle = 1;
optional int32 audio_demuxer_handle = 2;
@@ -495,6 +500,7 @@ message RpcMessage {
RPC_ACQUIRE_RENDERER_DONE = 2;
RPC_ACQUIRE_CDM = 3;
RPC_ACQUIRE_CDM_DONE = 4;
+ RPC_ACQUIRE_DEMUXER = 5;
// Renderer message
RPC_R_INITIALIZE = 1000;
RPC_R_FLUSHUNTIL = 1001;
@@ -522,6 +528,7 @@ message RpcMessage {
RPC_DS_INITIALIZE = 3000;
RPC_DS_READUNTIL = 3001;
RPC_DS_ENABLEBITSTREAMCONVERTER = 3002;
+ RPC_DS_ONERROR = 3003;
// DemuxerStream callbacks
RPC_DS_INITIALIZE_CALLBACK = 3100;
RPC_DS_READUNTIL_CALLBACK = 3101;
@@ -594,6 +601,9 @@ message RpcMessage {
// RPC_R_SETCDM
RendererSetCdm renderer_set_cdm_rpc = 102;
+ // RPC_ACQUIRE_DEMUXER
+ AcquireDemuxer acquire_demuxer_rpc = 103;
+
// RPC_RC_ONTIMEUPDATE
RendererClientOnTimeUpdate rendererclient_ontimeupdate_rpc = 200;
// RPC_RC_ONVIDEONATURALSIZECHANGE
diff --git a/chromium/media/remoting/mock_receiver_controller.cc b/chromium/media/remoting/mock_receiver_controller.cc
new file mode 100644
index 00000000000..b0e742d7635
--- /dev/null
+++ b/chromium/media/remoting/mock_receiver_controller.cc
@@ -0,0 +1,118 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/remoting/mock_receiver_controller.h"
+
+#include "base/check.h"
+#include "media/mojo/common/mojo_decoder_buffer_converter.h"
+#include "media/remoting/test_utils.h"
+#include "mojo/public/cpp/system/data_pipe.h"
+
+namespace media {
+namespace remoting {
+
+MockRemotee::MockRemotee() = default;
+
+MockRemotee::~MockRemotee() = default;
+
+void MockRemotee::BindMojoReceiver(mojo::PendingReceiver<Remotee> receiver) {
+ DCHECK(receiver);
+ receiver_.Bind(std::move(receiver));
+}
+
+void MockRemotee::SendAudioFrame(uint32_t frame_count,
+ scoped_refptr<DecoderBuffer> buffer) {
+ mojom::DecoderBufferPtr mojo_buffer =
+ audio_buffer_writer_->WriteDecoderBuffer(std::move(buffer));
+ audio_stream_->ReceiveFrame(frame_count, std::move(mojo_buffer));
+}
+
+void MockRemotee::SendVideoFrame(uint32_t frame_count,
+ scoped_refptr<DecoderBuffer> buffer) {
+ mojom::DecoderBufferPtr mojo_buffer =
+ video_buffer_writer_->WriteDecoderBuffer(std::move(buffer));
+ video_stream_->ReceiveFrame(frame_count, std::move(mojo_buffer));
+}
+
+void MockRemotee::OnRemotingSinkReady(
+ mojo::PendingRemote<mojom::RemotingSink> remoting_sink) {
+ DCHECK(remoting_sink);
+ remoting_sink_.Bind(std::move(remoting_sink));
+}
+
+void MockRemotee::SendMessageToSource(const std::vector<uint8_t>& message) {}
+
+void MockRemotee::StartDataStreams(
+ mojo::PendingRemote<mojom::RemotingDataStreamReceiver> audio_stream,
+ mojo::PendingRemote<mojom::RemotingDataStreamReceiver> video_stream) {
+ if (audio_stream.is_valid()) {
+ // Initialize data pipe for audio data stream receiver.
+ audio_stream_.Bind(std::move(audio_stream));
+ mojo::ScopedDataPipeConsumerHandle audio_data_pipe;
+ audio_buffer_writer_ = MojoDecoderBufferWriter::Create(
+ GetDefaultDecoderBufferConverterCapacity(DemuxerStream::AUDIO),
+ &audio_data_pipe);
+ audio_stream_->InitializeDataPipe(std::move(audio_data_pipe));
+ }
+
+ if (video_stream.is_valid()) {
+ // Initialize data pipe for video data stream receiver.
+ video_stream_.Bind(std::move(video_stream));
+ mojo::ScopedDataPipeConsumerHandle video_data_pipe;
+ video_buffer_writer_ = MojoDecoderBufferWriter::Create(
+ GetDefaultDecoderBufferConverterCapacity(DemuxerStream::VIDEO),
+ &video_data_pipe);
+ video_stream_->InitializeDataPipe(std::move(video_data_pipe));
+ }
+}
+
+void MockRemotee::OnFlushUntil(uint32_t audio_count, uint32_t video_count) {
+ flush_audio_count_ = audio_count;
+ flush_video_count_ = video_count;
+
+ if (audio_stream_.is_bound()) {
+ audio_stream_->FlushUntil(audio_count);
+ }
+ if (video_stream_.is_bound()) {
+ video_stream_->FlushUntil(video_count);
+ }
+}
+
+void MockRemotee::OnVideoNaturalSizeChange(const gfx::Size& size) {
+ DCHECK(!size.IsEmpty());
+ changed_size_ = size;
+}
+
+void MockRemotee::Reset() {
+ audio_stream_.reset();
+ video_stream_.reset();
+ receiver_.reset();
+ remoting_sink_.reset();
+}
+
+// static
+MockReceiverController* MockReceiverController::GetInstance() {
+ static base::NoDestructor<MockReceiverController> controller;
+ ResetForTesting(controller.get());
+ controller->mock_remotee_->Reset();
+ return controller.get();
+}
+
+MockReceiverController::MockReceiverController()
+ : mock_remotee_(new MockRemotee()) {
+ // Overwrites |rpc_broker_|.
+ rpc_broker_.SetMessageCallbackForTesting(base::BindRepeating(
+ &MockReceiverController::OnSendRpc, base::Unretained(this)));
+}
+
+MockReceiverController::~MockReceiverController() = default;
+
+void MockReceiverController::OnSendRpc(
+ std::unique_ptr<std::vector<uint8_t>> message) {
+ std::vector<uint8_t> binary_message = *message;
+ ReceiverController::OnMessageFromSource(binary_message);
+}
+
+} // namespace remoting
+} // namespace media
diff --git a/chromium/media/remoting/mock_receiver_controller.h b/chromium/media/remoting/mock_receiver_controller.h
new file mode 100644
index 00000000000..e4c6da6c37b
--- /dev/null
+++ b/chromium/media/remoting/mock_receiver_controller.h
@@ -0,0 +1,96 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_REMOTING_MOCK_RECEIVER_CONTROLLER_H_
+#define MEDIA_REMOTING_MOCK_RECEIVER_CONTROLLER_H_
+
+#include <memory>
+#include <vector>
+
+#include "base/memory/scoped_refptr.h"
+#include "base/no_destructor.h"
+#include "media/mojo/mojom/remoting.mojom.h"
+#include "media/remoting/receiver_controller.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+class MojoDecoderBufferWriter;
+
+namespace remoting {
+
+class MockRemotee : public mojom::Remotee {
+ public:
+ MockRemotee();
+ ~MockRemotee() override;
+
+ void BindMojoReceiver(mojo::PendingReceiver<Remotee> receiver);
+
+ void SendAudioFrame(uint32_t frame_count,
+ scoped_refptr<DecoderBuffer> buffer);
+ void SendVideoFrame(uint32_t frame_count,
+ scoped_refptr<DecoderBuffer> buffer);
+
+ // mojom::Remotee implementation
+ void OnRemotingSinkReady(
+ mojo::PendingRemote<mojom::RemotingSink> remoting_sink) override;
+ void SendMessageToSource(const std::vector<uint8_t>& message) override;
+ void StartDataStreams(
+ mojo::PendingRemote<mojom::RemotingDataStreamReceiver> audio_stream,
+ mojo::PendingRemote<mojom::RemotingDataStreamReceiver> video_stream)
+ override;
+ void OnFlushUntil(uint32_t audio_count, uint32_t video_count) override;
+ void OnVideoNaturalSizeChange(const gfx::Size& size) override;
+
+ void Reset();
+
+ gfx::Size changed_size() { return changed_size_; }
+ uint32_t flush_audio_count() { return flush_audio_count_; }
+ uint32_t flush_video_count() { return flush_video_count_; }
+
+ mojo::PendingRemote<mojom::Remotee> BindNewPipeAndPassRemote() {
+ return receiver_.BindNewPipeAndPassRemote();
+ }
+
+ mojo::Remote<mojom::RemotingDataStreamReceiver> audio_stream_;
+ mojo::Remote<mojom::RemotingDataStreamReceiver> video_stream_;
+
+ private:
+ gfx::Size changed_size_;
+
+ uint32_t flush_audio_count_{0};
+ uint32_t flush_video_count_{0};
+
+ std::unique_ptr<MojoDecoderBufferWriter> audio_buffer_writer_;
+ std::unique_ptr<MojoDecoderBufferWriter> video_buffer_writer_;
+
+ mojo::Remote<mojom::RemotingSink> remoting_sink_;
+ mojo::Receiver<mojom::Remotee> receiver_{this};
+};
+
+class MockReceiverController : public ReceiverController {
+ public:
+ static MockReceiverController* GetInstance();
+
+ MockRemotee* mock_remotee() { return mock_remotee_.get(); }
+
+ private:
+ friend base::NoDestructor<MockReceiverController>;
+ friend testing::StrictMock<MockReceiverController>;
+ friend testing::NiceMock<MockReceiverController>;
+
+ MockReceiverController();
+ ~MockReceiverController() override;
+
+ void OnSendRpc(std::unique_ptr<std::vector<uint8_t>> message);
+
+ std::unique_ptr<MockRemotee> mock_remotee_;
+};
+
+} // namespace remoting
+} // namespace media
+
+#endif // MEDIA_REMOTING_MOCK_RECEIVER_CONTROLLER_H_
diff --git a/chromium/media/remoting/receiver.cc b/chromium/media/remoting/receiver.cc
index 6db0c7d3b9d..40bffe10dd3 100644
--- a/chromium/media/remoting/receiver.cc
+++ b/chromium/media/remoting/receiver.cc
@@ -6,10 +6,14 @@
#include "base/bind.h"
#include "base/callback.h"
+#include "base/single_thread_task_runner.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/renderer.h"
#include "media/remoting/proto_enum_utils.h"
#include "media/remoting/proto_utils.h"
+#include "media/remoting/receiver_controller.h"
#include "media/remoting/stream_provider.h"
namespace media {
@@ -23,116 +27,168 @@ constexpr base::TimeDelta kTimeUpdateInterval =
} // namespace
-Receiver::Receiver(std::unique_ptr<Renderer> renderer, RpcBroker* rpc_broker)
- : renderer_(std::move(renderer)),
- rpc_broker_(rpc_broker),
- rpc_handle_(rpc_broker_->GetUniqueHandle()) {
- DCHECK(renderer_);
+Receiver::Receiver(
+ int rpc_handle,
+ int remote_handle,
+ ReceiverController* receiver_controller,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ std::unique_ptr<Renderer> renderer,
+ base::OnceCallback<void(int)> acquire_renderer_done_cb)
+ : rpc_handle_(rpc_handle),
+ remote_handle_(remote_handle),
+ receiver_controller_(receiver_controller),
+ rpc_broker_(receiver_controller_->rpc_broker()),
+ main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
+ media_task_runner_(media_task_runner),
+ renderer_(std::move(renderer)),
+ acquire_renderer_done_cb_(std::move(acquire_renderer_done_cb)) {
+ DCHECK(rpc_handle_ != RpcBroker::kInvalidHandle);
+ DCHECK(receiver_controller_);
DCHECK(rpc_broker_);
- rpc_broker_->RegisterMessageReceiverCallback(
- rpc_handle_, base::BindRepeating(&Receiver::OnReceivedRpc,
- weak_factory_.GetWeakPtr()));
- rpc_broker_->RegisterMessageReceiverCallback(
- RpcBroker::kAcquireHandle,
- base::BindRepeating(&Receiver::OnReceivedRpc,
- weak_factory_.GetWeakPtr()));
+ DCHECK(renderer_);
+
+ // Note: The constructor is running on the main thread, but will be destroyed
+ // on the media thread. Therefore, all weak pointers must be dereferenced on
+ // the media thread.
+ const RpcBroker::ReceiveMessageCallback receive_callback = BindToLoop(
+ media_task_runner_,
+ BindRepeating(&Receiver::OnReceivedRpc, weak_factory_.GetWeakPtr()));
+
+ // Listening all renderer rpc messages.
+ rpc_broker_->RegisterMessageReceiverCallback(rpc_handle_, receive_callback);
+ VerifyAcquireRendererDone();
}
Receiver::~Receiver() {
rpc_broker_->UnregisterMessageReceiverCallback(rpc_handle_);
- rpc_broker_->UnregisterMessageReceiverCallback(RpcBroker::kAcquireHandle);
+ rpc_broker_->UnregisterMessageReceiverCallback(
+ RpcBroker::kAcquireRendererHandle);
+}
+
+// Receiver::Initialize() will be called by the local pipeline, it would only
+// keep the |init_cb| in order to continue the initialization once it receives
+// RPC_R_INITIALIZE, which means Receiver::RpcInitialize() is called.
+void Receiver::Initialize(MediaResource* media_resource,
+ RendererClient* client,
+ PipelineStatusCallback init_cb) {
+ demuxer_ = media_resource;
+ init_cb_ = std::move(init_cb);
+ ShouldInitializeRenderer();
+}
+
+/* CDM is not supported for remoting media */
+void Receiver::SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) {
+ NOTREACHED();
+}
+
+// No-op. Controlled by sender via RPC calls instead.
+void Receiver::SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) {}
+
+// No-op. Controlled by sender via RPC calls instead.
+void Receiver::Flush(base::OnceClosure flush_cb) {}
+
+// No-op. Controlled by sender via RPC calls instead.
+void Receiver::StartPlayingFrom(base::TimeDelta time) {}
+
+// No-op. Controlled by sender via RPC calls instead.
+void Receiver::SetPlaybackRate(double playback_rate) {}
+
+// No-op. Controlled by sender via RPC calls instead.
+void Receiver::SetVolume(float volume) {}
+
+// No-op. Controlled by sender via RPC calls instead.
+base::TimeDelta Receiver::GetMediaTime() {
+ return base::TimeDelta();
+}
+
+void Receiver::SendRpcMessageOnMainThread(
+ std::unique_ptr<pb::RpcMessage> message) {
+ // |rpc_broker_| is owned by |receiver_controller_| which is a singleton per
+ // process, so it's safe to use Unretained() here.
+ main_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&RpcBroker::SendMessageToRemote,
+ base::Unretained(rpc_broker_), std::move(message)));
}
void Receiver::OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(message);
switch (message->proc()) {
- case pb::RpcMessage::RPC_ACQUIRE_RENDERER:
- AcquireRenderer(std::move(message));
+ case pb::RpcMessage::RPC_R_INITIALIZE:
+ RpcInitialize(std::move(message));
break;
case pb::RpcMessage::RPC_R_FLUSHUNTIL:
- FlushUntil(std::move(message));
+ RpcFlushUntil(std::move(message));
break;
case pb::RpcMessage::RPC_R_STARTPLAYINGFROM:
- StartPlayingFrom(std::move(message));
+ RpcStartPlayingFrom(std::move(message));
break;
case pb::RpcMessage::RPC_R_SETPLAYBACKRATE:
- SetPlaybackRate(std::move(message));
+ RpcSetPlaybackRate(std::move(message));
break;
case pb::RpcMessage::RPC_R_SETVOLUME:
- SetVolume(std::move(message));
- break;
- case pb::RpcMessage::RPC_R_INITIALIZE:
- Initialize(std::move(message));
+ RpcSetVolume(std::move(message));
break;
default:
- VLOG(1) << __func__ << ": Unknow RPC message. proc=" << message->proc();
+ VLOG(1) << __func__ << ": Unknown RPC message. proc=" << message->proc();
}
}
-void Receiver::AcquireRenderer(std::unique_ptr<pb::RpcMessage> message) {
- DVLOG(3) << __func__ << ": Receives RPC_ACQUIRE_RENDERER with remote_handle= "
- << message->integer_value();
+void Receiver::SetRemoteHandle(int remote_handle) {
+ DCHECK_NE(remote_handle, RpcBroker::kInvalidHandle);
+ DCHECK_EQ(remote_handle_, RpcBroker::kInvalidHandle);
+ remote_handle_ = remote_handle;
+ VerifyAcquireRendererDone();
+}
- remote_handle_ = message->integer_value();
- if (stream_provider_) {
- VLOG(1) << "Acquire renderer error: Already acquired.";
- OnError(PipelineStatus::PIPELINE_ERROR_DECODE);
+void Receiver::VerifyAcquireRendererDone() {
+ if (remote_handle_ == RpcBroker::kInvalidHandle)
return;
- }
-
- stream_provider_.reset(new StreamProvider(
- rpc_broker_,
- base::BindOnce(&Receiver::OnError, weak_factory_.GetWeakPtr(),
- PipelineStatus::PIPELINE_ERROR_DECODE)));
- DVLOG(3) << __func__
- << ": Issues RPC_ACQUIRE_RENDERER_DONE RPC message. remote_handle="
- << remote_handle_ << " rpc_handle=" << rpc_handle_;
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
- rpc->set_handle(remote_handle_);
- rpc->set_proc(pb::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
- rpc->set_integer_value(rpc_handle_);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ DCHECK(acquire_renderer_done_cb_);
+ std::move(acquire_renderer_done_cb_).Run(rpc_handle_);
}
-void Receiver::Initialize(std::unique_ptr<pb::RpcMessage> message) {
- DCHECK(stream_provider_);
- DVLOG(3) << __func__ << ": Receives RPC_R_INITIALIZE with callback handle= "
- << message->renderer_initialize_rpc().callback_handle();
- DCHECK(message->renderer_initialize_rpc().callback_handle() ==
- remote_handle_);
- if (!stream_provider_)
- OnRendererInitialized(PipelineStatus::PIPELINE_ERROR_INITIALIZATION_FAILED);
-
- stream_provider_->Initialize(
- message->renderer_initialize_rpc().audio_demuxer_handle(),
- message->renderer_initialize_rpc().video_demuxer_handle(),
- base::BindOnce(&Receiver::OnStreamInitialized,
- weak_factory_.GetWeakPtr()));
+void Receiver::RpcInitialize(std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(renderer_);
+ rpc_initialize_received_ = true;
+ ShouldInitializeRenderer();
}
-void Receiver::OnStreamInitialized() {
- DCHECK(stream_provider_);
- renderer_->Initialize(stream_provider_.get(), this,
+void Receiver::ShouldInitializeRenderer() {
+ // ShouldInitializeRenderer() will be called from Initialize() and
+ // RpcInitialize() in different orders.
+ //
+ // |renderer_| must be initialized when both Initialize() and
+ // RpcInitialize() are called.
+ if (!rpc_initialize_received_ || !init_cb_)
+ return;
+
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(renderer_);
+ DCHECK(demuxer_);
+ renderer_->Initialize(demuxer_, this,
base::BindOnce(&Receiver::OnRendererInitialized,
weak_factory_.GetWeakPtr()));
}
void Receiver::OnRendererInitialized(PipelineStatus status) {
- DVLOG(3) << __func__ << ": Issues RPC_R_INITIALIZE_CALLBACK RPC message."
- << "remote_handle=" << remote_handle_;
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(init_cb_);
+ std::move(init_cb_).Run(status);
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_R_INITIALIZE_CALLBACK);
rpc->set_boolean_value(status == PIPELINE_OK);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
-void Receiver::SetPlaybackRate(std::unique_ptr<pb::RpcMessage> message) {
+void Receiver::RpcSetPlaybackRate(std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
const double playback_rate = message->double_value();
- DVLOG(3) << __func__
- << ": Receives RPC_R_SETPLAYBACKRATE with rate=" << playback_rate;
renderer_->SetPlaybackRate(playback_rate);
if (playback_rate == 0.0) {
@@ -147,38 +203,32 @@ void Receiver::SetPlaybackRate(std::unique_ptr<pb::RpcMessage> message) {
}
}
-void Receiver::FlushUntil(std::unique_ptr<pb::RpcMessage> message) {
- DVLOG(3) << __func__ << ": Receives RPC_R_FLUSHUNTIL RPC message.";
+void Receiver::RpcFlushUntil(std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(message->has_renderer_flushuntil_rpc());
const pb::RendererFlushUntil flush_message =
message->renderer_flushuntil_rpc();
DCHECK_EQ(flush_message.callback_handle(), remote_handle_);
- if (stream_provider_) {
- if (flush_message.has_audio_count()) {
- stream_provider_->FlushUntil(DemuxerStream::AUDIO,
- flush_message.audio_count());
- }
- if (flush_message.has_video_count()) {
- stream_provider_->FlushUntil(DemuxerStream::VIDEO,
- flush_message.video_count());
- }
- }
+
+ receiver_controller_->OnRendererFlush(flush_message.audio_count(),
+ flush_message.video_count());
+
time_update_timer_.Stop();
renderer_->Flush(
base::BindOnce(&Receiver::OnFlushDone, weak_factory_.GetWeakPtr()));
}
void Receiver::OnFlushDone() {
- DVLOG(3) << __func__ << ": Issues RPC_R_FLUSHUNTIL_CALLBACK RPC message.";
-
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_R_FLUSHUNTIL_CALLBACK);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
-void Receiver::StartPlayingFrom(std::unique_ptr<pb::RpcMessage> message) {
- DVLOG(3) << __func__ << ": Receives RPC_R_STARTPLAYINGFROM message.";
+void Receiver::RpcStartPlayingFrom(std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
base::TimeDelta time =
base::TimeDelta::FromMicroseconds(message->integer64_value());
renderer_->StartPlayingFrom(time);
@@ -194,14 +244,14 @@ void Receiver::ScheduleMediaTimeUpdates() {
weak_factory_.GetWeakPtr()));
}
-void Receiver::SetVolume(std::unique_ptr<pb::RpcMessage> message) {
- DVLOG(3) << __func__ << ": Receives RPC_R_SETVOLUME message.";
+void Receiver::RpcSetVolume(std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
renderer_->SetVolume(message->double_value());
}
void Receiver::SendMediaTimeUpdate() {
// Issues RPC_RC_ONTIMEUPDATE RPC message.
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONTIMEUPDATE);
auto* message = rpc->mutable_rendererclient_ontimeupdate_rpc();
@@ -209,40 +259,26 @@ void Receiver::SendMediaTimeUpdate() {
message->set_time_usec(media_time.InMicroseconds());
base::TimeDelta max_time = media_time;
message->set_max_time_usec(max_time.InMicroseconds());
- DVLOG(3) << __func__ << ": Issues RPC_RC_ONTIMEUPDATE message."
- << " media_time = " << media_time.InMicroseconds()
- << " max_time= " << max_time.InMicroseconds();
- rpc_broker_->SendMessageToRemote(std::move(rpc));
-}
-
-void Receiver::OnReceivedBuffer(DemuxerStream::Type type,
- scoped_refptr<DecoderBuffer> buffer) {
- DVLOG(3) << __func__
- << ": type=" << (type == DemuxerStream::AUDIO ? "Audio" : "Video");
- DCHECK(stream_provider_);
- stream_provider_->AppendBuffer(type, buffer);
+ SendRpcMessageOnMainThread(std::move(rpc));
}
void Receiver::OnError(PipelineStatus status) {
- VLOG(1) << __func__ << ": Issues RPC_RC_ONERROR message.";
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONERROR);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
void Receiver::OnEnded() {
- DVLOG(3) << __func__ << ": Issues RPC_RC_ONENDED message.";
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONENDED);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
time_update_timer_.Stop();
}
void Receiver::OnStatisticsUpdate(const PipelineStatistics& stats) {
- DVLOG(3) << __func__ << ": Issues RPC_RC_ONSTATISTICSUPDATE message.";
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONSTATISTICSUPDATE);
auto* message = rpc->mutable_rendererclient_onstatisticsupdate_rpc();
@@ -252,77 +288,68 @@ void Receiver::OnStatisticsUpdate(const PipelineStatistics& stats) {
message->set_video_frames_dropped(stats.video_frames_dropped);
message->set_audio_memory_usage(stats.audio_memory_usage);
message->set_video_memory_usage(stats.video_memory_usage);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
void Receiver::OnBufferingStateChange(BufferingState state,
BufferingStateChangeReason reason) {
- DVLOG(3) << __func__
- << ": Issues RPC_RC_ONBUFFERINGSTATECHANGE message: state=" << state;
-
- // The |reason| is determined on the other side of the RPC in CourierRenderer.
- // For now, there is no reason to provide this in the |message| below.
- DCHECK_EQ(reason, BUFFERING_CHANGE_REASON_UNKNOWN);
-
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONBUFFERINGSTATECHANGE);
auto* message = rpc->mutable_rendererclient_onbufferingstatechange_rpc();
message->set_state(ToProtoMediaBufferingState(state).value());
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
// TODO: Passes |reason| over.
void Receiver::OnWaiting(WaitingReason reason) {
- DVLOG(3) << __func__ << ": Issues RPC_RC_ONWAITINGFORDECRYPTIONKEY message.";
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONWAITINGFORDECRYPTIONKEY);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
void Receiver::OnAudioConfigChange(const AudioDecoderConfig& config) {
- DVLOG(3) << __func__ << ": Issues RPC_RC_ONAUDIOCONFIGCHANGE message.";
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONAUDIOCONFIGCHANGE);
auto* message = rpc->mutable_rendererclient_onaudioconfigchange_rpc();
pb::AudioDecoderConfig* proto_audio_config =
message->mutable_audio_decoder_config();
ConvertAudioDecoderConfigToProto(config, proto_audio_config);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
void Receiver::OnVideoConfigChange(const VideoDecoderConfig& config) {
- DVLOG(3) << __func__ << ": Issues RPC_RC_ONVIDEOCONFIGCHANGE message.";
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONVIDEOCONFIGCHANGE);
auto* message = rpc->mutable_rendererclient_onvideoconfigchange_rpc();
pb::VideoDecoderConfig* proto_video_config =
message->mutable_video_decoder_config();
ConvertVideoDecoderConfigToProto(config, proto_video_config);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
void Receiver::OnVideoNaturalSizeChange(const gfx::Size& size) {
- DVLOG(3) << __func__ << ": Issues RPC_RC_ONVIDEONATURALSIZECHANGE message.";
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONVIDEONATURALSIZECHANGE);
auto* message = rpc->mutable_rendererclient_onvideonatualsizechange_rpc();
message->set_width(size.width());
message->set_height(size.height());
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
+
+ // Notify the host.
+ receiver_controller_->OnVideoNaturalSizeChange(size);
}
void Receiver::OnVideoOpacityChange(bool opaque) {
- DVLOG(3) << __func__ << ": Issues RPC_RC_ONVIDEOOPACITYCHANGE message.";
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_RC_ONVIDEOOPACITYCHANGE);
rpc->set_boolean_value(opaque);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
}
void Receiver::OnVideoFrameRateChange(base::Optional<int>) {}
diff --git a/chromium/media/remoting/receiver.h b/chromium/media/remoting/receiver.h
index 38d5f18f91b..8f0ac1154b2 100644
--- a/chromium/media/remoting/receiver.h
+++ b/chromium/media/remoting/receiver.h
@@ -5,30 +5,61 @@
#ifndef MEDIA_REMOTING_RECEIVER_H_
#define MEDIA_REMOTING_RECEIVER_H_
+#include <memory>
+
+#include "base/callback_forward.h"
+#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
#include "base/timer/timer.h"
#include "media/base/buffering_state.h"
#include "media/base/demuxer_stream.h"
+#include "media/base/renderer.h"
#include "media/base/renderer_client.h"
+#include "media/remoting/media_remoting_rpc.pb.h"
#include "media/remoting/rpc_broker.h"
-namespace media {
-class Renderer;
-class DecoderBuffer;
-} // namespace media
+namespace base {
+class SingleThreadTaskRunner;
+} // namespace base
namespace media {
namespace remoting {
+class ReceiverController;
class RpcBroker;
-class StreamProvider;
-// Media remoting receiver. Media streams are rendered by |renderer|.
-// |rpc_broker| outlives this class.
-class Receiver final : public RendererClient {
+// Receiver runs on a remote device, and forwards the information sent from a
+// CourierRenderer to |renderer_|, which actually renders the media.
+//
+// Receiver implements media::Renderer to be able to work with
+// WebMediaPlayerImpl. However, most of the APIs of media::Renderer are dummy
+// functions, because the media playback of the remoting media is not controlled
+// by the local pipeline of WMPI. It should be controlled by the remoting sender
+// via RPC calls. When Receiver receives RPC calls, it will call the
+// corresponding functions of |renderer_| to control the media playback of
+// the remoting media.
+class Receiver final : public Renderer, public RendererClient {
public:
- Receiver(std::unique_ptr<Renderer> renderer, RpcBroker* rpc_broker);
- ~Receiver();
+ Receiver(int rpc_handle,
+ int remote_handle,
+ ReceiverController* receiver_controller,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ std::unique_ptr<Renderer> renderer,
+ base::OnceCallback<void(int)> acquire_renderer_done_cb);
+ ~Receiver() override;
+
+ // Renderer implementation
+ void Initialize(MediaResource* media_resource,
+ RendererClient* client,
+ PipelineStatusCallback init_cb) override;
+ void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) override;
+ void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void Flush(base::OnceClosure flush_cb) override;
+ void StartPlayingFrom(base::TimeDelta time) override;
+ void SetPlaybackRate(double playback_rate) override;
+ void SetVolume(float volume) override;
+ base::TimeDelta GetMediaTime() override;
// RendererClient implementation.
void OnError(PipelineStatus status) override;
@@ -43,46 +74,70 @@ class Receiver final : public RendererClient {
void OnVideoOpacityChange(bool opaque) override;
void OnVideoFrameRateChange(base::Optional<int>) override;
- void OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message);
- void OnReceivedBuffer(DemuxerStream::Type type,
- scoped_refptr<DecoderBuffer> buffer);
+ // Used to set |remote_handle_| after Receiver is created, because the remote
+ // handle might be received after Receiver is created.
+ void SetRemoteHandle(int remote_handle);
+
+ base::WeakPtr<Receiver> GetWeakPtr() { return weak_factory_.GetWeakPtr(); }
private:
+ // Send RPC message on |main_task_runner_|.
+ void SendRpcMessageOnMainThread(std::unique_ptr<pb::RpcMessage> message);
+
+ // Callback function when RPC message is received.
+ void OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message);
+
// RPC message handlers.
- void AcquireRenderer(std::unique_ptr<pb::RpcMessage> message);
- void Initialize(std::unique_ptr<pb::RpcMessage> message);
- void SetPlaybackRate(std::unique_ptr<pb::RpcMessage> message);
- void FlushUntil(std::unique_ptr<pb::RpcMessage> message);
- void StartPlayingFrom(std::unique_ptr<pb::RpcMessage> message);
- void SetVolume(std::unique_ptr<pb::RpcMessage> message);
-
- // Initialization callbacks.
- void OnStreamInitialized();
- void OnRendererInitialized(PipelineStatus status);
+ void RpcInitialize(std::unique_ptr<pb::RpcMessage> message);
+ void RpcSetPlaybackRate(std::unique_ptr<pb::RpcMessage> message);
+ void RpcFlushUntil(std::unique_ptr<pb::RpcMessage> message);
+ void RpcStartPlayingFrom(std::unique_ptr<pb::RpcMessage> message);
+ void RpcSetVolume(std::unique_ptr<pb::RpcMessage> message);
+ void ShouldInitializeRenderer();
+ void OnRendererInitialized(PipelineStatus status);
+ void VerifyAcquireRendererDone();
void OnFlushDone();
// Periodically send the UpdateTime RPC message to update the media time.
void ScheduleMediaTimeUpdates();
void SendMediaTimeUpdate();
- const std::unique_ptr<Renderer> renderer_;
- RpcBroker* const rpc_broker_; // Outlives this class.
+ // The callback function to call when |this| is initialized.
+ PipelineStatusCallback init_cb_;
+
+ // Indicates whether |this| received RPC_R_INITIALIZE message or not.
+ bool rpc_initialize_received_ = false;
+
+ // Owns by the WebMediaPlayerImpl instance.
+ MediaResource* demuxer_ = nullptr;
+
+ // The handle of |this| for listening RPC messages.
+ const int rpc_handle_;
- // The CourierRenderer handle on sender side. Set when AcauireRenderer() is
- // called.
- int remote_handle_ = RpcBroker::kInvalidHandle;
+ // The CourierRenderer handle on sender side. |remote_handle_| could be set
+ // through the ctor or SetRemoteHandle().
+ int remote_handle_;
- int rpc_handle_ = RpcBroker::kInvalidHandle;
+ ReceiverController* const receiver_controller_; // Outlives this class.
+ RpcBroker* const rpc_broker_; // Outlives this class.
- std::unique_ptr<StreamProvider> stream_provider_;
+ // Calling SendMessageCallback() of |rpc_broker_| should be on main thread.
+ const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
+
+ // Media tasks should run on media thread.
+ const scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
+
+ // |renderer_| is the real renderer to render media.
+ std::unique_ptr<Renderer> renderer_;
+
+ // The callback function to send RPC_ACQUIRE_RENDERER_DONE.
+ base::OnceCallback<void(int)> acquire_renderer_done_cb_;
// The timer to periodically update the media time.
base::RepeatingTimer time_update_timer_;
base::WeakPtrFactory<Receiver> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(Receiver);
};
} // namespace remoting
diff --git a/chromium/media/remoting/receiver_controller.cc b/chromium/media/remoting/receiver_controller.cc
new file mode 100644
index 00000000000..549087cf391
--- /dev/null
+++ b/chromium/media/remoting/receiver_controller.cc
@@ -0,0 +1,116 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/remoting/receiver_controller.h"
+
+#include "base/single_thread_task_runner.h"
+#include "base/threading/thread_task_runner_handle.h"
+
+namespace media {
+namespace remoting {
+
+// static
+ReceiverController* ReceiverController::GetInstance() {
+ static base::NoDestructor<ReceiverController> controller;
+ return controller.get();
+}
+
+ReceiverController::ReceiverController()
+ : rpc_broker_(base::BindRepeating(&ReceiverController::OnSendRpc,
+ base::Unretained(this))),
+ main_task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
+
+ReceiverController::~ReceiverController() = default;
+
+void ReceiverController::Initialize(
+ mojo::PendingRemote<mojom::Remotee> remotee) {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ DCHECK(!media_remotee_.is_bound());
+ media_remotee_.Bind(std::move(remotee));
+
+ // Calling NotifyRemotingSinkReady() to notify the host that RemotingSink is
+ // ready.
+ media_remotee_->OnRemotingSinkReady(receiver_.BindNewPipeAndPassRemote());
+}
+
+void ReceiverController::OnRendererFlush(uint32_t audio_count,
+ uint32_t video_count) {
+ if (!main_task_runner_->BelongsToCurrentThread()) {
+ // |this| is a singleton per process, it would be safe to use
+ // base::Unretained() here.
+ main_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&ReceiverController::OnRendererFlush,
+ base::Unretained(this), audio_count, video_count));
+ return;
+ }
+
+ if (media_remotee_.is_bound())
+ media_remotee_->OnFlushUntil(audio_count, video_count);
+}
+
+void ReceiverController::OnVideoNaturalSizeChange(const gfx::Size& size) {
+ if (!main_task_runner_->BelongsToCurrentThread()) {
+ // |this| is a singleton per process, it would be safe to use
+ // base::Unretained() here.
+ main_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&ReceiverController::OnVideoNaturalSizeChange,
+ base::Unretained(this), size));
+ return;
+ }
+
+ if (media_remotee_.is_bound())
+ media_remotee_->OnVideoNaturalSizeChange(size);
+}
+
+void ReceiverController::StartDataStreams(
+ mojo::PendingRemote<::media::mojom::RemotingDataStreamReceiver>
+ audio_stream,
+ mojo::PendingRemote<::media::mojom::RemotingDataStreamReceiver>
+ video_stream) {
+ if (!main_task_runner_->BelongsToCurrentThread()) {
+ // |this| is a singleton per process, it would be safe to use
+ // base::Unretained() here.
+ main_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&ReceiverController::StartDataStreams,
+ base::Unretained(this), std::move(audio_stream),
+ std::move(video_stream)));
+ return;
+ }
+ if (media_remotee_.is_bound()) {
+ media_remotee_->StartDataStreams(std::move(audio_stream),
+ std::move(video_stream));
+ }
+}
+
+void ReceiverController::OnMessageFromSource(
+ const std::vector<uint8_t>& message) {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ auto rpc_message = std::make_unique<pb::RpcMessage>(pb::RpcMessage());
+ if (!rpc_message->ParseFromArray(message.data(), message.size()))
+ return;
+
+ rpc_broker_.ProcessMessageFromRemote(std::move(rpc_message));
+}
+
+void ReceiverController::OnSendRpc(
+ std::unique_ptr<std::vector<uint8_t>> message) {
+ if (!main_task_runner_->BelongsToCurrentThread()) {
+ // |this| is a singleton per process, it would be safe to use
+ // base::Unretained() here.
+ main_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&ReceiverController::OnSendRpc,
+ base::Unretained(this), std::move(message)));
+ return;
+ }
+
+ DCHECK(media_remotee_.is_bound());
+ std::vector<uint8_t> binary_message = *message;
+ if (media_remotee_.is_bound())
+ media_remotee_->SendMessageToSource(binary_message);
+}
+
+} // namespace remoting
+} // namespace media
diff --git a/chromium/media/remoting/receiver_controller.h b/chromium/media/remoting/receiver_controller.h
new file mode 100644
index 00000000000..1071de2760a
--- /dev/null
+++ b/chromium/media/remoting/receiver_controller.h
@@ -0,0 +1,70 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_REMOTING_RECEIVER_CONTROLLER_H_
+#define MEDIA_REMOTING_RECEIVER_CONTROLLER_H_
+
+#include <memory>
+
+#include "base/no_destructor.h"
+#include "media/mojo/mojom/remoting.mojom.h"
+#include "media/remoting/rpc_broker.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "mojo/public/cpp/bindings/receiver.h"
+#include "mojo/public/cpp/bindings/remote.h"
+
+namespace media {
+namespace remoting {
+
+// ReceiverController is the bridge that owns |rpc_broker_| to allow Receivers
+// and StreamProvider::MediaStreams to communicate with the sender via RPC
+// calls.
+//
+// It also forwards calls to a |media_remotee_| instance, which will be
+// implemented the browser process. Currently, the only use case will be on
+// Chromecast, the Remotee implementation will be implemented in the browser
+// code on Chromecast.
+//
+// NOTE: ReceiverController is a singleton per process.
+class ReceiverController : mojom::RemotingSink {
+ public:
+ static ReceiverController* GetInstance();
+ void Initialize(mojo::PendingRemote<mojom::Remotee> remotee);
+
+ // Proxy functions to |media_remotee_|.
+ void OnRendererFlush(uint32_t audio_count, uint32_t video_count);
+ void OnVideoNaturalSizeChange(const gfx::Size& size);
+ void StartDataStreams(
+ mojo::PendingRemote<mojom::RemotingDataStreamReceiver> audio_stream,
+ mojo::PendingRemote<mojom::RemotingDataStreamReceiver> video_stream);
+
+ // The reference of |rpc_broker_|.
+ media::remoting::RpcBroker* rpc_broker() { return &rpc_broker_; }
+
+ private:
+ friend base::NoDestructor<ReceiverController>;
+ friend class MockReceiverController;
+ friend void ResetForTesting(ReceiverController* controller);
+
+ ReceiverController();
+ ~ReceiverController() override;
+
+ // media::mojom::RemotingSink implementation.
+ void OnMessageFromSource(const std::vector<uint8_t>& message) override;
+
+ // Callback for |rpc_broker_| to send messages.
+ void OnSendRpc(std::unique_ptr<std::vector<uint8_t>> message);
+
+ RpcBroker rpc_broker_;
+
+ const scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
+
+ mojo::Remote<media::mojom::Remotee> media_remotee_;
+ mojo::Receiver<media::mojom::RemotingSink> receiver_{this};
+};
+
+} // namespace remoting
+} // namespace media
+
+#endif // MEDIA_REMOTING_RECEIVER_CONTROLLER_H_
diff --git a/chromium/media/remoting/receiver_unittest.cc b/chromium/media/remoting/receiver_unittest.cc
new file mode 100644
index 00000000000..94cb8cc50ef
--- /dev/null
+++ b/chromium/media/remoting/receiver_unittest.cc
@@ -0,0 +1,471 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/remoting/receiver.h"
+
+#include "base/check.h"
+#include "base/optional.h"
+#include "base/test/gmock_callback_support.h"
+#include "base/test/task_environment.h"
+#include "media/base/audio_decoder_config.h"
+#include "media/base/media_util.h"
+#include "media/base/mock_filters.h"
+#include "media/base/renderer.h"
+#include "media/base/test_helpers.h"
+#include "media/base/video_decoder_config.h"
+#include "media/remoting/mock_receiver_controller.h"
+#include "media/remoting/proto_enum_utils.h"
+#include "media/remoting/proto_utils.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using base::test::RunOnceCallback;
+using testing::_;
+using testing::AtLeast;
+using testing::NiceMock;
+using testing::StrictMock;
+
+namespace media {
+namespace remoting {
+
+class MockSender {
+ public:
+ MockSender(RpcBroker* rpc_broker, int remote_handle)
+ : rpc_broker_(rpc_broker),
+ rpc_handle_(rpc_broker->GetUniqueHandle()),
+ remote_handle_(remote_handle) {
+ rpc_broker_->RegisterMessageReceiverCallback(
+ rpc_handle_, base::BindRepeating(&MockSender::OnReceivedRpc,
+ base::Unretained(this)));
+ }
+
+ MOCK_METHOD(void, AcquireRendererDone, ());
+ MOCK_METHOD(void, InitializeCallback, (bool));
+ MOCK_METHOD(void, FlushUntilCallback, ());
+ MOCK_METHOD(void, OnTimeUpdate, (int64_t, int64_t));
+ MOCK_METHOD(void, OnBufferingStateChange, (BufferingState));
+ MOCK_METHOD(void, OnEnded, ());
+ MOCK_METHOD(void, OnFatalError, ());
+ MOCK_METHOD(void, OnAudioConfigChange, (AudioDecoderConfig));
+ MOCK_METHOD(void, OnVideoConfigChange, (VideoDecoderConfig));
+ MOCK_METHOD(void, OnVideoNaturalSizeChange, (gfx::Size));
+ MOCK_METHOD(void, OnVideoOpacityChange, (bool));
+ MOCK_METHOD(void, OnStatisticsUpdate, (PipelineStatistics));
+ MOCK_METHOD(void, OnWaiting, ());
+
+ void OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(message);
+ switch (message->proc()) {
+ case pb::RpcMessage::RPC_ACQUIRE_RENDERER_DONE:
+ AcquireRendererDone();
+ break;
+ case pb::RpcMessage::RPC_R_INITIALIZE_CALLBACK:
+ InitializeCallback(message->boolean_value());
+ break;
+ case pb::RpcMessage::RPC_R_FLUSHUNTIL_CALLBACK:
+ FlushUntilCallback();
+ break;
+ case pb::RpcMessage::RPC_RC_ONTIMEUPDATE: {
+ DCHECK(message->has_rendererclient_ontimeupdate_rpc());
+ const int64_t time_usec =
+ message->rendererclient_ontimeupdate_rpc().time_usec();
+ const int64_t max_time_usec =
+ message->rendererclient_ontimeupdate_rpc().max_time_usec();
+ OnTimeUpdate(time_usec, max_time_usec);
+ break;
+ }
+ case pb::RpcMessage::RPC_RC_ONBUFFERINGSTATECHANGE: {
+ base::Optional<BufferingState> state = ToMediaBufferingState(
+ message->rendererclient_onbufferingstatechange_rpc().state());
+ if (state.has_value())
+ OnBufferingStateChange(state.value());
+ break;
+ }
+ case pb::RpcMessage::RPC_RC_ONENDED:
+ OnEnded();
+ break;
+ case pb::RpcMessage::RPC_RC_ONERROR:
+ OnFatalError();
+ break;
+ case pb::RpcMessage::RPC_RC_ONAUDIOCONFIGCHANGE: {
+ DCHECK(message->has_rendererclient_onaudioconfigchange_rpc());
+ const auto* audio_config_message =
+ message->mutable_rendererclient_onaudioconfigchange_rpc();
+ const pb::AudioDecoderConfig pb_audio_config =
+ audio_config_message->audio_decoder_config();
+ AudioDecoderConfig out_audio_config;
+ ConvertProtoToAudioDecoderConfig(pb_audio_config, &out_audio_config);
+ DCHECK(out_audio_config.IsValidConfig());
+ OnAudioConfigChange(out_audio_config);
+ break;
+ }
+ case pb::RpcMessage::RPC_RC_ONVIDEOCONFIGCHANGE: {
+ DCHECK(message->has_rendererclient_onvideoconfigchange_rpc());
+ const auto* video_config_message =
+ message->mutable_rendererclient_onvideoconfigchange_rpc();
+ const pb::VideoDecoderConfig pb_video_config =
+ video_config_message->video_decoder_config();
+ VideoDecoderConfig out_video_config;
+ ConvertProtoToVideoDecoderConfig(pb_video_config, &out_video_config);
+ DCHECK(out_video_config.IsValidConfig());
+
+ OnVideoConfigChange(out_video_config);
+ break;
+ }
+ case pb::RpcMessage::RPC_RC_ONVIDEONATURALSIZECHANGE: {
+ DCHECK(message->has_rendererclient_onvideonatualsizechange_rpc());
+
+ gfx::Size size(
+ message->rendererclient_onvideonatualsizechange_rpc().width(),
+ message->rendererclient_onvideonatualsizechange_rpc().height());
+ OnVideoNaturalSizeChange(size);
+ break;
+ }
+ case pb::RpcMessage::RPC_RC_ONVIDEOOPACITYCHANGE:
+ OnVideoOpacityChange(message->boolean_value());
+ break;
+ case pb::RpcMessage::RPC_RC_ONSTATISTICSUPDATE: {
+ DCHECK(message->has_rendererclient_onstatisticsupdate_rpc());
+ auto rpc_message = message->rendererclient_onstatisticsupdate_rpc();
+ PipelineStatistics statistics;
+ statistics.audio_bytes_decoded = rpc_message.audio_bytes_decoded();
+ statistics.video_bytes_decoded = rpc_message.video_bytes_decoded();
+ statistics.video_frames_decoded = rpc_message.video_frames_decoded();
+ statistics.video_frames_dropped = rpc_message.video_frames_dropped();
+ statistics.audio_memory_usage = rpc_message.audio_memory_usage();
+ statistics.video_memory_usage = rpc_message.video_memory_usage();
+ OnStatisticsUpdate(statistics);
+ break;
+ }
+ case pb::RpcMessage::RPC_RC_ONWAITINGFORDECRYPTIONKEY:
+ OnWaiting();
+ break;
+
+ default:
+ VLOG(1) << "Unknown RPC: " << message->proc();
+ }
+ }
+
+ void SendRpcAcquireRenderer() {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(RpcBroker::kAcquireRendererHandle);
+ rpc->set_proc(pb::RpcMessage::RPC_ACQUIRE_RENDERER);
+ rpc->set_integer_value(rpc_handle_);
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void SendRpcInitialize() {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(remote_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_R_INITIALIZE);
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void SendRpcSetPlaybackRate(double playback_rate) {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(remote_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_R_SETPLAYBACKRATE);
+ rpc->set_double_value(playback_rate);
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void SendRpcFlushUntil(uint32_t audio_count, uint32_t video_count) {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(remote_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_R_FLUSHUNTIL);
+ pb::RendererFlushUntil* message = rpc->mutable_renderer_flushuntil_rpc();
+ message->set_audio_count(audio_count);
+ message->set_video_count(video_count);
+ message->set_callback_handle(rpc_handle_);
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void SendRpcStartPlayingFrom(base::TimeDelta time) {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(remote_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_R_STARTPLAYINGFROM);
+ rpc->set_integer64_value(time.InMicroseconds());
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void SendRpcSetVolume(float volume) {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(remote_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_R_SETVOLUME);
+ rpc->set_double_value(volume);
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ private:
+ RpcBroker* const rpc_broker_;
+ const int rpc_handle_;
+ const int remote_handle_;
+};
+
+class ReceiverTest : public ::testing::Test {
+ public:
+ ReceiverTest() = default;
+
+ void SetUp() override {
+ mock_controller_ = MockReceiverController::GetInstance();
+ mock_controller_->Initialize(
+ mock_controller_->mock_remotee()->BindNewPipeAndPassRemote());
+ mock_remotee_ = mock_controller_->mock_remotee();
+
+ rpc_broker_ = mock_controller_->rpc_broker();
+ receiver_renderer_handle_ = rpc_broker_->GetUniqueHandle();
+
+ mock_sender_ = std::make_unique<StrictMock<MockSender>>(
+ rpc_broker_, receiver_renderer_handle_);
+
+ rpc_broker_->RegisterMessageReceiverCallback(
+ RpcBroker::kAcquireRendererHandle,
+ base::BindRepeating(&ReceiverTest::OnReceivedRpc,
+ weak_factory_.GetWeakPtr()));
+ }
+
+ void TearDown() override {
+ rpc_broker_->UnregisterMessageReceiverCallback(
+ RpcBroker::kAcquireRendererHandle);
+ }
+
+ void OnReceivedRpc(std::unique_ptr<media::remoting::pb::RpcMessage> message) {
+ DCHECK(message);
+ EXPECT_EQ(message->proc(),
+ media::remoting::pb::RpcMessage::RPC_ACQUIRE_RENDERER);
+ OnAcquireRenderer(std::move(message));
+ }
+
+ void OnAcquireRenderer(
+ std::unique_ptr<media::remoting::pb::RpcMessage> message) {
+ DCHECK(message->has_integer_value());
+ DCHECK(message->integer_value() != RpcBroker::kInvalidHandle);
+
+ if (sender_renderer_handle_ == RpcBroker::kInvalidHandle) {
+ sender_renderer_handle_ = message->integer_value();
+ SetRemoteHandle();
+ }
+ }
+
+ void OnAcquireRendererDone(int receiver_renderer_handle) {
+ DVLOG(3) << __func__
+ << ": Issues RPC_ACQUIRE_RENDERER_DONE RPC message. remote_handle="
+ << sender_renderer_handle_
+ << " rpc_handle=" << receiver_renderer_handle;
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(sender_renderer_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
+ rpc->set_integer_value(receiver_renderer_handle);
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void CreateReceiver() {
+ auto renderer = std::make_unique<NiceMock<MockRenderer>>();
+ mock_renderer_ = renderer.get();
+ receiver_ = std::make_unique<Receiver>(
+ receiver_renderer_handle_, sender_renderer_handle_, mock_controller_,
+ base::ThreadTaskRunnerHandle::Get(), std::move(renderer),
+ base::BindOnce(&ReceiverTest::OnAcquireRendererDone,
+ weak_factory_.GetWeakPtr()));
+ }
+
+ void SetRemoteHandle() {
+ if (!receiver_)
+ return;
+ receiver_->SetRemoteHandle(sender_renderer_handle_);
+ }
+
+ void InitializeReceiver() {
+ receiver_->Initialize(&mock_media_resource_, nullptr,
+ base::BindOnce(&ReceiverTest::OnRendererInitialized,
+ weak_factory_.GetWeakPtr()));
+ }
+
+ MOCK_METHOD(void, OnRendererInitialized, (PipelineStatus));
+
+ base::test::TaskEnvironment task_environment_;
+
+ int sender_renderer_handle_ = RpcBroker::kInvalidHandle;
+ int receiver_renderer_handle_ = RpcBroker::kInvalidHandle;
+
+ MockMediaResource mock_media_resource_;
+ MockRenderer* mock_renderer_;
+ std::unique_ptr<MockSender> mock_sender_;
+
+ RpcBroker* rpc_broker_;
+ MockRemotee* mock_remotee_;
+ MockReceiverController* mock_controller_;
+ std::unique_ptr<Receiver> receiver_;
+
+ base::WeakPtrFactory<ReceiverTest> weak_factory_{this};
+};
+
+TEST_F(ReceiverTest, AcquireRendererBeforeCreateReceiver) {
+ mock_sender_->SendRpcAcquireRenderer();
+ EXPECT_CALL(*mock_sender_, AcquireRendererDone()).Times(1);
+ CreateReceiver();
+ task_environment_.RunUntilIdle();
+}
+
+TEST_F(ReceiverTest, AcquireRendererAfterCreateReceiver) {
+ CreateReceiver();
+ EXPECT_CALL(*mock_sender_, AcquireRendererDone()).Times(1);
+ mock_sender_->SendRpcAcquireRenderer();
+ task_environment_.RunUntilIdle();
+}
+
+// |Receiver::Initialize| will be called by the local pipeline, and the
+// |Receiver::RpcInitialize| will be called once it received the
+// RPC_R_INITIALIZE messages, so these two initialization functions are possible
+// to be called in difference orders.
+//
+// Call |Receiver::Initialize| first, then send RPC_R_INITIALIZE.
+TEST_F(ReceiverTest, InitializeBeforeRpcInitialize) {
+ EXPECT_CALL(*mock_sender_, AcquireRendererDone()).Times(1);
+ mock_sender_->SendRpcAcquireRenderer();
+ CreateReceiver();
+
+ EXPECT_CALL(*mock_renderer_,
+ OnInitialize(&mock_media_resource_, receiver_.get(), _))
+ .WillOnce(RunOnceCallback<2>(PipelineStatus::PIPELINE_OK));
+ EXPECT_CALL(*this, OnRendererInitialized(PipelineStatus::PIPELINE_OK))
+ .Times(1);
+ EXPECT_CALL(*mock_sender_, InitializeCallback(true)).Times(1);
+
+ InitializeReceiver();
+ mock_sender_->SendRpcInitialize();
+ task_environment_.RunUntilIdle();
+}
+
+// Send RPC_R_INITIALIZE first, then call |Receiver::Initialize|.
+TEST_F(ReceiverTest, InitializeAfterRpcInitialize) {
+ EXPECT_CALL(*mock_sender_, AcquireRendererDone()).Times(1);
+ mock_sender_->SendRpcAcquireRenderer();
+ CreateReceiver();
+
+ EXPECT_CALL(*mock_renderer_,
+ OnInitialize(&mock_media_resource_, receiver_.get(), _))
+ .WillOnce(RunOnceCallback<2>(PipelineStatus::PIPELINE_OK));
+ EXPECT_CALL(*this, OnRendererInitialized(PipelineStatus::PIPELINE_OK))
+ .Times(1);
+ EXPECT_CALL(*mock_sender_, InitializeCallback(true)).Times(1);
+
+ mock_sender_->SendRpcInitialize();
+ InitializeReceiver();
+ task_environment_.RunUntilIdle();
+}
+
+TEST_F(ReceiverTest, RpcRendererMessages) {
+ EXPECT_CALL(*mock_sender_, AcquireRendererDone()).Times(1);
+ mock_sender_->SendRpcAcquireRenderer();
+ CreateReceiver();
+ mock_sender_->SendRpcInitialize();
+ InitializeReceiver();
+ task_environment_.RunUntilIdle();
+
+ // SetVolume
+ const float volume = 0.5;
+ EXPECT_CALL(*mock_renderer_, SetVolume(volume)).Times(1);
+ mock_sender_->SendRpcSetVolume(volume);
+ task_environment_.RunUntilIdle();
+
+ EXPECT_CALL(*mock_sender_, OnTimeUpdate(_, _)).Times(AtLeast(1));
+
+ // SetPlaybackRate
+ const double playback_rate = 1.2;
+ EXPECT_CALL(*mock_renderer_, SetPlaybackRate(playback_rate)).Times(1);
+ mock_sender_->SendRpcSetPlaybackRate(playback_rate);
+ task_environment_.RunUntilIdle();
+
+ // Flush
+ const uint32_t flush_audio_count = 10;
+ const uint32_t flush_video_count = 20;
+ EXPECT_CALL(*mock_renderer_, OnFlush(_)).WillOnce(RunOnceCallback<0>());
+ EXPECT_CALL(*mock_sender_, FlushUntilCallback()).Times(1);
+ mock_sender_->SendRpcFlushUntil(flush_audio_count, flush_video_count);
+ task_environment_.RunUntilIdle();
+ EXPECT_EQ(flush_audio_count, mock_remotee_->flush_audio_count());
+ EXPECT_EQ(flush_video_count, mock_remotee_->flush_video_count());
+
+ // StartPlayingFrom
+ const base::TimeDelta time = base::TimeDelta::FromSeconds(100);
+ EXPECT_CALL(*mock_renderer_, StartPlayingFrom(time)).Times(1);
+ mock_sender_->SendRpcStartPlayingFrom(time);
+ task_environment_.RunUntilIdle();
+}
+
+TEST_F(ReceiverTest, RendererClientInterface) {
+ EXPECT_CALL(*mock_sender_, AcquireRendererDone()).Times(1);
+ mock_sender_->SendRpcAcquireRenderer();
+ CreateReceiver();
+ mock_sender_->SendRpcInitialize();
+ InitializeReceiver();
+ task_environment_.RunUntilIdle();
+
+ // OnBufferingStateChange
+ EXPECT_CALL(*mock_sender_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH))
+ .Times(1);
+ receiver_->OnBufferingStateChange(BUFFERING_HAVE_ENOUGH,
+ BUFFERING_CHANGE_REASON_UNKNOWN);
+ task_environment_.RunUntilIdle();
+
+ // OnEnded
+ EXPECT_CALL(*mock_sender_, OnEnded()).Times(1);
+ receiver_->OnEnded();
+ task_environment_.RunUntilIdle();
+
+ // OnError
+ EXPECT_CALL(*mock_sender_, OnFatalError()).Times(1);
+ receiver_->OnError(PipelineStatus::AUDIO_RENDERER_ERROR);
+ task_environment_.RunUntilIdle();
+
+ // OnAudioConfigChange
+ const auto kNewAudioConfig = TestAudioConfig::Normal();
+ EXPECT_CALL(*mock_sender_,
+ OnAudioConfigChange(DecoderConfigEq(kNewAudioConfig)))
+ .Times(1);
+ receiver_->OnAudioConfigChange(kNewAudioConfig);
+ task_environment_.RunUntilIdle();
+
+ // OnVideoConfigChange
+ const auto kNewVideoConfig = TestVideoConfig::Normal();
+ EXPECT_CALL(*mock_sender_,
+ OnVideoConfigChange(DecoderConfigEq(kNewVideoConfig)))
+ .Times(1);
+ receiver_->OnVideoConfigChange(kNewVideoConfig);
+ task_environment_.RunUntilIdle();
+
+ // OnVideoNaturalSizeChange
+ const gfx::Size size(100, 200);
+ EXPECT_CALL(*mock_sender_, OnVideoNaturalSizeChange(size)).Times(1);
+ receiver_->OnVideoNaturalSizeChange(size);
+ task_environment_.RunUntilIdle();
+ EXPECT_EQ(size, mock_remotee_->changed_size());
+
+ // OnVideoOpacityChange
+ const bool opaque = true;
+ EXPECT_CALL(*mock_sender_, OnVideoOpacityChange(opaque)).Times(1);
+ receiver_->OnVideoOpacityChange(opaque);
+ task_environment_.RunUntilIdle();
+
+ // OnStatisticsUpdate
+ PipelineStatistics statistics;
+ statistics.audio_bytes_decoded = 100;
+ statistics.video_bytes_decoded = 200;
+ statistics.video_frames_decoded = 300;
+ statistics.video_frames_dropped = 400;
+ statistics.audio_memory_usage = 500;
+ statistics.video_memory_usage = 600;
+ EXPECT_CALL(*mock_sender_, OnStatisticsUpdate(statistics)).Times(1);
+ receiver_->OnStatisticsUpdate(statistics);
+ task_environment_.RunUntilIdle();
+
+ // OnWaiting
+ EXPECT_CALL(*mock_sender_, OnWaiting()).Times(1);
+ receiver_->OnWaiting(WaitingReason::kNoDecryptionKey);
+ task_environment_.RunUntilIdle();
+}
+
+} // namespace remoting
+} // namespace media
diff --git a/chromium/media/remoting/remoting_constants.h b/chromium/media/remoting/remoting_constants.h
new file mode 100644
index 00000000000..4dd35dd21f5
--- /dev/null
+++ b/chromium/media/remoting/remoting_constants.h
@@ -0,0 +1,18 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_REMOTING_REMOTING_CONSTANTS_H_
+#define MEDIA_REMOTING_REMOTING_CONSTANTS_H_
+
+namespace media {
+namespace remoting {
+
+// The src attribute for remoting media should use the URL with this scheme.
+// The URL format is "media-remoting:<id>", e.g. "media-remoting:test".
+constexpr char kRemotingScheme[] = "media-remoting";
+
+} // namespace remoting
+} // namespace media
+
+#endif // MEDIA_REMOTING_REMOTING_CONSTANTS_H_
diff --git a/chromium/media/remoting/remoting_renderer_factory.cc b/chromium/media/remoting/remoting_renderer_factory.cc
new file mode 100644
index 00000000000..ea3051e715b
--- /dev/null
+++ b/chromium/media/remoting/remoting_renderer_factory.cc
@@ -0,0 +1,122 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/remoting/remoting_renderer_factory.h"
+
+#include "media/base/demuxer.h"
+#include "media/remoting/receiver.h"
+#include "media/remoting/receiver_controller.h"
+#include "media/remoting/stream_provider.h"
+
+namespace media {
+namespace remoting {
+
+RemotingRendererFactory::RemotingRendererFactory(
+ mojo::PendingRemote<mojom::Remotee> remotee,
+ std::unique_ptr<RendererFactory> renderer_factory,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner)
+ : receiver_controller_(ReceiverController::GetInstance()),
+ rpc_broker_(receiver_controller_->rpc_broker()),
+ renderer_handle_(rpc_broker_->GetUniqueHandle()),
+ waiting_for_remote_handle_receiver_(nullptr),
+ real_renderer_factory_(std::move(renderer_factory)),
+ media_task_runner_(media_task_runner) {
+ DVLOG(2) << __func__;
+ DCHECK(receiver_controller_);
+
+ // Register the callback to listen for the RPC_ACQUIRE_RENDERER message.
+ rpc_broker_->RegisterMessageReceiverCallback(
+ RpcBroker::kAcquireRendererHandle,
+ base::BindRepeating(&RemotingRendererFactory::OnAcquireRenderer,
+ weak_factory_.GetWeakPtr()));
+ receiver_controller_->Initialize(std::move(remotee));
+}
+
+RemotingRendererFactory::~RemotingRendererFactory() {
+ rpc_broker_->UnregisterMessageReceiverCallback(
+ RpcBroker::kAcquireRendererHandle);
+}
+
+std::unique_ptr<Renderer> RemotingRendererFactory::CreateRenderer(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const scoped_refptr<base::TaskRunner>& worker_task_runner,
+ AudioRendererSink* audio_renderer_sink,
+ VideoRendererSink* video_renderer_sink,
+ RequestOverlayInfoCB request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space) {
+ DVLOG(2) << __func__;
+
+ auto receiver = std::make_unique<Receiver>(
+ renderer_handle_, remote_renderer_handle_, receiver_controller_,
+ media_task_runner,
+ real_renderer_factory_->CreateRenderer(
+ media_task_runner, worker_task_runner, audio_renderer_sink,
+ video_renderer_sink, request_overlay_info_cb, target_color_space),
+ base::BindOnce(&RemotingRendererFactory::OnAcquireRendererDone,
+ base::Unretained(this)));
+
+ // If we haven't received a RPC_ACQUIRE_RENDERER yet, keep a reference to
+ // |receiver|, and set its remote handle when we get the call to
+ // OnAcquireRenderer().
+ if (remote_renderer_handle_ == RpcBroker::kInvalidHandle)
+ waiting_for_remote_handle_receiver_ = receiver->GetWeakPtr();
+
+ return std::move(receiver);
+}
+
+void RemotingRendererFactory::OnReceivedRpc(
+ std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(message);
+ if (message->proc() == pb::RpcMessage::RPC_ACQUIRE_RENDERER)
+ OnAcquireRenderer(std::move(message));
+ else
+ VLOG(1) << __func__ << ": Unknown RPC message. proc=" << message->proc();
+}
+
+void RemotingRendererFactory::OnAcquireRenderer(
+ std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(message->has_integer_value());
+ DCHECK(message->integer_value() != RpcBroker::kInvalidHandle);
+
+ remote_renderer_handle_ = message->integer_value();
+
+ // If CreateRenderer() was called before we had a valid
+ // |remote_renderer_handle_|, set it on the already created Receiver.
+ if (waiting_for_remote_handle_receiver_) {
+ // |waiting_for_remote_handle_receiver_| is the WeakPtr of the Receiver
+ // instance and should be deref in the media thread.
+ media_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&Receiver::SetRemoteHandle,
+ waiting_for_remote_handle_receiver_,
+ remote_renderer_handle_));
+ }
+}
+
+void RemotingRendererFactory::OnAcquireRendererDone(int receiver_rpc_handle) {
+ // RPC_ACQUIRE_RENDERER_DONE should be sent only once.
+ //
+ // WebMediaPlayerImpl might destroy and re-create the Receiver instance
+ // several times for saving resources. However, RPC_ACQUIRE_RENDERER_DONE
+ // shouldn't be sent multiple times whenever a Receiver instance is created.
+ if (is_acquire_renderer_done_sent_)
+ return;
+
+ DVLOG(3) << __func__
+ << ": Issues RPC_ACQUIRE_RENDERER_DONE RPC message. remote_handle="
+ << remote_renderer_handle_ << " rpc_handle=" << receiver_rpc_handle;
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(remote_renderer_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_ACQUIRE_RENDERER_DONE);
+ rpc->set_integer_value(receiver_rpc_handle);
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+
+ // Once RPC_ACQUIRE_RENDERER_DONE is sent, it implies there is no Receiver
+ // instance that is waiting for the remote handle.
+ waiting_for_remote_handle_receiver_ = nullptr;
+
+ is_acquire_renderer_done_sent_ = true;
+}
+
+} // namespace remoting
+} // namespace media
diff --git a/chromium/media/remoting/remoting_renderer_factory.h b/chromium/media/remoting/remoting_renderer_factory.h
new file mode 100644
index 00000000000..cf74f57efcb
--- /dev/null
+++ b/chromium/media/remoting/remoting_renderer_factory.h
@@ -0,0 +1,72 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_REMOTING_REMOTING_RENDERER_FACTORY_H_
+#define MEDIA_REMOTING_REMOTING_RENDERER_FACTORY_H_
+
+#include "media/base/renderer_factory.h"
+#include "media/mojo/mojom/remoting.mojom.h"
+#include "media/remoting/rpc_broker.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
+
+namespace media {
+namespace remoting {
+
+class Receiver;
+class ReceiverController;
+
+class RemotingRendererFactory : public RendererFactory {
+ public:
+ RemotingRendererFactory(
+ mojo::PendingRemote<mojom::Remotee> remotee,
+ std::unique_ptr<RendererFactory> renderer_factory,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner);
+ ~RemotingRendererFactory() override;
+
+ // RendererFactory implementation
+ std::unique_ptr<Renderer> CreateRenderer(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const scoped_refptr<base::TaskRunner>& worker_task_runner,
+ AudioRendererSink* audio_renderer_sink,
+ VideoRendererSink* video_renderer_sink,
+ RequestOverlayInfoCB request_overlay_info_cb,
+ const gfx::ColorSpace& target_color_space) override;
+
+ private:
+ // Callback function when RPC message is received.
+ void OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message);
+ void OnAcquireRenderer(std::unique_ptr<pb::RpcMessage> message);
+ void OnAcquireRendererDone(int receiver_rpc_handle);
+
+ // Indicates whether RPC_ACQUIRE_RENDERER_DONE is sent or not.
+ bool is_acquire_renderer_done_sent_ = false;
+
+ ReceiverController* receiver_controller_;
+
+ RpcBroker* rpc_broker_; // Outlives this class.
+
+ // The RPC handle used by all Receiver instances created by |this|. Sent only
+ // once to the sender side, through RPC_ACQUIRE_RENDERER_DONE, regardless of
+ // how many times CreateRenderer() is called.
+ const int renderer_handle_ = RpcBroker::kInvalidHandle;
+
+ // The RPC handle of the CourierRenderer on the sender side. Will be received
+ // once, via an RPC_ACQUIRE_RENDERER message.
+ int remote_renderer_handle_ = RpcBroker::kInvalidHandle;
+
+ // Used to set remote handle if receiving RPC_ACQUIRE_RENDERER after
+ // CreateRenderer() is called.
+ base::WeakPtr<Receiver> waiting_for_remote_handle_receiver_;
+ std::unique_ptr<RendererFactory> real_renderer_factory_;
+
+ // Used to instantiate |receiver_|.
+ const scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
+
+ base::WeakPtrFactory<RemotingRendererFactory> weak_factory_{this};
+};
+
+} // namespace remoting
+} // namespace media
+
+#endif // MEDIA_REMOTING_REMOTING_RENDERER_FACTORY_H_
diff --git a/chromium/media/remoting/rpc_broker.h b/chromium/media/remoting/rpc_broker.h
index 280d63fa112..912e3b61ad4 100644
--- a/chromium/media/remoting/rpc_broker.h
+++ b/chromium/media/remoting/rpc_broker.h
@@ -46,6 +46,7 @@ class RpcBroker {
// Get unique handle value (larger than 0) for RPC message handles.
int GetUniqueHandle();
+ // TODO(chkuo): Change the parameter to accept const ref of RpcMessage.
using ReceiveMessageCallback =
base::RepeatingCallback<void(std::unique_ptr<pb::RpcMessage>)>;
// Register a component to receive messages via the given
@@ -77,10 +78,11 @@ class RpcBroker {
// Predefined handle value for RPC messages related to initialization (before
// the receiver handle(s) are known).
- static constexpr int kAcquireHandle = 0;
+ static constexpr int kAcquireRendererHandle = 0;
+ static constexpr int kAcquireDemuxerHandle = 1;
// The first handle to return from GetUniqueHandle().
- static constexpr int kFirstHandle = 1;
+ static constexpr int kFirstHandle = 100;
private:
// Checks that all method calls occur on the same thread.
diff --git a/chromium/media/remoting/stream_provider.cc b/chromium/media/remoting/stream_provider.cc
index 7ff69c808c5..198cf6c3f14 100644
--- a/chromium/media/remoting/stream_provider.cc
+++ b/chromium/media/remoting/stream_provider.cc
@@ -3,16 +3,23 @@
// found in the LICENSE file.
#include "media/remoting/stream_provider.h"
+#include <vector>
#include "base/bind.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/containers/circular_deque.h"
#include "base/logging.h"
+#include "base/single_thread_task_runner.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_transformation.h"
+#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/remoting/proto_enum_utils.h"
#include "media/remoting/proto_utils.h"
+#include "media/remoting/receiver_controller.h"
+#include "media/remoting/rpc_broker.h"
namespace media {
namespace remoting {
@@ -22,131 +29,140 @@ namespace {
constexpr int kNumFramesInEachReadUntil = 10;
}
-// An implementation of media::DemuxerStream on Media Remoting receiver.
-// Receives data from mojo data pipe, and returns one frame or/and status when
-// Read() is called.
-class MediaStream final : public DemuxerStream {
- public:
- MediaStream(RpcBroker* rpc_broker,
- Type type,
- int remote_handle,
- base::OnceClosure error_callback);
- ~MediaStream() override;
-
- // DemuxerStream implementation.
- void Read(ReadCB read_cb) override;
- bool IsReadPending() const override;
- AudioDecoderConfig audio_decoder_config() override;
- VideoDecoderConfig video_decoder_config() override;
- DemuxerStream::Type type() const override;
- Liveness liveness() const override;
- bool SupportsConfigChanges() override;
-
- void Initialize(base::OnceClosure init_done_cb);
- void FlushUntil(int count);
- void AppendBuffer(scoped_refptr<DecoderBuffer> buffer);
-
- private:
- // RPC messages handlers.
- void OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message);
- void OnInitializeCallback(std::unique_ptr<pb::RpcMessage> message);
- void OnReadUntilCallback(std::unique_ptr<pb::RpcMessage> message);
-
- // Issues the ReadUntil RPC message when read is pending and buffer is empty.
- void SendReadUntil();
-
- // Run and reset the read callback.
- void CompleteRead(DemuxerStream::Status status);
-
- // Update the audio/video decoder config When config changes in the mid
- // stream, the new config will be stored in
- // |next_audio/video_decoder_config_|. Old config will be droped when all
- // associated frames are consumed.
- void UpdateConfig(const pb::AudioDecoderConfig* audio_message,
- const pb::VideoDecoderConfig* video_message);
-
- // Called when any error occurs.
- void OnError(const std::string& error);
-
- RpcBroker* const rpc_broker_; // Outlives this class.
- const Type type_;
- const int remote_handle_;
- const int rpc_handle_;
-
- // Set when Initialize() is called, and will be run after initialization is
- // done.
- base::OnceClosure init_done_callback_;
-
- // The read until count in the last ReadUntil RPC message.
- int last_read_until_count_ = 0;
-
- // Indicates whether Audio/VideoDecoderConfig changed and the frames with the
- // old config are not yet consumed. The new config is stored in the end of
- // |audio/video_decoder_config_|;
- bool config_changed_ = false;
-
- // Indicates whether a ReadUntil RPC message was sent without receiving the
- // ReadUntilCallback message yet.
- bool read_until_sent_ = false;
-
- // Set when Read() is called. Run only once when read completes.
- ReadCB read_complete_callback_;
-
- base::OnceClosure error_callback_; // Called when first error occurs.
-
- base::circular_deque<scoped_refptr<DecoderBuffer>> buffers_;
-
- // Current audio/video config.
- AudioDecoderConfig audio_decoder_config_;
- VideoDecoderConfig video_decoder_config_;
-
- // Stores the new auido/video config when config changes.
- AudioDecoderConfig next_audio_decoder_config_;
- VideoDecoderConfig next_video_decoder_config_;
-
- base::WeakPtrFactory<MediaStream> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(MediaStream);
-};
-
-MediaStream::MediaStream(RpcBroker* rpc_broker,
- Type type,
- int remote_handle,
- base::OnceClosure error_callback)
- : rpc_broker_(rpc_broker),
+// static
+void StreamProvider::MediaStream::CreateOnMainThread(
+ RpcBroker* rpc_broker,
+ Type type,
+ int32_t handle,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ base::OnceCallback<void(MediaStream::UniquePtr)> callback) {
+ MediaStream::UniquePtr stream(
+ new MediaStream(rpc_broker, type, handle, media_task_runner),
+ &DestructionHelper);
+ std::move(callback).Run(std::move(stream));
+}
+
+// static
+void StreamProvider::MediaStream::DestructionHelper(MediaStream* stream) {
+ stream->Destroy();
+}
+
+StreamProvider::MediaStream::MediaStream(
+ RpcBroker* rpc_broker,
+ Type type,
+ int remote_handle,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner)
+ : main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
+ media_task_runner_(media_task_runner),
+ rpc_broker_(rpc_broker),
type_(type),
remote_handle_(remote_handle),
- rpc_handle_(rpc_broker_->GetUniqueHandle()),
- error_callback_(std::move(error_callback)) {
+ rpc_handle_(rpc_broker_->GetUniqueHandle()) {
DCHECK(remote_handle_ != RpcBroker::kInvalidHandle);
- rpc_broker_->RegisterMessageReceiverCallback(
- rpc_handle_, base::BindRepeating(&MediaStream::OnReceivedRpc,
- weak_factory_.GetWeakPtr()));
+
+ media_weak_this_ = media_weak_factory_.GetWeakPtr();
+
+ const RpcBroker::ReceiveMessageCallback receive_callback =
+ BindToLoop(media_task_runner_,
+ BindRepeating(&MediaStream::OnReceivedRpc, media_weak_this_));
+ rpc_broker_->RegisterMessageReceiverCallback(rpc_handle_, receive_callback);
}
-MediaStream::~MediaStream() {
+StreamProvider::MediaStream::~MediaStream() {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
rpc_broker_->UnregisterMessageReceiverCallback(rpc_handle_);
}
-void MediaStream::Initialize(base::OnceClosure init_done_cb) {
- DCHECK(init_done_cb);
- if (!init_done_callback_.is_null()) {
+void StreamProvider::MediaStream::Destroy() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ // Invalidate weak pointers to prevent |this| from receiving RPC calls on the
+ // media thread.
+ media_weak_factory_.InvalidateWeakPtrs();
+
+ // Unbind all mojo pipes and bindings.
+ receiver_.reset();
+ decoder_buffer_reader_.reset();
+
+ // After invalidating all weak ptrs of |media_weak_factory_|, MediaStream
+ // won't be accessed anymore, so using |this| here is safe.
+ main_task_runner_->DeleteSoon(FROM_HERE, this);
+}
+
+void StreamProvider::MediaStream::SendRpcMessageOnMainThread(
+ std::unique_ptr<pb::RpcMessage> message) {
+ // |rpc_broker_| is owned by |receiver_controller_| which is a singleton per
+ // process, so it's safe to use Unretained() here.
+ main_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&RpcBroker::SendMessageToRemote,
+ base::Unretained(rpc_broker_), std::move(message)));
+}
+
+void StreamProvider::MediaStream::Initialize(
+ base::OnceClosure init_done_callback) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(init_done_callback);
+
+ if (init_done_callback_) {
OnError("Duplicate initialization");
return;
}
- init_done_callback_ = std::move(init_done_cb);
- DVLOG(3) << __func__ << "Issues RpcMessage::RPC_DS_INITIALIZE with "
- << "remote_handle=" << remote_handle_
- << " rpc_handle=" << rpc_handle_;
- std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
+ init_done_callback_ = std::move(init_done_callback);
+
+ auto rpc = std::make_unique<pb::RpcMessage>();
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_DS_INITIALIZE);
rpc->set_integer_value(rpc_handle_);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
+}
+
+void StreamProvider::MediaStream::InitializeDataPipe(
+ mojo::ScopedDataPipeConsumerHandle data_pipe) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ decoder_buffer_reader_ =
+ std::make_unique<MojoDecoderBufferReader>(std::move(data_pipe));
+ CompleteInitialize();
+}
+
+void StreamProvider::MediaStream::ReceiveFrame(uint32_t count,
+ mojom::DecoderBufferPtr buffer) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(decoder_buffer_reader_);
+
+ auto callback = BindToCurrentLoop(
+ base::BindOnce(&MediaStream::AppendBuffer, media_weak_this_, count));
+ decoder_buffer_reader_->ReadDecoderBuffer(std::move(buffer),
+ std::move(callback));
+}
+
+void StreamProvider::MediaStream::FlushUntil(uint32_t count) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ if (count < current_frame_count_)
+ return;
+
+ uint32_t buffers_to_erase = count - current_frame_count_;
+
+ if (buffers_to_erase > buffers_.size()) {
+ buffers_.clear();
+ } else {
+ buffers_.erase(buffers_.begin(), buffers_.begin() + buffers_to_erase);
+ }
+
+ current_frame_count_ = count;
+
+ if (!read_complete_callback_.is_null())
+ CompleteRead(DemuxerStream::kAborted);
+
+ read_until_sent_ = false;
}
-void MediaStream::OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message) {
+void StreamProvider::MediaStream::OnReceivedRpc(
+ std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(message->handle() == rpc_handle_);
switch (message->proc()) {
@@ -161,24 +177,21 @@ void MediaStream::OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message) {
}
}
-void MediaStream::OnInitializeCallback(
+void StreamProvider::MediaStream::OnInitializeCallback(
std::unique_ptr<pb::RpcMessage> message) {
- DVLOG(3) << __func__ << "Receives RPC_DS_INITIALIZE_CALLBACK message.";
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
const pb::DemuxerStreamInitializeCallback callback_message =
message->demuxerstream_initializecb_rpc();
if (callback_message.type() != type_) {
OnError("Wrong type");
return;
}
+
if ((type_ == DemuxerStream::AUDIO &&
audio_decoder_config_.IsValidConfig()) ||
(type_ == DemuxerStream::VIDEO &&
video_decoder_config_.IsValidConfig())) {
- OnError("Duplicate Iniitialize");
- return;
- }
- if (init_done_callback_.is_null()) {
- OnError("Iniitialize callback missing");
+ OnError("Duplicate initialization");
return;
}
@@ -186,21 +199,41 @@ void MediaStream::OnInitializeCallback(
callback_message.has_audio_decoder_config()) {
const pb::AudioDecoderConfig audio_message =
callback_message.audio_decoder_config();
- UpdateConfig(&audio_message, nullptr);
+ UpdateAudioConfig(audio_message);
} else if (type_ == DemuxerStream::VIDEO &&
callback_message.has_video_decoder_config()) {
const pb::VideoDecoderConfig video_message =
callback_message.video_decoder_config();
- UpdateConfig(nullptr, &video_message);
+ UpdateVideoConfig(video_message);
} else {
- OnError("config missing");
+ OnError("Config missing");
return;
}
+
+ rpc_initialized_ = true;
+ CompleteInitialize();
+}
+
+void StreamProvider::MediaStream::CompleteInitialize() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ // Initialization is finished once RPC_DS_INITIALIZE_CALLBACK is received and
+ // |decoder_buffer_reader_| is created.
+ if (!rpc_initialized_ || !decoder_buffer_reader_)
+ return;
+
+ if (!init_done_callback_) {
+ OnError("Initialize callback missing");
+ return;
+ }
+
std::move(init_done_callback_).Run();
}
-void MediaStream::OnReadUntilCallback(std::unique_ptr<pb::RpcMessage> message) {
- DVLOG(3) << __func__ << ": Receives RPC_DS_READUNTIL_CALLBACK message.";
+void StreamProvider::MediaStream::OnReadUntilCallback(
+ std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
if (!read_until_sent_) {
OnError("Unexpected ReadUntilCallback");
return;
@@ -208,98 +241,90 @@ void MediaStream::OnReadUntilCallback(std::unique_ptr<pb::RpcMessage> message) {
read_until_sent_ = false;
const pb::DemuxerStreamReadUntilCallback callback_message =
message->demuxerstream_readuntilcb_rpc();
- last_read_until_count_ = callback_message.count();
+ total_received_frame_count_ = callback_message.count();
+
if (ToDemuxerStreamStatus(callback_message.status()) == kConfigChanged) {
config_changed_ = true;
+
if (callback_message.has_audio_decoder_config()) {
const pb::AudioDecoderConfig audio_message =
callback_message.audio_decoder_config();
- UpdateConfig(&audio_message, nullptr);
+ UpdateAudioConfig(audio_message);
}
+
if (callback_message.has_video_decoder_config()) {
const pb::VideoDecoderConfig video_message =
callback_message.video_decoder_config();
- UpdateConfig(nullptr, &video_message);
+ UpdateVideoConfig(video_message);
}
+
if (buffers_.empty() && !read_complete_callback_.is_null())
CompleteRead(DemuxerStream::kConfigChanged);
+
return;
}
+
if (buffers_.empty() && !read_complete_callback_.is_null())
SendReadUntil();
}
-void MediaStream::UpdateConfig(const pb::AudioDecoderConfig* audio_message,
- const pb::VideoDecoderConfig* video_message) {
- if (type_ == AUDIO) {
- DCHECK(audio_message && !video_message);
- AudioDecoderConfig audio_config;
- ConvertProtoToAudioDecoderConfig(*audio_message, &audio_config);
- if (!audio_config.IsValidConfig()) {
- OnError("Invalid audio config");
- return;
- }
- if (config_changed_) {
- DCHECK(audio_decoder_config_.IsValidConfig());
- DCHECK(!next_audio_decoder_config_.IsValidConfig());
- next_audio_decoder_config_ = audio_config;
- } else {
- DCHECK(!audio_decoder_config_.IsValidConfig());
- audio_decoder_config_ = audio_config;
- }
- } else if (type_ == VIDEO) {
- DCHECK(video_message && !audio_message);
- VideoDecoderConfig video_config;
- ConvertProtoToVideoDecoderConfig(*video_message, &video_config);
- if (!video_config.IsValidConfig()) {
- OnError("Invalid video config");
- return;
- }
- if (config_changed_) {
- DCHECK(video_decoder_config_.IsValidConfig());
- DCHECK(!next_video_decoder_config_.IsValidConfig());
- next_video_decoder_config_ = video_config;
- } else {
- DCHECK(!video_decoder_config_.IsValidConfig());
- video_decoder_config_ = video_config;
- }
+void StreamProvider::MediaStream::UpdateAudioConfig(
+ const pb::AudioDecoderConfig& audio_message) {
+ DCHECK(type_ == AUDIO);
+ AudioDecoderConfig audio_config;
+ ConvertProtoToAudioDecoderConfig(audio_message, &audio_config);
+ if (!audio_config.IsValidConfig()) {
+ OnError("Invalid audio config");
+ return;
+ }
+ if (config_changed_) {
+ DCHECK(audio_decoder_config_.IsValidConfig());
+ DCHECK(!next_audio_decoder_config_.IsValidConfig());
+ next_audio_decoder_config_ = audio_config;
} else {
- NOTREACHED() << ": Only supports video or audio stream.";
+ DCHECK(!audio_decoder_config_.IsValidConfig());
+ audio_decoder_config_ = audio_config;
}
}
-void MediaStream::SendReadUntil() {
+void StreamProvider::MediaStream::UpdateVideoConfig(
+ const pb::VideoDecoderConfig& video_message) {
+ DCHECK(type_ == VIDEO);
+ VideoDecoderConfig video_config;
+ ConvertProtoToVideoDecoderConfig(video_message, &video_config);
+ if (!video_config.IsValidConfig()) {
+ OnError("Invalid video config");
+ return;
+ }
+ if (config_changed_) {
+ DCHECK(video_decoder_config_.IsValidConfig());
+ DCHECK(!next_video_decoder_config_.IsValidConfig());
+ next_video_decoder_config_ = video_config;
+ } else {
+ DCHECK(!video_decoder_config_.IsValidConfig());
+ video_decoder_config_ = video_config;
+ }
+}
+
+void StreamProvider::MediaStream::SendReadUntil() {
if (read_until_sent_)
return;
- DVLOG(3) << "Issues RPC_DS_READUNTIL RPC message to remote_handle_="
- << remote_handle_ << " with callback handle=" << rpc_handle_
- << " count=" << last_read_until_count_;
std::unique_ptr<pb::RpcMessage> rpc(new pb::RpcMessage());
rpc->set_handle(remote_handle_);
rpc->set_proc(pb::RpcMessage::RPC_DS_READUNTIL);
auto* message = rpc->mutable_demuxerstream_readuntil_rpc();
- last_read_until_count_ += kNumFramesInEachReadUntil;
- message->set_count(last_read_until_count_);
+ message->set_count(total_received_frame_count_ + kNumFramesInEachReadUntil);
message->set_callback_handle(rpc_handle_);
- rpc_broker_->SendMessageToRemote(std::move(rpc));
+ SendRpcMessageOnMainThread(std::move(rpc));
read_until_sent_ = true;
}
-void MediaStream::FlushUntil(int count) {
- while (!buffers_.empty()) {
- buffers_.pop_front();
- }
-
- last_read_until_count_ = count;
- if (!read_complete_callback_.is_null())
- CompleteRead(DemuxerStream::kAborted);
- read_until_sent_ = false;
-}
-
-void MediaStream::Read(ReadCB read_cb) {
+void StreamProvider::MediaStream::Read(ReadCB read_cb) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
DCHECK(read_complete_callback_.is_null());
DCHECK(read_cb);
+
read_complete_callback_ = std::move(read_cb);
if (buffers_.empty() && config_changed_) {
CompleteRead(DemuxerStream::kConfigChanged);
@@ -315,26 +340,21 @@ void MediaStream::Read(ReadCB read_cb) {
CompleteRead(DemuxerStream::kOk);
}
-bool MediaStream::IsReadPending() const {
+bool StreamProvider::MediaStream::IsReadPending() const {
return !read_complete_callback_.is_null();
}
-void MediaStream::CompleteRead(DemuxerStream::Status status) {
- DVLOG(3) << __func__ << ": " << status;
+void StreamProvider::MediaStream::CompleteRead(DemuxerStream::Status status) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
switch (status) {
case DemuxerStream::kConfigChanged:
if (type_ == AUDIO) {
DCHECK(next_audio_decoder_config_.IsValidConfig());
audio_decoder_config_ = next_audio_decoder_config_;
-#if DCHECK_IS_ON()
- next_audio_decoder_config_ = AudioDecoderConfig();
-#endif // DCHECK_IS_ON()
} else {
DCHECK(next_video_decoder_config_.IsValidConfig());
video_decoder_config_ = next_video_decoder_config_;
-#if DCHECK_IS_ON()
- next_video_decoder_config_ = VideoDecoderConfig();
-#endif // DCHECK_IS_ON()
}
config_changed_ = false;
std::move(read_complete_callback_).Run(status, nullptr);
@@ -344,111 +364,263 @@ void MediaStream::CompleteRead(DemuxerStream::Status status) {
std::move(read_complete_callback_).Run(status, nullptr);
return;
case DemuxerStream::kOk:
+ DCHECK(read_complete_callback_);
DCHECK(!buffers_.empty());
+ DCHECK_LT(current_frame_count_, buffered_frame_count_);
scoped_refptr<DecoderBuffer> frame_data = buffers_.front();
buffers_.pop_front();
+ ++current_frame_count_;
std::move(read_complete_callback_).Run(status, frame_data);
return;
}
}
-AudioDecoderConfig MediaStream::audio_decoder_config() {
- DVLOG(3) << __func__;
+AudioDecoderConfig StreamProvider::MediaStream::audio_decoder_config() {
DCHECK(type_ == DemuxerStream::AUDIO);
return audio_decoder_config_;
}
-VideoDecoderConfig MediaStream::video_decoder_config() {
- DVLOG(3) << __func__;
+VideoDecoderConfig StreamProvider::MediaStream::video_decoder_config() {
DCHECK(type_ == DemuxerStream::VIDEO);
return video_decoder_config_;
}
-DemuxerStream::Type MediaStream::type() const {
+DemuxerStream::Type StreamProvider::MediaStream::type() const {
return type_;
}
-DemuxerStream::Liveness MediaStream::liveness() const {
+DemuxerStream::Liveness StreamProvider::MediaStream::liveness() const {
return DemuxerStream::LIVENESS_LIVE;
}
-bool MediaStream::SupportsConfigChanges() {
+bool StreamProvider::MediaStream::SupportsConfigChanges() {
return true;
}
-void MediaStream::AppendBuffer(scoped_refptr<DecoderBuffer> buffer) {
- DVLOG(3) << __func__;
+void StreamProvider::MediaStream::AppendBuffer(
+ uint32_t count,
+ scoped_refptr<DecoderBuffer> buffer) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ // Drop flushed frame.
+ if (count < current_frame_count_)
+ return;
+
+ // Continuity check.
+ DCHECK(buffers_.empty() || buffered_frame_count_ == count);
+
buffers_.push_back(buffer);
+ buffered_frame_count_ = count + 1;
+
if (!read_complete_callback_.is_null())
CompleteRead(DemuxerStream::kOk);
}
-void MediaStream::OnError(const std::string& error) {
- VLOG(1) << __func__ << ": " << error;
- if (error_callback_.is_null())
- return;
- std::move(error_callback_).Run();
+void StreamProvider::MediaStream::OnError(const std::string& error) {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(remote_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_DS_ONERROR);
+ SendRpcMessageOnMainThread(std::move(rpc));
}
-StreamProvider::StreamProvider(RpcBroker* rpc_broker,
- base::OnceClosure error_callback)
- : rpc_broker_(rpc_broker), error_callback_(std::move(error_callback)) {}
+StreamProvider::StreamProvider(
+ ReceiverController* receiver_controller,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner)
+ : main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
+ media_task_runner_(media_task_runner),
+ receiver_controller_(receiver_controller),
+ rpc_broker_(receiver_controller_->rpc_broker()) {
+ DCHECK(receiver_controller_);
+ DCHECK(rpc_broker_);
+
+ media_weak_this_ = media_weak_factory_.GetWeakPtr();
+
+ auto callback = BindToLoop(
+ media_task_runner_,
+ base::BindRepeating(&StreamProvider::OnReceivedRpc, media_weak_this_));
+ rpc_broker_->RegisterMessageReceiverCallback(RpcBroker::kAcquireDemuxerHandle,
+ callback);
+}
-StreamProvider::~StreamProvider() = default;
+StreamProvider::~StreamProvider() {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ rpc_broker_->UnregisterMessageReceiverCallback(
+ RpcBroker::kAcquireDemuxerHandle);
+}
-void StreamProvider::Initialize(int remote_audio_handle,
- int remote_video_handle,
- base::OnceClosure callback) {
- DVLOG(3) << __func__ << ": remote_audio_handle=" << remote_audio_handle
- << " remote_video_handle=" << remote_video_handle;
- if (!init_done_callback_.is_null()) {
- OnError("Duplicate initialization.");
- return;
- }
- if (remote_audio_handle == RpcBroker::kInvalidHandle &&
- remote_video_handle == RpcBroker::kInvalidHandle) {
- OnError("Invalid handle.");
- return;
+std::string StreamProvider::GetDisplayName() const {
+ return "media::remoting::StreamProvider";
+}
+
+void StreamProvider::Initialize(DemuxerHost* host,
+ PipelineStatusCallback status_cb) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ init_done_callback_ = std::move(status_cb);
+ CompleteInitialize();
+}
+
+void StreamProvider::AbortPendingReads() {}
+
+void StreamProvider::StartWaitingForSeek(base::TimeDelta seek_time) {}
+
+void StreamProvider::CancelPendingSeek(base::TimeDelta seek_time) {}
+
+void StreamProvider::Seek(base::TimeDelta time,
+ PipelineStatusCallback seek_cb) {
+ media_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(std::move(seek_cb), PipelineStatus::PIPELINE_OK));
+}
+
+void StreamProvider::Stop() {}
+
+base::TimeDelta StreamProvider::GetStartTime() const {
+ return base::TimeDelta();
+}
+
+base::Time StreamProvider::GetTimelineOffset() const {
+ return base::Time();
+}
+
+int64_t StreamProvider::GetMemoryUsage() const {
+ return 0;
+}
+
+base::Optional<container_names::MediaContainerName>
+StreamProvider::GetContainerForMetrics() const {
+ return base::Optional<container_names::MediaContainerName>();
+}
+
+void StreamProvider::OnEnabledAudioTracksChanged(
+ const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) {
+ std::vector<DemuxerStream*> streams;
+ std::move(change_completed_cb).Run(DemuxerStream::AUDIO, streams);
+ DVLOG(1) << "Track changes are not supported.";
+}
+
+void StreamProvider::OnSelectedVideoTrackChanged(
+ const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) {
+ std::vector<DemuxerStream*> streams;
+ std::move(change_completed_cb).Run(DemuxerStream::VIDEO, streams);
+ DVLOG(1) << "Track changes are not supported.";
+}
+
+void StreamProvider::Destroy() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ if (init_done_callback_)
+ std::move(init_done_callback_).Run(PIPELINE_ERROR_ABORT);
+
+ // Invalid weak pointers to prevent |this| from receiving RPC calls on the
+ // media thread.
+ media_weak_factory_.InvalidateWeakPtrs();
+
+ audio_stream_.reset();
+ video_stream_.reset();
+
+ // After invalidating all weak ptrs of |media_weak_factory_|, StreamProvider
+ // won't be access anymore, so using |this| here is safe.
+ main_task_runner_->DeleteSoon(FROM_HERE, this);
+}
+
+void StreamProvider::OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message) {
+ switch (message->proc()) {
+ case pb::RpcMessage::RPC_ACQUIRE_DEMUXER:
+ OnAcquireDemuxer(std::move(message));
+ break;
+ default:
+ VLOG(3) << __func__ << "Unknown RPC message.";
}
+}
- init_done_callback_ = std::move(callback);
- if (remote_audio_handle != RpcBroker::kInvalidHandle) {
- audio_stream_.reset(new MediaStream(
- rpc_broker_, DemuxerStream::AUDIO, remote_audio_handle,
- base::BindOnce(&StreamProvider::OnError, weak_factory_.GetWeakPtr(),
- "Media stream error")));
- audio_stream_->Initialize(base::BindOnce(
- &StreamProvider::AudioStreamInitialized, weak_factory_.GetWeakPtr()));
+void StreamProvider::OnAcquireDemuxer(std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(message->has_acquire_demuxer_rpc());
+
+ int32_t audio_demuxer_handle =
+ message->acquire_demuxer_rpc().audio_demuxer_handle();
+ int32_t video_demuxer_handle =
+ message->acquire_demuxer_rpc().video_demuxer_handle();
+ has_audio_ = audio_demuxer_handle != RpcBroker::kInvalidHandle;
+ has_video_ = video_demuxer_handle != RpcBroker::kInvalidHandle;
+
+ DCHECK(has_audio_ || has_video_);
+
+ if (has_audio_) {
+ auto callback = BindToCurrentLoop(base::BindOnce(
+ &StreamProvider::OnAudioStreamCreated, media_weak_this_));
+ main_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&MediaStream::CreateOnMainThread, rpc_broker_,
+ DemuxerStream::AUDIO, audio_demuxer_handle,
+ media_task_runner_, std::move(callback)));
}
- if (remote_video_handle != RpcBroker::kInvalidHandle) {
- video_stream_.reset(new MediaStream(
- rpc_broker_, DemuxerStream::VIDEO, remote_video_handle,
- base::BindOnce(&StreamProvider::OnError, weak_factory_.GetWeakPtr(),
- "Media stream error")));
- video_stream_->Initialize(base::BindOnce(
- &StreamProvider::VideoStreamInitialized, weak_factory_.GetWeakPtr()));
+
+ if (has_video_) {
+ auto callback = BindToCurrentLoop(base::BindOnce(
+ &StreamProvider::OnVideoStreamCreated, media_weak_this_));
+ main_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&MediaStream::CreateOnMainThread, rpc_broker_,
+ DemuxerStream::VIDEO, video_demuxer_handle,
+ media_task_runner_, std::move(callback)));
}
}
-void StreamProvider::OnError(const std::string& error) {
- VLOG(1) << __func__ << ": " << error;
- if (error_callback_.is_null())
+void StreamProvider::OnAudioStreamCreated(MediaStream::UniquePtr stream) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ audio_stream_ = std::move(stream);
+ audio_stream_->Initialize(base::BindOnce(
+ &StreamProvider::OnAudioStreamInitialized, media_weak_this_));
+ InitializeDataPipe();
+}
+
+void StreamProvider::OnVideoStreamCreated(MediaStream::UniquePtr stream) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ video_stream_ = std::move(stream);
+ video_stream_->Initialize(base::BindOnce(
+ &StreamProvider::OnVideoStreamInitialized, media_weak_this_));
+ InitializeDataPipe();
+}
+
+void StreamProvider::InitializeDataPipe() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ if ((has_audio_ && !audio_stream_) || (has_video_ && !video_stream_))
return;
- std::move(error_callback_).Run();
+
+ receiver_controller_->StartDataStreams(
+ has_audio_ ? audio_stream_->BindNewPipeAndPassRemote()
+ : mojo::NullRemote(),
+ has_video_ ? video_stream_->BindNewPipeAndPassRemote()
+ : mojo::NullRemote());
}
-void StreamProvider::AudioStreamInitialized() {
- DCHECK(!init_done_callback_.is_null());
+void StreamProvider::OnAudioStreamInitialized() {
audio_stream_initialized_ = true;
- if (video_stream_initialized_ || !video_stream_)
- std::move(init_done_callback_).Run();
+ CompleteInitialize();
}
-void StreamProvider::VideoStreamInitialized() {
- DCHECK(!init_done_callback_.is_null());
+void StreamProvider::OnVideoStreamInitialized() {
video_stream_initialized_ = true;
- if (audio_stream_initialized_ || !audio_stream_)
- std::move(init_done_callback_).Run();
+ CompleteInitialize();
+}
+
+void StreamProvider::CompleteInitialize() {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ // Haven't receive RpcAcquireRenderer message
+ if (!has_audio_ && !has_video_)
+ return;
+
+ if ((has_audio_ && !audio_stream_initialized_) ||
+ (has_video_ && !video_stream_initialized_) || !init_done_callback_)
+ return;
+
+ // |init_done_callback_| should be called on |media_task_runner_|.
+ std::move(init_done_callback_).Run(PipelineStatus::PIPELINE_OK);
}
std::vector<DemuxerStream*> StreamProvider::GetAllStreams() {
@@ -460,25 +632,14 @@ std::vector<DemuxerStream*> StreamProvider::GetAllStreams() {
return streams;
}
-void StreamProvider::AppendBuffer(DemuxerStream::Type type,
- scoped_refptr<DecoderBuffer> buffer) {
- if (type == DemuxerStream::AUDIO)
- audio_stream_->AppendBuffer(buffer);
- else if (type == DemuxerStream::VIDEO)
- video_stream_->AppendBuffer(buffer);
- else
- NOTREACHED() << ": Only supports video or audio stream.";
-}
+} // namespace remoting
+} // namespace media
+
+namespace std {
-void StreamProvider::FlushUntil(DemuxerStream::Type type, int count) {
- DVLOG(3) << __func__ << ": type=" << type << " count=" << count;
- if (type == DemuxerStream::AUDIO)
- audio_stream_->FlushUntil(count);
- else if (type == DemuxerStream::VIDEO)
- video_stream_->FlushUntil(count);
- else
- NOTREACHED() << ": Only supports video or audio stream.";
+void default_delete<media::remoting::StreamProvider>::operator()(
+ media::remoting::StreamProvider* ptr) const {
+ ptr->Destroy();
}
-} // namespace remoting
-} // namespace media
+} // namespace std
diff --git a/chromium/media/remoting/stream_provider.h b/chromium/media/remoting/stream_provider.h
index f6c7802008b..95d2c214279 100644
--- a/chromium/media/remoting/stream_provider.h
+++ b/chromium/media/remoting/stream_provider.h
@@ -5,62 +5,280 @@
#ifndef MEDIA_REMOTING_STREAM_PROVIDER_H_
#define MEDIA_REMOTING_STREAM_PROVIDER_H_
+#include "base/callback_forward.h"
+#include "base/containers/circular_deque.h"
+#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
+#include "base/sequenced_task_runner_helpers.h"
+#include "base/single_thread_task_runner.h"
#include "media/base/audio_decoder_config.h"
+#include "media/base/demuxer.h"
#include "media/base/demuxer_stream.h"
-#include "media/base/media_resource.h"
#include "media/base/video_decoder_config.h"
-#include "media/remoting/rpc_broker.h"
+#include "media/mojo/mojom/remoting.mojom.h"
+#include "media/remoting/media_remoting_rpc.pb.h"
+#include "mojo/public/cpp/bindings/receiver.h"
+#include "mojo/public/cpp/bindings/remote.h"
+
+namespace base {
+class SingleThreadTaskRunner;
+} // namespace base
namespace media {
+
+class MojoDecoderBufferReader;
+
namespace remoting {
-class MediaStream;
+class ReceiverController;
+class RpcBroker;
// The media stream provider for Media Remoting receiver.
-class StreamProvider final : public MediaResource {
+class StreamProvider final : public Demuxer {
public:
- StreamProvider(RpcBroker* rpc_broker, base::OnceClosure error_callback);
+ StreamProvider(
+ ReceiverController* receiver_controller,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner);
- ~StreamProvider() override;
-
- // MediaResource implemenation.
+ // Demuxer implementation.
std::vector<DemuxerStream*> GetAllStreams() override;
+ std::string GetDisplayName() const override;
+ void Initialize(DemuxerHost* host, PipelineStatusCallback status_cb) override;
+ void AbortPendingReads() override;
+ void StartWaitingForSeek(base::TimeDelta seek_time) override;
+ void CancelPendingSeek(base::TimeDelta seek_time) override;
+ void Seek(base::TimeDelta time, PipelineStatusCallback status_cb) override;
+ void Stop() override;
+ base::TimeDelta GetStartTime() const override;
+ base::Time GetTimelineOffset() const override;
+ int64_t GetMemoryUsage() const override;
+ base::Optional<container_names::MediaContainerName> GetContainerForMetrics()
+ const override;
+ void OnEnabledAudioTracksChanged(const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) override;
+ void OnSelectedVideoTrackChanged(const std::vector<MediaTrack::Id>& track_ids,
+ base::TimeDelta curr_time,
+ TrackChangeCB change_completed_cb) override;
- void Initialize(int remote_audio_handle,
- int remote_video_handle,
- base::OnceClosure callback);
- void AppendBuffer(DemuxerStream::Type type,
- scoped_refptr<DecoderBuffer> buffer);
- void FlushUntil(DemuxerStream::Type type, int count);
+ protected:
+ // Deletion is only allowed via Destroy().
+ ~StreamProvider() override;
private:
- // Called when audio/video stream is initialized.
- void AudioStreamInitialized();
- void VideoStreamInitialized();
+ // An implementation of media::DemuxerStream on Media Remoting receiver.
+ // Receives data from mojo data pipe, and returns one frame or/and status when
+ // Read() is called.
+ class MediaStream final : public DemuxerStream,
+ public mojom::RemotingDataStreamReceiver {
+ public:
+ using UniquePtr =
+ std::unique_ptr<MediaStream, std::function<void(MediaStream*)>>;
- // Called when any error occurs.
- void OnError(const std::string& error);
+ // MediaStream should be created on the main thread to be able to get unique
+ // handle ID from |rpc_broker_|.
+ static void CreateOnMainThread(
+ RpcBroker* rpc_broker,
+ Type type,
+ int32_t handle,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ base::OnceCallback<void(MediaStream::UniquePtr)> callback);
- RpcBroker* const rpc_broker_; // Outlives this class.
- std::unique_ptr<MediaStream> video_stream_;
- std::unique_ptr<MediaStream> audio_stream_;
- bool audio_stream_initialized_ = false;
- bool video_stream_initialized_ = false;
+ // In order to destroy members in the right thread, MediaStream has to use
+ // DestructionHelper() to destroy itself.
+ static void DestructionHelper(MediaStream* stream);
- // Set when Initialize() is called, and will run when both video and audio
- // streams are initialized or error occurs.
- base::OnceClosure init_done_callback_;
+ MediaStream(
+ RpcBroker* rpc_broker,
+ Type type,
+ int32_t remote_handle,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner);
+
+ // DemuxerStream implementation.
+ void Read(ReadCB read_cb) override;
+ bool IsReadPending() const override;
+ AudioDecoderConfig audio_decoder_config() override;
+ VideoDecoderConfig video_decoder_config() override;
+ DemuxerStream::Type type() const override;
+ Liveness liveness() const override;
+ bool SupportsConfigChanges() override;
+
+ void Initialize(base::OnceClosure init_done_cb);
+
+ mojo::PendingRemote<mojom::RemotingDataStreamReceiver>
+ BindNewPipeAndPassRemote() {
+ return receiver_.BindNewPipeAndPassRemote();
+ }
+
+ private:
+ friend class base::DeleteHelper<MediaStream>; // For using DeleteSoon().
+ // For testing.
+ friend class StreamProviderTest;
+
+ // Prevent from unique_ptr using ~MediaStream() to destroy MediaStream
+ // instances. Use DestructionHelper() as the custom deleter with unique_ptr
+ // to destroy MediaStream instances.
+ ~MediaStream() override;
+
+ void Destroy();
+
+ // Send RPC message on |main_task_runner_|.
+ void SendRpcMessageOnMainThread(std::unique_ptr<pb::RpcMessage> message);
+
+ // mojom::RemotingDataStreamReceiver implementation.
+ void InitializeDataPipe(
+ mojo::ScopedDataPipeConsumerHandle data_pipe) override;
+ void ReceiveFrame(uint32_t count, mojom::DecoderBufferPtr buffer) override;
+ void FlushUntil(uint32_t count) override;
+
+ // RPC messages handlers.
+ void OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message);
+ void OnInitializeCallback(std::unique_ptr<pb::RpcMessage> message);
+ void OnReadUntilCallback(std::unique_ptr<pb::RpcMessage> message);
+
+ // Issues the ReadUntil RPC message when read is pending and buffer is
+ // empty.
+ void SendReadUntil();
+
+ // Run |init_done_callback_| when MojoDecoderBufferReader is created and
+ // received RPC_DS_INITIALIZE_CALLBACK
+ void CompleteInitialize();
+
+ // Append a frame into |buffers_|.
+ void AppendBuffer(uint32_t count, scoped_refptr<DecoderBuffer> buffer);
+
+ // Run and reset the read callback.
+ void CompleteRead(DemuxerStream::Status status);
+
+ // Update the audio/video decoder config. When config changes in the mid
+ // stream, the new config will be stored in |next_audio_decoder_config_|.
+ // Old config will be dropped when all associated frames are consumed.
+ void UpdateAudioConfig(const pb::AudioDecoderConfig& audio_message);
+ void UpdateVideoConfig(const pb::VideoDecoderConfig& video_message);
+
+ // Called when any error occurs.
+ void OnError(const std::string& error);
+
+ scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
+
+ RpcBroker* const rpc_broker_; // Outlives this class.
+ const Type type_;
+ const int remote_handle_;
+ const int rpc_handle_;
+
+ // Set when Initialize() is called.
+ base::OnceClosure init_done_callback_;
- // Run when first error occurs;
- base::OnceClosure error_callback_;
+ // The frame count of the frame to be returned on the next Read call. It
+ // will be increased whenever a frame is read. It will be updated when
+ // FlushUntil() is called.
+ uint32_t current_frame_count_ = 0;
- base::WeakPtrFactory<StreamProvider> weak_factory_{this};
+ // One plus the last frame count received over RTP. Used for continuity
+ // check.
+ uint32_t buffered_frame_count_ = 0;
- DISALLOW_COPY_AND_ASSIGN(StreamProvider);
+ // The total number of frames received from the sender side. It will be used
+ // as the base value for sending ReadUntil() to request more frames and be
+ // updated in OnReadUntilCallback() which would get the message that
+ // contains how many frames are sent.
+ uint32_t total_received_frame_count_ = 0;
+
+ // Indicates whether Audio/VideoDecoderConfig changed and the frames with
+ // the old config are not yet consumed. The new config is stored in the end
+ // of |audio/video_decoder_config_|.
+ bool config_changed_ = false;
+
+ // Indicates whether a ReadUntil RPC message was sent without receiving the
+ // ReadUntilCallback message yet.
+ bool read_until_sent_ = false;
+
+ // Indicates whether RPC_DS_INITIALIZE_CALLBACK received.
+ bool rpc_initialized_ = false;
+
+ // Set when Read() is called. Run only once when read completes.
+ ReadCB read_complete_callback_;
+
+ // The frame data would be sent via Mojo IPC as MojoDecoderBuffer. When a
+ // frame is sent to |this| from host by calling ReceiveFrame(),
+ // |decoder_buffer_reader_| is used to read the frame date from data pipe.
+ std::unique_ptr<MojoDecoderBufferReader> decoder_buffer_reader_;
+
+ base::circular_deque<scoped_refptr<DecoderBuffer>> buffers_;
+
+ // Current audio/video config.
+ AudioDecoderConfig audio_decoder_config_;
+ VideoDecoderConfig video_decoder_config_;
+
+ // Stores the new audio/video config when config changes.
+ AudioDecoderConfig next_audio_decoder_config_;
+ VideoDecoderConfig next_video_decoder_config_;
+
+ mojo::Receiver<mojom::RemotingDataStreamReceiver> receiver_{this};
+
+ base::WeakPtr<MediaStream> media_weak_this_;
+ base::WeakPtrFactory<MediaStream> media_weak_factory_{this};
+ };
+
+ friend std::default_delete<StreamProvider>;
+ friend class base::DeleteHelper<StreamProvider>; // For using DeleteSoon().
+
+ // For testing.
+ friend class StreamProviderTest;
+
+ void Destroy();
+
+ // RPC messages handlers.
+ void OnReceivedRpc(std::unique_ptr<pb::RpcMessage> message);
+ void OnAcquireDemuxer(std::unique_ptr<pb::RpcMessage> message);
+
+ // Called when audio/video stream is created and initialized.
+ void InitializeDataPipe();
+ void OnAudioStreamCreated(MediaStream::UniquePtr stream);
+ void OnVideoStreamCreated(MediaStream::UniquePtr stream);
+ void OnAudioStreamInitialized();
+ void OnVideoStreamInitialized();
+ void CompleteInitialize();
+
+ scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
+ ReceiverController* const receiver_controller_; // Outlives this class
+ RpcBroker* const rpc_broker_; // Outlives this class
+ MediaStream::UniquePtr audio_stream_;
+ MediaStream::UniquePtr video_stream_;
+ bool has_audio_{false};
+ bool has_video_{false};
+ bool audio_stream_initialized_{false};
+ bool video_stream_initialized_{false};
+
+ // Set when Initialize() is called, and will run when both video and audio
+ // streams are initialized or error occurs.
+ PipelineStatusCallback init_done_callback_;
+
+ base::WeakPtr<StreamProvider> media_weak_this_;
+ base::WeakPtrFactory<StreamProvider> media_weak_factory_{this};
};
} // namespace remoting
} // namespace media
+namespace std {
+
+// Specialize std::default_delete to call Destroy().
+template <>
+struct default_delete<media::remoting::StreamProvider> {
+ constexpr default_delete() = default;
+
+ template <typename U,
+ typename = typename std::enable_if<std::is_convertible<
+ U*,
+ media::remoting::StreamProvider*>::value>::type>
+ explicit default_delete(const default_delete<U>& d) {}
+
+ void operator()(media::remoting::StreamProvider* ptr) const;
+};
+
+} // namespace std
+
#endif // MEDIA_REMOTING_STREAM_PROVIDER_H_
diff --git a/chromium/media/remoting/stream_provider_unittest.cc b/chromium/media/remoting/stream_provider_unittest.cc
new file mode 100644
index 00000000000..7ad43222db0
--- /dev/null
+++ b/chromium/media/remoting/stream_provider_unittest.cc
@@ -0,0 +1,316 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/remoting/stream_provider.h"
+
+#include "base/test/task_environment.h"
+#include "media/base/audio_decoder_config.h"
+#include "media/base/demuxer_stream.h"
+#include "media/base/media_util.h"
+#include "media/base/test_helpers.h"
+#include "media/base/video_decoder_config.h"
+#include "media/remoting/mock_receiver_controller.h"
+#include "media/remoting/proto_enum_utils.h"
+#include "media/remoting/proto_utils.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::NiceMock;
+
+namespace {
+constexpr int kBufferSize = 10;
+} // namespace
+
+namespace media {
+namespace remoting {
+
+class StreamProviderTest : public testing::Test {
+ public:
+ StreamProviderTest()
+ : audio_config_(TestAudioConfig::Normal()),
+ video_config_(TestVideoConfig::Normal()),
+ audio_buffer_(new DecoderBuffer(kBufferSize)),
+ video_buffer_(DecoderBuffer::CreateEOSBuffer()) {}
+
+ void SetUp() override {
+ mock_controller_ = MockReceiverController::GetInstance();
+ mock_controller_->Initialize(
+ mock_controller_->mock_remotee()->BindNewPipeAndPassRemote());
+ mock_remotee_ = mock_controller_->mock_remotee();
+ stream_provider_ = std::make_unique<StreamProvider>(
+ mock_controller_, base::ThreadTaskRunnerHandle::Get());
+
+ rpc_broker_ = mock_controller_->rpc_broker();
+ sender_audio_demuxer_stream_handle_ = rpc_broker_->GetUniqueHandle();
+ sender_video_demuxer_stream_handle_ = rpc_broker_->GetUniqueHandle();
+ rpc_broker_->RegisterMessageReceiverCallback(
+ sender_audio_demuxer_stream_handle_,
+ base::BindRepeating(&StreamProviderTest::OnDemuxerStreamReceivedRpc,
+ base::Unretained(this),
+ DemuxerStream::Type::AUDIO));
+ rpc_broker_->RegisterMessageReceiverCallback(
+ sender_video_demuxer_stream_handle_,
+ base::BindRepeating(&StreamProviderTest::OnDemuxerStreamReceivedRpc,
+ base::Unretained(this),
+ DemuxerStream::Type::VIDEO));
+ }
+
+ void TearDown() override {
+ stream_provider_.reset();
+ task_environment_.RunUntilIdle();
+ }
+
+ void OnDemuxerStreamReceivedRpc(DemuxerStream::Type type,
+ std::unique_ptr<pb::RpcMessage> message) {
+ DCHECK(message);
+ switch (message->proc()) {
+ case pb::RpcMessage::RPC_DS_INITIALIZE:
+ if (type == DemuxerStream::Type::AUDIO) {
+ receiver_audio_demuxer_stream_handle_ = message->integer_value();
+ } else if (type == DemuxerStream::Type::VIDEO) {
+ receiver_video_demuxer_stream_handle_ = message->integer_value();
+ } else {
+ NOTREACHED();
+ }
+
+ RpcInitializeCallback(type);
+ break;
+
+ case pb::RpcMessage::RPC_DS_READUNTIL:
+ ReadUntil(type);
+ break;
+
+ default:
+ DVLOG(1) << __func__ << "Unknown supported message.";
+ }
+ }
+
+ void RpcInitializeCallback(DemuxerStream::Type type) {
+ // Issues RPC_DS_INITIALIZE_CALLBACK RPC message.
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(type == DemuxerStream::Type::AUDIO
+ ? receiver_audio_demuxer_stream_handle_
+ : receiver_video_demuxer_stream_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_DS_INITIALIZE_CALLBACK);
+ auto* init_cb_message = rpc->mutable_demuxerstream_initializecb_rpc();
+ init_cb_message->set_type(type);
+
+ switch (type) {
+ case DemuxerStream::Type::AUDIO: {
+ pb::AudioDecoderConfig* audio_message =
+ init_cb_message->mutable_audio_decoder_config();
+ ConvertAudioDecoderConfigToProto(audio_config_, audio_message);
+ break;
+ }
+
+ case DemuxerStream::Type::VIDEO: {
+ pb::VideoDecoderConfig* video_message =
+ init_cb_message->mutable_video_decoder_config();
+ ConvertVideoDecoderConfigToProto(video_config_, video_message);
+ break;
+ }
+
+ default:
+ NOTREACHED();
+ }
+
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void ReadUntil(DemuxerStream::Type type) {
+ switch (type) {
+ case DemuxerStream::Type::AUDIO:
+ SendAudioFrame();
+ break;
+ case DemuxerStream::Type::VIDEO:
+ SendVideoFrame();
+ break;
+ default:
+ NOTREACHED();
+ }
+ }
+
+ void SendRpcAcquireDemuxer() {
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(RpcBroker::kAcquireDemuxerHandle);
+ rpc->set_proc(pb::RpcMessage::RPC_ACQUIRE_DEMUXER);
+ pb::AcquireDemuxer* message = rpc->mutable_acquire_demuxer_rpc();
+ message->set_audio_demuxer_handle(sender_audio_demuxer_stream_handle_);
+ message->set_video_demuxer_handle(sender_video_demuxer_stream_handle_);
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void OnStreamProviderInitialized(PipelineStatus status) {
+ EXPECT_EQ(PipelineStatus::PIPELINE_OK, status);
+ stream_provider_initialized_ = true;
+ audio_stream_ =
+ stream_provider_->GetFirstStream(DemuxerStream::Type::AUDIO);
+ video_stream_ =
+ stream_provider_->GetFirstStream(DemuxerStream::Type::VIDEO);
+
+ EXPECT_TRUE(audio_stream_);
+ EXPECT_TRUE(video_stream_);
+ }
+
+ void InitializeDemuxer() {
+ DCHECK(stream_provider_);
+ stream_provider_->Initialize(
+ nullptr,
+ base::BindOnce(&StreamProviderTest::OnStreamProviderInitialized,
+ base::Unretained(this)));
+ }
+
+ void SendAudioFrame() {
+ mock_remotee_->SendAudioFrame(0, audio_buffer_);
+ SendRpcReadUntilCallback(DemuxerStream::Type::AUDIO);
+ }
+
+ void SendVideoFrame() {
+ mock_remotee_->SendVideoFrame(0, video_buffer_);
+ SendRpcReadUntilCallback(DemuxerStream::Type::VIDEO);
+ }
+
+ void SendRpcReadUntilCallback(DemuxerStream::Type type) {
+ // Issues RPC_DS_READUNTIL_CALLBACK RPC message.
+ auto rpc = std::make_unique<pb::RpcMessage>();
+ rpc->set_handle(type == DemuxerStream::Type::AUDIO
+ ? receiver_audio_demuxer_stream_handle_
+ : receiver_video_demuxer_stream_handle_);
+ rpc->set_proc(pb::RpcMessage::RPC_DS_READUNTIL_CALLBACK);
+ auto* message = rpc->mutable_demuxerstream_readuntilcb_rpc();
+ message->set_count(0);
+ message->set_status(
+ ToProtoDemuxerStreamStatus(DemuxerStream::Status::kOk).value());
+ rpc_broker_->SendMessageToRemote(std::move(rpc));
+ }
+
+ void FlushUntil(uint32_t flush_audio_count, uint32_t flush_video_count) {
+ mock_remotee_->OnFlushUntil(flush_audio_count, flush_video_count);
+ }
+
+ uint32_t GetAudioCurrentFrameCount() {
+ return stream_provider_->audio_stream_->current_frame_count_;
+ }
+
+ uint32_t GetVideoCurrentFrameCount() {
+ return stream_provider_->video_stream_->current_frame_count_;
+ }
+
+ void OnBufferReadFromDemuxerStream(DemuxerStream::Type type,
+ DemuxerStream::Status status,
+ scoped_refptr<DecoderBuffer> buffer) {
+ EXPECT_EQ(status, DemuxerStream::Status::kOk);
+ switch (type) {
+ case DemuxerStream::Type::AUDIO:
+ received_audio_buffer_ = buffer;
+ break;
+ case DemuxerStream::Type::VIDEO:
+ received_video_buffer_ = buffer;
+ break;
+ default:
+ NOTREACHED();
+ }
+ }
+
+ base::test::TaskEnvironment task_environment_;
+
+ AudioDecoderConfig audio_config_;
+ VideoDecoderConfig video_config_;
+
+ DemuxerStream* audio_stream_;
+ DemuxerStream* video_stream_;
+
+ scoped_refptr<DecoderBuffer> audio_buffer_;
+ scoped_refptr<DecoderBuffer> video_buffer_;
+
+ bool stream_provider_initialized_{false};
+ scoped_refptr<DecoderBuffer> received_audio_buffer_;
+ scoped_refptr<DecoderBuffer> received_video_buffer_;
+
+ int sender_audio_demuxer_stream_handle_ = RpcBroker::kInvalidHandle;
+ int sender_video_demuxer_stream_handle_ = RpcBroker::kInvalidHandle;
+ int receiver_audio_demuxer_stream_handle_ = RpcBroker::kInvalidHandle;
+ int receiver_video_demuxer_stream_handle_ = RpcBroker::kInvalidHandle;
+
+ RpcBroker* rpc_broker_;
+ MockReceiverController* mock_controller_;
+ MockRemotee* mock_remotee_;
+ std::unique_ptr<StreamProvider> stream_provider_;
+};
+
+TEST_F(StreamProviderTest, InitializeBeforeRpcAcquireDemuxer) {
+ InitializeDemuxer();
+ EXPECT_FALSE(stream_provider_initialized_);
+
+ SendRpcAcquireDemuxer();
+ task_environment_.RunUntilIdle();
+
+ EXPECT_TRUE(mock_remotee_->audio_stream_.is_bound());
+ EXPECT_TRUE(mock_remotee_->video_stream_.is_bound());
+ EXPECT_TRUE(stream_provider_initialized_);
+
+ // 1 audio stream and 1 video stream
+ EXPECT_EQ(size_t(2), stream_provider_->GetAllStreams().size());
+}
+
+TEST_F(StreamProviderTest, InitializeAfterRpcAcquireDemuxer) {
+ SendRpcAcquireDemuxer();
+ EXPECT_FALSE(stream_provider_initialized_);
+
+ InitializeDemuxer();
+ task_environment_.RunUntilIdle();
+
+ EXPECT_TRUE(mock_remotee_->audio_stream_.is_bound());
+ EXPECT_TRUE(mock_remotee_->video_stream_.is_bound());
+ EXPECT_TRUE(stream_provider_initialized_);
+
+ // 1 audio stream and 1 video stream
+ EXPECT_EQ(size_t(2), stream_provider_->GetAllStreams().size());
+}
+
+TEST_F(StreamProviderTest, ReadBuffer) {
+ InitializeDemuxer();
+ SendRpcAcquireDemuxer();
+ task_environment_.RunUntilIdle();
+ EXPECT_TRUE(mock_remotee_->audio_stream_.is_bound());
+ EXPECT_TRUE(mock_remotee_->video_stream_.is_bound());
+ EXPECT_TRUE(stream_provider_initialized_);
+
+ audio_stream_->Read(
+ base::BindOnce(&StreamProviderTest::OnBufferReadFromDemuxerStream,
+ base::Unretained(this), DemuxerStream::Type::AUDIO));
+ task_environment_.RunUntilIdle();
+ EXPECT_EQ(audio_buffer_->data_size(), received_audio_buffer_->data_size());
+ EXPECT_EQ(audio_buffer_->end_of_stream(),
+ received_audio_buffer_->end_of_stream());
+ EXPECT_EQ(audio_buffer_->is_key_frame(),
+ received_audio_buffer_->is_key_frame());
+
+ video_stream_->Read(
+ base::BindOnce(&StreamProviderTest::OnBufferReadFromDemuxerStream,
+ base::Unretained(this), DemuxerStream::Type::VIDEO));
+ task_environment_.RunUntilIdle();
+ EXPECT_EQ(video_buffer_->end_of_stream(),
+ received_video_buffer_->end_of_stream());
+}
+
+TEST_F(StreamProviderTest, FlushUntil) {
+ InitializeDemuxer();
+ SendRpcAcquireDemuxer();
+ task_environment_.RunUntilIdle();
+ EXPECT_TRUE(mock_remotee_->audio_stream_.is_bound());
+ EXPECT_TRUE(mock_remotee_->video_stream_.is_bound());
+ EXPECT_TRUE(stream_provider_initialized_);
+
+ uint32_t flush_audio_count = 10;
+ uint32_t flush_video_count = 20;
+ FlushUntil(flush_audio_count, flush_video_count);
+ task_environment_.RunUntilIdle();
+
+ EXPECT_EQ(GetAudioCurrentFrameCount(), flush_audio_count);
+ EXPECT_EQ(GetVideoCurrentFrameCount(), flush_video_count);
+}
+
+} // namespace remoting
+} // namespace media
diff --git a/chromium/media/remoting/test_utils.cc b/chromium/media/remoting/test_utils.cc
new file mode 100644
index 00000000000..d3ec82254d4
--- /dev/null
+++ b/chromium/media/remoting/test_utils.cc
@@ -0,0 +1,17 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/remoting/test_utils.h"
+#include "media/remoting/receiver_controller.h"
+
+namespace media {
+namespace remoting {
+
+void ResetForTesting(ReceiverController* controller) {
+ controller->receiver_.reset();
+ controller->media_remotee_.reset();
+}
+
+} // namespace remoting
+} // namespace media
diff --git a/chromium/media/remoting/test_utils.h b/chromium/media/remoting/test_utils.h
new file mode 100644
index 00000000000..2b5a3e77de3
--- /dev/null
+++ b/chromium/media/remoting/test_utils.h
@@ -0,0 +1,19 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_REMOTING_TEST_UTILS_H_
+#define MEDIA_REMOTING_TEST_UTILS_H_
+
+namespace media {
+namespace remoting {
+
+class ReceiverController;
+
+// Friend function for resetting the mojo binding in ReceiverController.
+void ResetForTesting(ReceiverController* controller);
+
+} // namespace remoting
+} // namespace media
+
+#endif // MEDIA_REMOTING_TEST_UTILS_H_
diff --git a/chromium/media/renderers/BUILD.gn b/chromium/media/renderers/BUILD.gn
index 684f41f71fe..3c9967bcce8 100644
--- a/chromium/media/renderers/BUILD.gn
+++ b/chromium/media/renderers/BUILD.gn
@@ -129,7 +129,6 @@ if (is_win) {
"win/media_foundation_stream_wrapper.h",
"win/media_foundation_video_stream.cc",
"win/media_foundation_video_stream.h",
- "win/mf_cdm_proxy.h",
]
deps = [
"//media",
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index 95308f4a746..0c1e6b03bd9 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -48,7 +48,7 @@ AudioRendererImpl::AudioRendererImpl(
media::AudioRendererSink* sink,
const CreateAudioDecodersCB& create_audio_decoders_cb,
MediaLog* media_log,
- const TranscribeAudioCallback& transcribe_audio_callback)
+ SpeechRecognitionClient* speech_recognition_client)
: task_runner_(task_runner),
expecting_config_changes_(false),
sink_(sink),
@@ -70,8 +70,12 @@ AudioRendererImpl::AudioRendererImpl(
received_end_of_stream_(false),
rendered_end_of_stream_(false),
is_suspending_(false),
+#if defined(OS_ANDROID)
+ is_passthrough_(false) {
+#else
is_passthrough_(false),
- transcribe_audio_callback_(transcribe_audio_callback) {
+ speech_recognition_client_(speech_recognition_client) {
+#endif
DCHECK(create_audio_decoders_cb_);
// PowerObserver's must be added and removed from the same thread, but we
@@ -369,6 +373,14 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
sink_->GetOutputDeviceInfoAsync(
base::BindOnce(&AudioRendererImpl::OnDeviceInfoReceived,
weak_factory_.GetWeakPtr(), demuxer_stream_, cdm_context));
+
+#if !defined(OS_ANDROID)
+ if (speech_recognition_client_) {
+ speech_recognition_client_->SetOnReadyCallback(
+ base::BindOnce(&AudioRendererImpl::EnableSpeechRecognition,
+ weak_factory_.GetWeakPtr()));
+ }
+#endif
}
void AudioRendererImpl::OnDeviceInfoReceived(
@@ -619,6 +631,8 @@ void AudioRendererImpl::OnAudioDecoderStreamInitialized(bool success) {
algorithm_->Initialize(audio_parameters_, is_encrypted_);
if (latency_hint_)
algorithm_->SetLatencyHint(latency_hint_);
+
+ algorithm_->SetPreservesPitch(preserves_pitch_);
ConfigureChannelMask();
ChangeState_Locked(kFlushed);
@@ -708,6 +722,15 @@ void AudioRendererImpl::SetLatencyHint(
}
}
+void AudioRendererImpl::SetPreservesPitch(bool preserves_pitch) {
+ base::AutoLock auto_lock(lock_);
+
+ preserves_pitch_ = preserves_pitch;
+
+ if (algorithm_)
+ algorithm_->SetPreservesPitch(preserves_pitch);
+}
+
void AudioRendererImpl::OnSuspend() {
base::AutoLock auto_lock(lock_);
is_suspending_ = true;
@@ -871,8 +894,10 @@ bool AudioRendererImpl::HandleDecodedBuffer_Locked(
if (first_packet_timestamp_ == kNoTimestamp)
first_packet_timestamp_ = buffer->timestamp();
- if (!transcribe_audio_callback_.is_null())
+#if !defined(OS_ANDROID)
+ if (transcribe_audio_callback_)
transcribe_audio_callback_.Run(buffer);
+#endif
if (state_ != kUninitialized)
algorithm_->EnqueueBuffer(std::move(buffer));
@@ -1281,4 +1306,20 @@ void AudioRendererImpl::ConfigureChannelMask() {
algorithm_->SetChannelMask(std::move(channel_mask));
}
+void AudioRendererImpl::EnableSpeechRecognition() {
+#if !defined(OS_ANDROID)
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ transcribe_audio_callback_ = base::BindRepeating(
+ &AudioRendererImpl::TranscribeAudio, weak_factory_.GetWeakPtr());
+#endif
+}
+
+void AudioRendererImpl::TranscribeAudio(
+ scoped_refptr<media::AudioBuffer> buffer) {
+#if !defined(OS_ANDROID)
+ DCHECK(task_runner_->BelongsToCurrentThread());
+ if (speech_recognition_client_)
+ speech_recognition_client_->AddAudio(std::move(buffer));
+#endif
+}
} // namespace media
diff --git a/chromium/media/renderers/audio_renderer_impl.h b/chromium/media/renderers/audio_renderer_impl.h
index 33ef7e22a69..7755d20ce15 100644
--- a/chromium/media/renderers/audio_renderer_impl.h
+++ b/chromium/media/renderers/audio_renderer_impl.h
@@ -49,6 +49,7 @@ namespace media {
class AudioBufferConverter;
class AudioBus;
class AudioClock;
+class SpeechRecognitionClient;
class MEDIA_EXPORT AudioRendererImpl
: public AudioRenderer,
@@ -62,6 +63,9 @@ class MEDIA_EXPORT AudioRendererImpl
using TranscribeAudioCallback =
base::RepeatingCallback<void(scoped_refptr<media::AudioBuffer>)>;
+ using EnableSpeechRecognitionCallback =
+ base::OnceCallback<void(TranscribeAudioCallback)>;
+
// |task_runner| is the thread on which AudioRendererImpl will execute.
//
// |sink| is used as the destination for the rendered audio.
@@ -72,7 +76,7 @@ class MEDIA_EXPORT AudioRendererImpl
AudioRendererSink* sink,
const CreateAudioDecodersCB& create_audio_decoders_cb,
MediaLog* media_log,
- const TranscribeAudioCallback& transcribe_audio_callback);
+ SpeechRecognitionClient* speech_recognition_client = nullptr);
~AudioRendererImpl() override;
// TimeSource implementation.
@@ -95,6 +99,7 @@ class MEDIA_EXPORT AudioRendererImpl
void StartPlaying() override;
void SetVolume(float volume) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void SetPreservesPitch(bool preserves_pitch) override;
// base::PowerObserver implementation.
void OnSuspend() override;
@@ -224,6 +229,9 @@ class MEDIA_EXPORT AudioRendererImpl
// changes. Expect the layout in |last_decoded_channel_layout_|.
void ConfigureChannelMask();
+ void EnableSpeechRecognition();
+ void TranscribeAudio(scoped_refptr<media::AudioBuffer> buffer);
+
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
std::unique_ptr<AudioBufferConverter> buffer_converter_;
@@ -293,6 +301,10 @@ class MEDIA_EXPORT AudioRendererImpl
// during Initialize().
base::Optional<base::TimeDelta> latency_hint_;
+ // Passed to |algorithm_|. Indicates whether |algorithm_| should or should not
+ // make pitch adjustments at playbacks other than 1.0.
+ bool preserves_pitch_ = true;
+
// Simple state tracking variable.
State state_;
@@ -350,7 +362,10 @@ class MEDIA_EXPORT AudioRendererImpl
// End variables which must be accessed under |lock_|. ----------------------
+#if !defined(OS_ANDROID)
+ SpeechRecognitionClient* speech_recognition_client_;
TranscribeAudioCallback transcribe_audio_callback_;
+#endif
// NOTE: Weak pointers must be invalidated before all other member variables.
base::WeakPtrFactory<AudioRendererImpl> weak_factory_{this};
diff --git a/chromium/media/renderers/audio_renderer_impl_unittest.cc b/chromium/media/renderers/audio_renderer_impl_unittest.cc
index dc1dcee3520..39df7808435 100644
--- a/chromium/media/renderers/audio_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/audio_renderer_impl_unittest.cc
@@ -133,9 +133,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
renderer_->tick_clock_ = &tick_clock_;
tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
}
@@ -162,9 +160,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
testing::Mock::VerifyAndClearExpectations(&demuxer_stream_);
ConfigureDemuxerStream(false);
}
@@ -178,9 +174,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
testing::Mock::VerifyAndClearExpectations(&demuxer_stream_);
ConfigureDemuxerStream(true);
}
@@ -191,9 +185,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, mock_sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
testing::Mock::VerifyAndClearExpectations(&demuxer_stream_);
ConfigureDemuxerStream(true);
}
@@ -255,9 +247,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
main_thread_task_runner_, sink_.get(),
base::BindRepeating(&AudioRendererImplTest::CreateAudioDecoderForTest,
base::Unretained(this)),
- &media_log_,
- base::BindRepeating(&AudioRendererImplTest::TranscribeAudioCallback,
- base::Unretained(this))));
+ &media_log_, nullptr));
Initialize();
}
@@ -676,7 +666,7 @@ TEST_F(AudioRendererImplTest, StartTicking) {
TEST_F(AudioRendererImplTest, TranscribeAudioCallback) {
Initialize();
- EXPECT_CALL(*this, TranscribeAudioCallback(_)).Times(testing::AtLeast(1));
+ EXPECT_CALL(*this, TranscribeAudioCallback(_)).Times(0);
Preroll();
StartTicking();
diff --git a/chromium/media/renderers/decrypting_renderer.cc b/chromium/media/renderers/decrypting_renderer.cc
index f2df8566fbc..b231ec7a40a 100644
--- a/chromium/media/renderers/decrypting_renderer.cc
+++ b/chromium/media/renderers/decrypting_renderer.cc
@@ -6,6 +6,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
+#include "media/base/cdm_context.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_log.h"
#include "media/base/media_resource.h"
@@ -110,6 +111,10 @@ void DecryptingRenderer::SetLatencyHint(
renderer_->SetLatencyHint(latency_hint);
}
+void DecryptingRenderer::SetPreservesPitch(bool preserves_pitch) {
+ renderer_->SetPreservesPitch(preserves_pitch);
+}
+
void DecryptingRenderer::Flush(base::OnceClosure flush_cb) {
renderer_->Flush(std::move(flush_cb));
}
diff --git a/chromium/media/renderers/decrypting_renderer.h b/chromium/media/renderers/decrypting_renderer.h
index 6f443b4655e..84b9747b805 100644
--- a/chromium/media/renderers/decrypting_renderer.h
+++ b/chromium/media/renderers/decrypting_renderer.h
@@ -46,6 +46,7 @@ class MEDIA_EXPORT DecryptingRenderer : public Renderer {
PipelineStatusCallback init_cb) override;
void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void SetPreservesPitch(bool preserves_pitch) override;
void Flush(base::OnceClosure flush_cb) override;
void StartPlayingFrom(base::TimeDelta time) override;
diff --git a/chromium/media/renderers/decrypting_renderer_unittest.cc b/chromium/media/renderers/decrypting_renderer_unittest.cc
index d2b5d61ea7d..8d55c714dd9 100644
--- a/chromium/media/renderers/decrypting_renderer_unittest.cc
+++ b/chromium/media/renderers/decrypting_renderer_unittest.cc
@@ -87,7 +87,7 @@ class DecryptingRendererTest : public testing::Test {
bool use_aes_decryptor_ = false;
base::test::TaskEnvironment task_environment_;
- base::MockCallback<CdmAttachedCB> set_cdm_cb_;
+ base::MockCallback<Renderer::CdmAttachedCB> set_cdm_cb_;
base::MockOnceCallback<void(PipelineStatus)> renderer_init_cb_;
NullMediaLog null_media_log_;
StrictMock<MockCdmContext> cdm_context_;
diff --git a/chromium/media/renderers/default_renderer_factory.cc b/chromium/media/renderers/default_renderer_factory.cc
index 9d5c2ac959f..8596f9f58fc 100644
--- a/chromium/media/renderers/default_renderer_factory.cc
+++ b/chromium/media/renderers/default_renderer_factory.cc
@@ -10,7 +10,6 @@
#include "base/bind.h"
#include "build/build_config.h"
#include "media/base/audio_buffer.h"
-#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_factory.h"
#include "media/renderers/audio_renderer_impl.h"
#include "media/renderers/renderer_impl.h"
@@ -92,9 +91,11 @@ std::unique_ptr<Renderer> DefaultRendererFactory::CreateRenderer(
// finishes.
base::BindRepeating(&DefaultRendererFactory::CreateAudioDecoders,
base::Unretained(this), media_task_runner),
- media_log_,
- BindToCurrentLoop(base::BindRepeating(
- &DefaultRendererFactory::TranscribeAudio, base::Unretained(this)))));
+#if defined(OS_ANDROID)
+ media_log_));
+#else
+ media_log_, speech_recognition_client_.get()));
+#endif
GpuVideoAcceleratorFactories* gpu_factories = nullptr;
if (get_gpu_factories_cb_)
@@ -126,14 +127,4 @@ std::unique_ptr<Renderer> DefaultRendererFactory::CreateRenderer(
media_task_runner, std::move(audio_renderer), std::move(video_renderer));
}
-void DefaultRendererFactory::TranscribeAudio(
- scoped_refptr<media::AudioBuffer> buffer) {
-#if !defined(OS_ANDROID)
- if (speech_recognition_client_ &&
- speech_recognition_client_->IsSpeechRecognitionAvailable()) {
- speech_recognition_client_->AddAudio(std::move(buffer));
- }
-#endif
-}
-
} // namespace media
diff --git a/chromium/media/renderers/default_renderer_factory.h b/chromium/media/renderers/default_renderer_factory.h
index 09de8928651..455ce1bb782 100644
--- a/chromium/media/renderers/default_renderer_factory.h
+++ b/chromium/media/renderers/default_renderer_factory.h
@@ -61,8 +61,6 @@ class MEDIA_EXPORT DefaultRendererFactory : public RendererFactory {
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) final;
- void TranscribeAudio(scoped_refptr<media::AudioBuffer> buffer);
-
private:
std::vector<std::unique_ptr<AudioDecoder>> CreateAudioDecoders(
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner);
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index 5eaa9529549..248f46e23d0 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -15,7 +15,7 @@
#include "base/numerics/checked_math.h"
#include "base/synchronization/waitable_event.h"
#include "base/system/sys_info.h"
-#include "base/task/post_task.h"
+#include "base/task/thread_pool.h"
#include "base/threading/thread_restrictions.h"
#include "cc/paint/paint_canvas.h"
#include "cc/paint/paint_flags.h"
@@ -212,16 +212,6 @@ GLuint SynchronizeAndImportMailbox(gpu::gles2::GLES2Interface* gl,
: gl->CreateAndConsumeTextureCHROMIUM(mailbox.name);
}
-// TODO(crbug.com/1023270): Remove this ctor once we're no longer relying on
-// texture ids for Mailbox access as that is only supported on
-// RasterImplementationGLES.
-GLuint SynchronizeAndImportMailbox(gpu::raster::RasterInterface* ri,
- const gpu::SyncToken& sync_token,
- const gpu::Mailbox& mailbox) {
- ri->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
- return ri->CreateAndConsumeForGpuRaster(mailbox);
-}
-
const gpu::MailboxHolder& GetVideoFrameMailboxHolder(VideoFrame* video_frame) {
DCHECK(video_frame->HasTextures());
DCHECK_EQ(video_frame->NumTextures(), 1u);
@@ -340,8 +330,7 @@ void SynchronizeVideoFrameRead(scoped_refptr<VideoFrame> video_frame,
SyncTokenClientImpl client(ri);
video_frame->UpdateReleaseSyncToken(&client);
- if (video_frame->metadata()->IsTrue(
- VideoFrameMetadata::READ_LOCK_FENCES_ENABLED)) {
+ if (video_frame->metadata()->read_lock_fences_enabled) {
// |video_frame| must be kept alive during read operations.
DCHECK(context_support);
unsigned query_id = 0;
@@ -366,6 +355,7 @@ size_t LCM(size_t a, size_t b) {
void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
void* rgb_pixels,
size_t row_bytes,
+ bool premultiply_alpha,
size_t task_index,
size_t n_tasks,
base::RepeatingClosure* done) {
@@ -419,7 +409,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
if (!video_frame->data(VideoFrame::kUPlane) &&
!video_frame->data(VideoFrame::kVPlane)) {
- DCHECK_EQ(video_frame->format(), PIXEL_FORMAT_I420);
+ DCHECK_EQ(format, PIXEL_FORMAT_I420);
auto func = (color_space == kJPEG_SkYUVColorSpace) ? LIBYUV_J400_TO_ARGB
: LIBYUV_I400_TO_ARGB;
func(plane_meta[VideoFrame::kYPlane].data,
@@ -450,7 +440,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
rows);
};
- switch (video_frame->format()) {
+ switch (format) {
case PIXEL_FORMAT_YV12:
case PIXEL_FORMAT_I420:
switch (color_space) {
@@ -490,16 +480,15 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
break;
case PIXEL_FORMAT_I420A:
- LIBYUV_I420ALPHA_TO_ARGB(
- plane_meta[VideoFrame::kYPlane].data,
- plane_meta[VideoFrame::kYPlane].stride,
- plane_meta[VideoFrame::kUPlane].data,
- plane_meta[VideoFrame::kUPlane].stride,
- plane_meta[VideoFrame::kVPlane].data,
- plane_meta[VideoFrame::kVPlane].stride,
- plane_meta[VideoFrame::kAPlane].data,
- plane_meta[VideoFrame::kAPlane].stride, pixels, row_bytes, width,
- rows, 1); // 1 = enable RGB premultiplication by Alpha.
+ LIBYUV_I420ALPHA_TO_ARGB(plane_meta[VideoFrame::kYPlane].data,
+ plane_meta[VideoFrame::kYPlane].stride,
+ plane_meta[VideoFrame::kUPlane].data,
+ plane_meta[VideoFrame::kUPlane].stride,
+ plane_meta[VideoFrame::kVPlane].data,
+ plane_meta[VideoFrame::kVPlane].stride,
+ plane_meta[VideoFrame::kAPlane].data,
+ plane_meta[VideoFrame::kAPlane].stride, pixels,
+ row_bytes, width, rows, premultiply_alpha);
break;
case PIXEL_FORMAT_I444:
@@ -555,7 +544,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
NOTREACHED();
}
break;
- case PIXEL_FORMAT_UYVY:
+
case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV422P9:
case PIXEL_FORMAT_YUV444P9:
@@ -564,7 +553,8 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
case PIXEL_FORMAT_YUV422P12:
case PIXEL_FORMAT_YUV444P12:
case PIXEL_FORMAT_Y16:
- NOTREACHED() << "These cases should be handled above";
+ NOTREACHED()
+ << "These cases should be handled in ConvertVideoFrameToRGBPixels";
break;
case PIXEL_FORMAT_NV12:
@@ -575,6 +565,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
row_bytes, width, rows);
break;
+ case PIXEL_FORMAT_UYVY:
case PIXEL_FORMAT_NV21:
case PIXEL_FORMAT_YUY2:
case PIXEL_FORMAT_ARGB:
@@ -589,7 +580,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_UNKNOWN:
NOTREACHED() << "Only YUV formats and Y16 are supported, got: "
- << media::VideoPixelFormatToString(video_frame->format());
+ << media::VideoPixelFormatToString(format);
}
done->Run();
}
@@ -1110,7 +1101,8 @@ void TextureSubImageUsingIntermediate(unsigned target,
void PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
const VideoFrame* video_frame,
void* rgb_pixels,
- size_t row_bytes) {
+ size_t row_bytes,
+ bool premultiply_alpha) {
if (!video_frame->IsMappable()) {
NOTREACHED() << "Cannot extract pixels from non-CPU frame formats.";
return;
@@ -1163,13 +1155,14 @@ void PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
base::BindOnce(&base::WaitableEvent::Signal, base::Unretained(&event)));
for (size_t i = 1; i < n_tasks; ++i) {
- base::PostTask(FROM_HERE,
- base::BindOnce(ConvertVideoFrameToRGBPixelsTask,
- base::Unretained(video_frame), rgb_pixels,
- row_bytes, i, n_tasks, &barrier));
+ base::ThreadPool::PostTask(
+ FROM_HERE,
+ base::BindOnce(ConvertVideoFrameToRGBPixelsTask,
+ base::Unretained(video_frame), rgb_pixels, row_bytes,
+ premultiply_alpha, i, n_tasks, &barrier));
}
- ConvertVideoFrameToRGBPixelsTask(video_frame, rgb_pixels, row_bytes, 0,
- n_tasks, &barrier);
+ ConvertVideoFrameToRGBPixelsTask(video_frame, rgb_pixels, row_bytes,
+ premultiply_alpha, 0, n_tasks, &barrier);
{
base::ScopedAllowBaseSyncPrimitivesOutsideBlockingScope allow_wait;
event.Wait();
@@ -1223,8 +1216,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
DCHECK(video_frame);
DCHECK(video_frame->HasTextures());
if (video_frame->NumTextures() > 1 ||
- video_frame->metadata()->IsTrue(
- VideoFrameMetadata::READ_LOCK_FENCES_ENABLED)) {
+ video_frame->metadata()->read_lock_fences_enabled) {
if (!raster_context_provider)
return false;
GrContext* gr_context = raster_context_provider->GrContext();
@@ -1361,11 +1353,6 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
bool premultiply_alpha,
bool flip_y) {
DCHECK(raster_context_provider);
- GrContext* gr_context = raster_context_provider->GrContext();
- if (!gr_context) {
- return false;
- }
-
if (!video_frame.IsMappable()) {
return false;
}
@@ -1373,47 +1360,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
if (video_frame.format() != media::PIXEL_FORMAT_I420) {
return false;
}
- // Could handle NV12 here as well. See NewSkImageFromVideoFrameYUVTextures.
-
- static constexpr size_t kNumPlanes = 3;
- DCHECK_EQ(video_frame.NumPlanes(video_frame.format()), kNumPlanes);
- // Y,U,V GPU-side SkImages. (These must outlive the yuv_textures).
- sk_sp<SkImage> yuv_images[kNumPlanes]{};
- // Y,U,V GPU textures from those SkImages.
- // (A GrBackendTexture is a non-owned reference to the SkImage's texture.)
- GrBackendTexture yuv_textures[kNumPlanes]{};
-
- // Upload the whole coded image area (not visible rect).
- gfx::Size y_tex_size = video_frame.coded_size();
- gfx::Size uv_tex_size((y_tex_size.width() + 1) / 2,
- (y_tex_size.height() + 1) / 2);
-
- for (size_t plane = 0; plane < kNumPlanes; ++plane) {
- const uint8_t* data = video_frame.data(plane);
- int plane_stride = video_frame.stride(plane);
-
- bool is_y_plane = plane == media::VideoFrame::kYPlane;
- gfx::Size tex_size = is_y_plane ? y_tex_size : uv_tex_size;
- int data_size = plane_stride * (tex_size.height() - 1) + tex_size.width();
-
- // Create a CPU-side SkImage from the channel.
- sk_sp<SkData> sk_data = SkData::MakeWithoutCopy(data, data_size);
- DCHECK(sk_data);
- SkImageInfo image_info =
- SkImageInfo::Make(tex_size.width(), tex_size.height(),
- kGray_8_SkColorType, kUnknown_SkAlphaType);
- sk_sp<SkImage> plane_image_cpu =
- SkImage::MakeRasterData(image_info, sk_data, plane_stride);
- DCHECK(plane_image_cpu);
-
- // Upload the CPU-side SkImage into a GPU-side SkImage.
- // (Note the original video_frame data is no longer used after this point.)
- yuv_images[plane] = plane_image_cpu->makeTextureImage(gr_context);
- DCHECK(yuv_images[plane]);
-
- // Extract the backend texture from the GPU-side image.
- yuv_textures[plane] = yuv_images[plane]->getBackendTexture(false);
- }
+ // Could handle NV12 here as well. See NewSkImageFromVideoFrameYUV.
auto* sii = raster_context_provider->SharedImageInterface();
gpu::raster::RasterInterface* source_ri =
@@ -1421,52 +1368,39 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
// We need a shared image to receive the intermediate RGB result. Try to reuse
// one if compatible, otherwise create a new one.
- if (yuv_cache_.texture && yuv_cache_.size == video_frame.coded_size() &&
+ gpu::SyncToken token;
+ if (!yuv_cache_.mailbox.IsZero() &&
+ yuv_cache_.size == video_frame.coded_size() &&
yuv_cache_.raster_context_provider == raster_context_provider) {
- source_ri->WaitSyncTokenCHROMIUM(yuv_cache_.sync_token.GetConstData());
+ token = yuv_cache_.sync_token;
} else {
yuv_cache_.Reset();
yuv_cache_.raster_context_provider = raster_context_provider;
yuv_cache_.size = video_frame.coded_size();
- yuv_cache_.mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::RGBA_8888, video_frame.coded_size(),
- gfx::ColorSpace(), gpu::SHARED_IMAGE_USAGE_GLES2);
- yuv_cache_.texture = SynchronizeAndImportMailbox(
- source_ri, sii->GenUnverifiedSyncToken(), yuv_cache_.mailbox);
- }
- // On the source GL context, do the YUV->RGB conversion using Skia.
- gpu::SyncToken post_conversion_sync_token;
- {
- source_ri->BeginSharedImageAccessDirectCHROMIUM(
- yuv_cache_.texture, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
-
- GrGLTextureInfo backend_texture = {};
- backend_texture.fTarget = GL_TEXTURE_2D;
- backend_texture.fID = yuv_cache_.texture;
- backend_texture.fFormat = GL_RGBA8;
- GrBackendTexture result_texture(video_frame.coded_size().width(),
- video_frame.coded_size().height(),
- GrMipMapped::kNo, backend_texture);
-
- sk_sp<SkImage> yuv_image = YUVGrBackendTexturesToSkImage(
- gr_context, video_frame.ColorSpace(), video_frame.format(),
- yuv_textures, result_texture);
-
- gr_context->flush();
- source_ri->EndSharedImageAccessDirectCHROMIUM(yuv_cache_.texture);
-
- source_ri->GenUnverifiedSyncTokenCHROMIUM(
- post_conversion_sync_token.GetData());
-
- if (!yuv_image) {
- // Conversion failed. Note the last use sync token for destruction.
- yuv_cache_.sync_token = post_conversion_sync_token;
- yuv_cache_.Reset();
- return false;
+ uint32_t usage = gpu::SHARED_IMAGE_USAGE_GLES2;
+ if (raster_context_provider->ContextCapabilities().supports_oop_raster) {
+ usage |= gpu::SHARED_IMAGE_USAGE_RASTER |
+ gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION;
}
+
+ yuv_cache_.mailbox = sii->CreateSharedImage(viz::ResourceFormat::RGBA_8888,
+ video_frame.coded_size(),
+ gfx::ColorSpace(), usage);
+ token = sii->GenUnverifiedSyncToken();
}
+ // On the source Raster context, do the YUV->RGB conversion.
+ gpu::MailboxHolder dest_holder;
+ dest_holder.mailbox = yuv_cache_.mailbox;
+ dest_holder.texture_target = GL_TEXTURE_2D;
+ dest_holder.sync_token = token;
+ ConvertFromVideoFrameYUV(&video_frame, raster_context_provider, dest_holder);
+
+ gpu::SyncToken post_conversion_sync_token;
+ source_ri->GenUnverifiedSyncTokenCHROMIUM(
+ post_conversion_sync_token.GetData());
+
// On the destination GL context, do a copy (with cropping) into the
// destination texture.
GLuint intermediate_texture = SynchronizeAndImportMailbox(
@@ -1601,7 +1535,7 @@ bool PaintCanvasVideoRenderer::Cache::Recycle() {
return false;
// Flush any pending GPU work using this texture.
- sk_image->flush(raster_context_provider->GrContext());
+ sk_image->flushAndSubmit(raster_context_provider->GrContext());
// We need a new texture ID because skia will destroy the previous one with
// the SkImage.
@@ -1681,7 +1615,7 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
ConvertFromVideoFrameYUV(video_frame.get(), raster_context_provider,
dest_holder);
}
- raster_context_provider->GrContext()->flush();
+ raster_context_provider->GrContext()->flushAndSubmit();
}
// TODO(jochin): Don't always generate SkImage here.
@@ -1779,18 +1713,17 @@ PaintCanvasVideoRenderer::YUVTextureCache::YUVTextureCache() = default;
PaintCanvasVideoRenderer::YUVTextureCache::~YUVTextureCache() = default;
void PaintCanvasVideoRenderer::YUVTextureCache::Reset() {
- if (!texture)
+ if (mailbox.IsZero())
return;
DCHECK(raster_context_provider);
gpu::raster::RasterInterface* ri = raster_context_provider->RasterInterface();
ri->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
- ri->DeleteGpuRasterTexture(texture);
- texture = 0;
ri->OrderingBarrierCHROMIUM();
auto* sii = raster_context_provider->SharedImageInterface();
sii->DestroySharedImage(sync_token, mailbox);
+ mailbox.SetZero();
// Kick off the GL work up to the OrderingBarrierCHROMIUM above as well as the
// SharedImageInterface work, to ensure the shared image memory is released in
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.h b/chromium/media/renderers/paint_canvas_video_renderer.h
index d5b1a141670..ffe7925bf85 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.h
+++ b/chromium/media/renderers/paint_canvas_video_renderer.h
@@ -61,7 +61,9 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
VideoTransformation video_transformation,
viz::RasterContextProvider* raster_context_provider);
- // Paints |video_frame| scaled to its visible size on |canvas|.
+ // Paints |video_frame|, scaled to its |video_frame->visible_rect().size()|
+ // on |canvas|. Note that the origin of |video_frame->visible_rect()| is
+ // ignored -- the copy is done to the origin of |canvas|.
//
// If the format of |video_frame| is PIXEL_FORMAT_NATIVE_TEXTURE, |context_3d|
// and |context_support| must be provided.
@@ -71,10 +73,18 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// Convert the contents of |video_frame| to raw RGB pixels. |rgb_pixels|
// should point into a buffer large enough to hold as many 32 bit RGBA pixels
- // as are in the visible_rect() area of the frame.
+ // as are in the visible_rect() area of the frame. |premultiply_alpha|
+ // indicates whether the R, G, B samples in |rgb_pixels| should be multiplied
+ // by alpha.
+ //
+ // NOTE: If |video_frame| doesn't have an alpha plane, all the A samples in
+ // |rgb_pixels| will be 255 (equivalent to an alpha of 1.0) and therefore the
+ // value of |premultiply_alpha| has no effect on the R, G, B samples in
+ // |rgb_pixels|.
static void ConvertVideoFrameToRGBPixels(const media::VideoFrame* video_frame,
void* rgb_pixels,
- size_t row_bytes);
+ size_t row_bytes,
+ bool premultiply_alpha = true);
// Copy the visible rect size contents of texture of |video_frame| to
// texture |texture|. |level|, |internal_format|, |type| specify target
@@ -271,9 +281,6 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// The shared image backing the texture.
gpu::Mailbox mailbox;
- // The GL texture.
- uint32_t texture = 0;
-
// A SyncToken after last usage, used for reusing or destroying texture and
// shared image.
gpu::SyncToken sync_token;
diff --git a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
index 33b64084b98..5c1ab11ebda 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
@@ -6,6 +6,7 @@
#include <stdint.h>
#include "base/bind.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
#include "base/sys_byteorder.h"
@@ -1389,8 +1390,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest,
CopyVideoFrameTexturesToGLTextureRGBA_ReadLockFence) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestRGBAFrame(run_loop.QuitClosure());
- frame->metadata()->SetBoolean(VideoFrameMetadata::READ_LOCK_FENCES_ENABLED,
- true);
+ frame->metadata()->read_lock_fences_enabled = true;
CopyVideoFrameTexturesAndCheckPixels(frame, &CheckRGBAFramePixels);
diff --git a/chromium/media/renderers/renderer_impl.cc b/chromium/media/renderers/renderer_impl.cc
index 98b8c805dd6..aa1c01263c5 100644
--- a/chromium/media/renderers/renderer_impl.cc
+++ b/chromium/media/renderers/renderer_impl.cc
@@ -93,7 +93,7 @@ RendererImpl::RendererImpl(
video_renderer_(std::move(video_renderer)),
current_audio_stream_(nullptr),
current_video_stream_(nullptr),
- time_source_(NULL),
+ time_source_(nullptr),
time_ticking_(false),
playback_rate_(0.0),
audio_buffering_state_(BUFFERING_HAVE_NOTHING),
@@ -206,6 +206,14 @@ void RendererImpl::SetLatencyHint(
audio_renderer_->SetLatencyHint(latency_hint);
}
+void RendererImpl::SetPreservesPitch(bool preserves_pitch) {
+ DVLOG(1) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (audio_renderer_)
+ audio_renderer_->SetPreservesPitch(preserves_pitch);
+}
+
void RendererImpl::Flush(base::OnceClosure flush_cb) {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -853,19 +861,15 @@ void RendererImpl::OnRendererEnded(DemuxerStream::Type type) {
DCHECK((type == DemuxerStream::AUDIO) || (type == DemuxerStream::VIDEO));
TRACE_EVENT1("media", "RendererImpl::OnRendererEnded", "type", type_string);
- if (state_ != STATE_PLAYING)
+ // If all streams are ended, do not propagate a redundant ended event.
+ if (state_ != STATE_PLAYING || PlaybackHasEnded())
return;
if (type == DemuxerStream::AUDIO) {
- // If all streams are ended, do not propagate a redundant ended event.
- if (audio_ended_ && PlaybackHasEnded())
- return;
+ DCHECK(audio_renderer_);
audio_ended_ = true;
} else {
DCHECK(video_renderer_);
- // If all streams are ended, do not propagate a redundant ended event.
- if (audio_ended_ && PlaybackHasEnded())
- return;
video_ended_ = true;
video_renderer_->OnTimeStopped();
}
diff --git a/chromium/media/renderers/renderer_impl.h b/chromium/media/renderers/renderer_impl.h
index 847186215b7..f6603d3d084 100644
--- a/chromium/media/renderers/renderer_impl.h
+++ b/chromium/media/renderers/renderer_impl.h
@@ -58,6 +58,7 @@ class MEDIA_EXPORT RendererImpl : public Renderer {
PipelineStatusCallback init_cb) final;
void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) final;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) final;
+ void SetPreservesPitch(bool preserves_pitch) final;
void Flush(base::OnceClosure flush_cb) final;
void StartPlayingFrom(base::TimeDelta time) final;
void SetPlaybackRate(double playback_rate) final;
diff --git a/chromium/media/renderers/video_overlay_factory.cc b/chromium/media/renderers/video_overlay_factory.cc
index be395346f37..d73990b1174 100644
--- a/chromium/media/renderers/video_overlay_factory.cc
+++ b/chromium/media/renderers/video_overlay_factory.cc
@@ -4,6 +4,7 @@
#include "media/renderers/video_overlay_factory.h"
+#include "base/logging.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "ui/gfx/geometry/size.h"
diff --git a/chromium/media/renderers/video_renderer_impl.cc b/chromium/media/renderers/video_renderer_impl.cc
index af3e6488466..99d67e45c5c 100644
--- a/chromium/media/renderers/video_renderer_impl.cc
+++ b/chromium/media/renderers/video_renderer_impl.cc
@@ -566,8 +566,7 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadStatus status,
last_frame_ready_time_ = tick_clock_->NowTicks();
- const bool is_eos =
- frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM);
+ const bool is_eos = frame->metadata()->end_of_stream;
const bool is_before_start_time = !is_eos && IsBeforeStartTime(*frame);
const bool cant_read = !video_decoder_stream_->CanReadWithoutStalling();
@@ -599,9 +598,9 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadStatus status,
// RemoveFramesForUnderflowOrBackgroundRendering() below to actually expire
// this frame if it's too far behind the current media time. Without this,
// we may resume too soon after a track change in the low delay case.
- if (!frame->metadata()->HasKey(VideoFrameMetadata::FRAME_DURATION)) {
- frame->metadata()->SetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- video_decoder_stream_->AverageDuration());
+ if (!frame->metadata()->frame_duration.has_value()) {
+ frame->metadata()->frame_duration =
+ video_decoder_stream_->AverageDuration();
}
AddReadyFrame_Locked(std::move(frame));
@@ -731,16 +730,12 @@ void VideoRendererImpl::TransitionToHaveNothing_Locked() {
void VideoRendererImpl::AddReadyFrame_Locked(scoped_refptr<VideoFrame> frame) {
DCHECK(task_runner_->BelongsToCurrentThread());
lock_.AssertAcquired();
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
+ DCHECK(!frame->metadata()->end_of_stream);
++stats_.video_frames_decoded;
- bool power_efficient = false;
- if (frame->metadata()->GetBoolean(VideoFrameMetadata::POWER_EFFICIENT,
- &power_efficient) &&
- power_efficient) {
+ if (frame->metadata()->power_efficient)
++stats_.video_frames_decoded_power_efficient;
- }
algorithm_->EnqueueFrame(std::move(frame));
}
@@ -929,12 +924,13 @@ base::TimeTicks VideoRendererImpl::GetCurrentMediaTimeAsWallClockTime() {
bool VideoRendererImpl::IsBeforeStartTime(const VideoFrame& frame) {
// Prefer the actual frame duration over the average if available.
- base::TimeDelta metadata_frame_duration;
- if (frame.metadata()->GetTimeDelta(VideoFrameMetadata::FRAME_DURATION,
- &metadata_frame_duration)) {
- return frame.timestamp() + metadata_frame_duration < start_timestamp_;
+ if (frame.metadata()->frame_duration.has_value()) {
+ return frame.timestamp() + *frame.metadata()->frame_duration <
+ start_timestamp_;
}
+ // TODO(tguilbert): video_decoder_stream_->AverageDuration() can be accessed
+ // from the wrong thread.
return frame.timestamp() + video_decoder_stream_->AverageDuration() <
start_timestamp_;
}
diff --git a/chromium/media/renderers/video_resource_updater.cc b/chromium/media/renderers/video_resource_updater.cc
index 27678f2b65f..5667c75131c 100644
--- a/chromium/media/renderers/video_resource_updater.cc
+++ b/chromium/media/renderers/video_resource_updater.cc
@@ -13,6 +13,7 @@
#include "base/atomic_sequence_num.h"
#include "base/bind.h"
#include "base/bit_cast.h"
+#include "base/logging.h"
#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/strings/stringprintf.h"
@@ -121,6 +122,12 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
buffer_formats[1] = gfx::BufferFormat::RG_88;
return VideoFrameResourceType::YUV;
+ case PIXEL_FORMAT_P016LE:
+ DCHECK_EQ(num_textures, 1);
+ // TODO(mcasas): Support other formats such as e.g. P012.
+ buffer_formats[0] = gfx::BufferFormat::P010;
+ return VideoFrameResourceType::RGB;
+
case PIXEL_FORMAT_UYVY:
NOTREACHED();
FALLTHROUGH;
@@ -143,7 +150,6 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
case PIXEL_FORMAT_YUV444P12:
case PIXEL_FORMAT_Y16:
case PIXEL_FORMAT_XBGR:
- case PIXEL_FORMAT_P016LE:
case PIXEL_FORMAT_UNKNOWN:
break;
}
@@ -465,9 +471,9 @@ VideoResourceUpdater::~VideoResourceUpdater() {
void VideoResourceUpdater::ObtainFrameResources(
scoped_refptr<VideoFrame> video_frame) {
- if (video_frame->metadata()->GetUnguessableToken(
- VideoFrameMetadata::OVERLAY_PLANE_ID, &overlay_plane_id_)) {
+ if (video_frame->metadata()->overlay_plane_id.has_value()) {
// This is a hole punching VideoFrame, there is nothing to display.
+ overlay_plane_id_ = *video_frame->metadata()->overlay_plane_id;
frame_resource_type_ = VideoFrameResourceType::VIDEO_HOLE;
return;
}
@@ -584,8 +590,8 @@ void VideoResourceUpdater::AppendQuads(viz::RenderPass* render_pass,
frame_resources_.size() > 3 ? frame_resources_[3].id : 0,
frame->ColorSpace(), frame_resource_offset_,
frame_resource_multiplier_, frame_bits_per_channel_);
- if (frame->metadata()->IsTrue(VideoFrameMetadata::PROTECTED_VIDEO)) {
- if (frame->metadata()->IsTrue(VideoFrameMetadata::HW_PROTECTED)) {
+ if (frame->metadata()->protected_video) {
+ if (frame->metadata()->hw_protected) {
yuv_video_quad->protected_video_type =
gfx::ProtectedVideoType::kHardwareProtected;
} else {
@@ -613,8 +619,8 @@ void VideoResourceUpdater::AppendQuads(viz::RenderPass* render_pass,
bool nearest_neighbor = false;
gfx::ProtectedVideoType protected_video_type =
gfx::ProtectedVideoType::kClear;
- if (frame->metadata()->IsTrue(VideoFrameMetadata::PROTECTED_VIDEO)) {
- if (frame->metadata()->IsTrue(VideoFrameMetadata::HW_PROTECTED))
+ if (frame->metadata()->protected_video) {
+ if (frame->metadata()->hw_protected)
protected_video_type = gfx::ProtectedVideoType::kHardwareProtected;
else
protected_video_type = gfx::ProtectedVideoType::kSoftwareProtected;
@@ -814,8 +820,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
VideoFrameExternalResources external_resources;
gfx::ColorSpace resource_color_space = video_frame->ColorSpace();
- bool copy_required =
- video_frame->metadata()->IsTrue(VideoFrameMetadata::COPY_REQUIRED);
+ bool copy_required = video_frame->metadata()->copy_required;
GLuint target = video_frame->mailbox_holder(0).texture_target;
// If |copy_required| then we will copy into a GL_TEXTURE_2D target.
@@ -857,19 +862,18 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
auto transfer_resource = viz::TransferableResource::MakeGL(
mailbox_holder.mailbox, GL_LINEAR, mailbox_holder.texture_target,
mailbox_holder.sync_token, plane_size,
- video_frame->metadata()->IsTrue(VideoFrameMetadata::ALLOW_OVERLAY));
+ video_frame->metadata()->allow_overlay);
transfer_resource.color_space = resource_color_space;
transfer_resource.read_lock_fences_enabled =
- video_frame->metadata()->IsTrue(
- VideoFrameMetadata::READ_LOCK_FENCES_ENABLED);
+ video_frame->metadata()->read_lock_fences_enabled;
transfer_resource.format = viz::GetResourceFormat(buffer_formats[i]);
transfer_resource.ycbcr_info = video_frame->ycbcr_info();
#if defined(OS_ANDROID)
transfer_resource.is_backed_by_surface_texture =
- video_frame->metadata()->IsTrue(VideoFrameMetadata::TEXTURE_OWNER);
- transfer_resource.wants_promotion_hint = video_frame->metadata()->IsTrue(
- VideoFrameMetadata::WANTS_PROMOTION_HINT);
+ video_frame->metadata()->texture_owner;
+ transfer_resource.wants_promotion_hint =
+ video_frame->metadata()->wants_promotion_hint;
#endif
external_resources.resources.push_back(std::move(transfer_resource));
external_resources.release_callbacks.push_back(
@@ -990,11 +994,19 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
SkBitmap sk_bitmap;
sk_bitmap.installPixels(info, software_resource->pixels(),
info.minRowBytes());
+ // This is software path, so |canvas| and |video_frame| are always
+ // backed by software.
cc::SkiaPaintCanvas canvas(sk_bitmap);
-
- // This is software path, so canvas and video_frame are always backed
- // by software.
- video_renderer_->Copy(video_frame, &canvas, nullptr);
+ cc::PaintFlags flags;
+ flags.setBlendMode(SkBlendMode::kSrc);
+ flags.setFilterQuality(kLow_SkFilterQuality);
+
+ // Note that PaintCanvasVideoRenderer::Copy would copy to the origin,
+ // not |video_frame->visible_rect|, so call Paint instead.
+ // https://crbug.com/1090435
+ video_renderer_->Paint(video_frame, &canvas,
+ gfx::RectF(video_frame->visible_rect()), flags,
+ media::kNoTransformation, nullptr);
} else {
HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
size_t bytes_per_row = viz::ResourceSizes::CheckedWidthInBytes<size_t>(
@@ -1242,7 +1254,7 @@ void VideoResourceUpdater::RecycleResource(uint32_t plane_resource_id,
if (resource_it == all_resources_.end())
return;
- if (context_provider_ && sync_token.HasData()) {
+ if ((raster_context_provider_ || context_provider_) && sync_token.HasData()) {
auto* gl = raster_context_provider_ ? raster_context_provider_->ContextGL()
: context_provider_->ContextGL();
gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
diff --git a/chromium/media/renderers/video_resource_updater_unittest.cc b/chromium/media/renderers/video_resource_updater_unittest.cc
index 79577a39f81..6e515ba2bae 100644
--- a/chromium/media/renderers/video_resource_updater_unittest.cc
+++ b/chromium/media/renderers/video_resource_updater_unittest.cc
@@ -204,8 +204,7 @@ class VideoResourceUpdaterTest : public testing::Test {
bool needs_copy) {
scoped_refptr<media::VideoFrame> video_frame = CreateTestHardwareVideoFrame(
media::PIXEL_FORMAT_ARGB, GL_TEXTURE_EXTERNAL_OES);
- video_frame->metadata()->SetBoolean(
- media::VideoFrameMetadata::COPY_REQUIRED, needs_copy);
+ video_frame->metadata()->copy_required = needs_copy;
return video_frame;
}
@@ -531,8 +530,7 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes) {
video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_I420, 3,
GL_TEXTURE_RECTANGLE_ARB);
- video_frame->metadata()->SetBoolean(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED, true);
+ video_frame->metadata()->read_lock_fences_enabled = true;
resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_TRUE(resources.resources[0].read_lock_fences_enabled);
diff --git a/chromium/media/renderers/win/media_foundation_protection_manager.h b/chromium/media/renderers/win/media_foundation_protection_manager.h
index 9e428b589ba..3ccb8b39ae9 100644
--- a/chromium/media/renderers/win/media_foundation_protection_manager.h
+++ b/chromium/media/renderers/win/media_foundation_protection_manager.h
@@ -10,7 +10,7 @@
#include <windows.media.protection.h>
#include <wrl.h>
-#include "media/renderers/win/mf_cdm_proxy.h"
+#include "media/base/win/mf_cdm_proxy.h"
namespace media {
diff --git a/chromium/media/renderers/win/media_foundation_renderer.cc b/chromium/media/renderers/win/media_foundation_renderer.cc
index b8cfde9f7b7..99d1dc99b61 100644
--- a/chromium/media/renderers/win/media_foundation_renderer.cc
+++ b/chromium/media/renderers/win/media_foundation_renderer.cc
@@ -21,6 +21,7 @@
#include "base/win/windows_version.h"
#include "base/win/wrapped_window_proc.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/cdm_context.h"
#include "media/base/timestamp_constants.h"
#include "media/base/win/mf_helpers.h"
@@ -330,8 +331,8 @@ void MediaFoundationRenderer::SetLatencyHint(
NOTIMPLEMENTED() << "We do not use the latency hint today";
}
-// TODO(frankli): Use ComPtr<> for |cdm|.
-void MediaFoundationRenderer::OnCdmProxyReceived(IMFCdmProxy* cdm) {
+void MediaFoundationRenderer::OnCdmProxyReceived(
+ ComPtr<IMFCdmProxy> cdm_proxy) {
DVLOG_FUNC(1);
if (!waiting_for_mf_cdm_ || !content_protection_manager_) {
@@ -342,8 +343,6 @@ void MediaFoundationRenderer::OnCdmProxyReceived(IMFCdmProxy* cdm) {
waiting_for_mf_cdm_ = false;
- ComPtr<IMFCdmProxy> cdm_proxy;
- cdm_proxy.Attach(cdm);
content_protection_manager_->SetCdmProxy(cdm_proxy.Get());
mf_source_->SetCdmProxy(cdm_proxy.Get());
HRESULT hr = SetSourceOnMediaEngine();
diff --git a/chromium/media/renderers/win/media_foundation_renderer.h b/chromium/media/renderers/win/media_foundation_renderer.h
index 99c5193550f..d71cc5d7200 100644
--- a/chromium/media/renderers/win/media_foundation_renderer.h
+++ b/chromium/media/renderers/win/media_foundation_renderer.h
@@ -88,7 +88,7 @@ class MediaFoundationRenderer : public Renderer,
void OnVideoNaturalSizeChanged();
void OnTimeUpdate();
- void OnCdmProxyReceived(IMFCdmProxy* cdm);
+ void OnCdmProxyReceived(Microsoft::WRL::ComPtr<IMFCdmProxy> cdm_proxy);
HRESULT SetDCompModeInternal(bool enabled);
HRESULT GetDCompSurfaceInternal(HANDLE* surface_handle);
diff --git a/chromium/media/renderers/win/media_foundation_source_wrapper.h b/chromium/media/renderers/win/media_foundation_source_wrapper.h
index 76e1d2fc34c..4ab2ac0810a 100644
--- a/chromium/media/renderers/win/media_foundation_source_wrapper.h
+++ b/chromium/media/renderers/win/media_foundation_source_wrapper.h
@@ -14,8 +14,8 @@
#include "base/sequenced_task_runner.h"
#include "media/base/media_resource.h"
+#include "media/base/win/mf_cdm_proxy.h"
#include "media/renderers/win/media_foundation_stream_wrapper.h"
-#include "media/renderers/win/mf_cdm_proxy.h"
namespace media {
diff --git a/chromium/media/renderers/yuv_util.cc b/chromium/media/renderers/yuv_util.cc
index 3a66d63bd94..91ede20c4ee 100644
--- a/chromium/media/renderers/yuv_util.cc
+++ b/chromium/media/renderers/yuv_util.cc
@@ -9,7 +9,9 @@
#include "components/viz/common/gpu/raster_context_provider.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/client/raster_interface.h"
+#include "gpu/command_buffer/client/shared_image_interface.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
+#include "gpu/command_buffer/common/shared_image_usage.h"
#include "media/base/video_frame.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/gpu/GrContext.h"
@@ -18,72 +20,165 @@ namespace media {
namespace {
-static constexpr size_t kNumNV12Planes = 2;
-static constexpr size_t kNumYUVPlanes = 3;
-using YUVMailboxes = std::array<gpu::MailboxHolder, kNumYUVPlanes>;
-
-YUVMailboxes GetYUVMailboxes(const VideoFrame* video_frame,
- gpu::raster::RasterInterface* ri) {
- YUVMailboxes mailboxes;
-
- for (size_t i = 0; i < video_frame->NumTextures(); ++i) {
- mailboxes[i] = video_frame->mailbox_holder(i);
- DCHECK(mailboxes[i].texture_target == GL_TEXTURE_2D ||
- mailboxes[i].texture_target == GL_TEXTURE_EXTERNAL_OES ||
- mailboxes[i].texture_target == GL_TEXTURE_RECTANGLE_ARB)
- << "Unsupported texture target " << std::hex << std::showbase
- << mailboxes[i].texture_target;
- ri->WaitSyncTokenCHROMIUM(mailboxes[i].sync_token.GetConstData());
- }
-
- return mailboxes;
-}
+enum YUVIndex : size_t {
+ kYIndex = 0,
+ kUIndex = 1,
+ kVIndex = 2,
+};
+static constexpr size_t kNumNV12Planes = kUIndex + 1;
+static constexpr size_t kNumYUVPlanes = kVIndex + 1;
+using YUVMailboxes = std::array<gpu::MailboxHolder, kNumYUVPlanes>;
struct YUVPlaneTextureInfo {
GrGLTextureInfo texture = {0, 0};
bool is_shared_image = false;
};
using YUVTexturesInfo = std::array<YUVPlaneTextureInfo, kNumYUVPlanes>;
-YUVTexturesInfo GetYUVTexturesInfo(
- const VideoFrame* video_frame,
- viz::RasterContextProvider* raster_context_provider) {
- gpu::raster::RasterInterface* ri = raster_context_provider->RasterInterface();
- DCHECK(ri);
- YUVMailboxes mailboxes = GetYUVMailboxes(video_frame, ri);
- YUVTexturesInfo yuv_textures_info;
-
- GrGLenum skia_texture_format =
- video_frame->format() == PIXEL_FORMAT_NV12 ? GL_RGB8 : GL_R8_EXT;
- for (size_t i = 0; i < video_frame->NumTextures(); ++i) {
- yuv_textures_info[i].texture.fID =
- ri->CreateAndConsumeForGpuRaster(mailboxes[i].mailbox);
- if (mailboxes[i].mailbox.IsSharedImage()) {
- yuv_textures_info[i].is_shared_image = true;
- ri->BeginSharedImageAccessDirectCHROMIUM(
- yuv_textures_info[i].texture.fID,
- GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
+class VideoFrameYUVMailboxesHolder {
+ public:
+ VideoFrameYUVMailboxesHolder(const VideoFrame* video_frame,
+ viz::RasterContextProvider* provider,
+ bool import_textures)
+ : provider_(provider) {
+ DCHECK(video_frame);
+ DCHECK(video_frame->HasTextures() || video_frame->IsMappable());
+ DCHECK(video_frame->format() == PIXEL_FORMAT_I420 |
+ video_frame->format() == PIXEL_FORMAT_NV12)
+ << "VideoFrame has an unsupported YUV format " << video_frame->format();
+ is_nv12_ = video_frame->format() == PIXEL_FORMAT_NV12;
+
+ DCHECK(provider_);
+ auto* ri = provider_->RasterInterface();
+ DCHECK(ri);
+
+ if (video_frame->HasTextures()) {
+ video_frame_owns_holders_ = true;
+ for (size_t plane = 0; plane < video_frame->NumTextures(); ++plane) {
+ holders_[plane] = video_frame->mailbox_holder(plane);
+ DCHECK(holders_[plane].texture_target == GL_TEXTURE_2D ||
+ holders_[plane].texture_target == GL_TEXTURE_EXTERNAL_OES ||
+ holders_[plane].texture_target == GL_TEXTURE_RECTANGLE_ARB)
+ << "Unsupported texture target " << std::hex << std::showbase
+ << holders_[plane].texture_target;
+ ri->WaitSyncTokenCHROMIUM(holders_[plane].sync_token.GetConstData());
+ }
+ } else {
+ DCHECK(!is_nv12_) << "NV12 CPU backed VideoFrames aren't supported.";
+ video_frame_owns_holders_ = false;
+ gfx::Size y_size = video_frame->coded_size();
+ gfx::Size uv_size = gfx::Size(y_size.width() / 2, y_size.height() / 2);
+
+ auto* sii = provider_->SharedImageInterface();
+ DCHECK(sii);
+ uint32_t mailbox_usage;
+ if (provider_->ContextCapabilities().supports_oop_raster) {
+ mailbox_usage = gpu::SHARED_IMAGE_USAGE_RASTER |
+ gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION;
+ } else {
+ mailbox_usage = gpu::SHARED_IMAGE_USAGE_GLES2;
+ }
+ for (size_t plane = 0; plane < kNumYUVPlanes; ++plane) {
+ gfx::Size tex_size = plane == kYIndex ? y_size : uv_size;
+ holders_[plane].mailbox =
+ sii->CreateSharedImage(viz::ResourceFormat::LUMINANCE_8, tex_size,
+ video_frame->ColorSpace(), mailbox_usage);
+ holders_[plane].texture_target = GL_TEXTURE_2D;
+ }
+
+ // Split up shared image creation from upload so we only have to wait on
+ // one sync token.
+ ri->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
+ for (size_t plane = 0; plane < kNumYUVPlanes; ++plane) {
+ gfx::Size tex_size = plane == kYIndex ? y_size : uv_size;
+ SkImageInfo info =
+ SkImageInfo::Make(tex_size.width(), tex_size.height(),
+ kGray_8_SkColorType, kUnknown_SkAlphaType);
+ ri->WritePixels(holders_[plane].mailbox, 0, 0, GL_TEXTURE_2D,
+ video_frame->stride(plane), info,
+ video_frame->data(plane));
+ }
}
- yuv_textures_info[i].texture.fTarget = mailboxes[i].texture_target;
- yuv_textures_info[i].texture.fFormat = skia_texture_format;
+ if (import_textures) {
+ ImportTextures();
+ }
}
- return yuv_textures_info;
-}
+ ~VideoFrameYUVMailboxesHolder() {
+ auto* ri = provider_->RasterInterface();
+ DCHECK(ri);
+ if (imported_textures_) {
+ for (auto& tex_info : textures_) {
+ if (!tex_info.texture.fID)
+ continue;
+
+ if (tex_info.is_shared_image)
+ ri->EndSharedImageAccessDirectCHROMIUM(tex_info.texture.fID);
+ ri->DeleteGpuRasterTexture(tex_info.texture.fID);
+ }
+ }
-void DeleteYUVTextures(const VideoFrame* video_frame,
- viz::RasterContextProvider* raster_context_provider,
- const YUVTexturesInfo& yuv_textures_info) {
- gpu::raster::RasterInterface* ri = raster_context_provider->RasterInterface();
- DCHECK(ri);
+ // Don't destroy shared images we don't own.
+ if (video_frame_owns_holders_)
+ return;
- for (size_t i = 0; i < video_frame->NumTextures(); ++i) {
- if (yuv_textures_info[i].is_shared_image)
- ri->EndSharedImageAccessDirectCHROMIUM(yuv_textures_info[i].texture.fID);
- ri->DeleteGpuRasterTexture(yuv_textures_info[i].texture.fID);
+ gpu::SyncToken token;
+ ri->GenUnverifiedSyncTokenCHROMIUM(token.GetData());
+
+ auto* sii = provider_->SharedImageInterface();
+ DCHECK(sii);
+ for (auto& mailbox_holder : holders_) {
+ if (!mailbox_holder.mailbox.IsZero())
+ sii->DestroySharedImage(token, mailbox_holder.mailbox);
+ mailbox_holder.mailbox.SetZero();
+ }
}
-}
+
+ bool is_nv12() { return is_nv12_; }
+
+ const gpu::Mailbox& mailbox(size_t plane) {
+ DCHECK_LE(plane, is_nv12_ ? kNumNV12Planes : kNumYUVPlanes);
+ return holders_[plane].mailbox;
+ }
+
+ const GrGLTextureInfo& texture(size_t plane) {
+ DCHECK_LE(plane, is_nv12_ ? kNumNV12Planes : kNumYUVPlanes);
+ DCHECK(imported_textures_);
+ return textures_[plane].texture;
+ }
+
+ private:
+ void ImportTextures() {
+ auto* ri = provider_->RasterInterface();
+ GrGLenum skia_texture_format = is_nv12_ ? GL_RGB8 : GL_LUMINANCE8_EXT;
+ for (size_t plane = 0; plane < NumPlanes(); ++plane) {
+ textures_[plane].texture.fID =
+ ri->CreateAndConsumeForGpuRaster(holders_[plane].mailbox);
+ if (holders_[plane].mailbox.IsSharedImage()) {
+ textures_[plane].is_shared_image = true;
+ ri->BeginSharedImageAccessDirectCHROMIUM(
+ textures_[plane].texture.fID,
+ GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
+ }
+
+ textures_[plane].texture.fTarget = holders_[plane].texture_target;
+ textures_[plane].texture.fFormat = skia_texture_format;
+ }
+
+ imported_textures_ = true;
+ }
+
+ size_t NumPlanes() { return is_nv12_ ? kNumNV12Planes : kNumYUVPlanes; }
+
+ viz::RasterContextProvider* provider_ = nullptr;
+ bool imported_textures_ = false;
+ bool video_frame_owns_holders_ = false;
+ bool is_nv12_ = false;
+
+ YUVMailboxes holders_;
+ YUVTexturesInfo textures_;
+};
void ConvertFromVideoFrameYUVWithGrContext(
const VideoFrame* video_frame,
@@ -99,9 +194,8 @@ void ConvertFromVideoFrameYUVWithGrContext(
dest_tex_id, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
}
// Let the SkImage fall out of scope and track the result using dest_tex_id
- NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
- video_frame, raster_context_provider, dest_mailbox_holder.texture_target,
- dest_tex_id);
+ NewSkImageFromVideoFrameYUV(video_frame, raster_context_provider,
+ dest_mailbox_holder.texture_target, dest_tex_id);
if (dest_mailbox_holder.mailbox.IsSharedImage())
ri->EndSharedImageAccessDirectCHROMIUM(dest_tex_id);
ri->DeleteGpuRasterTexture(dest_tex_id);
@@ -132,28 +226,33 @@ void ConvertFromVideoFrameYUV(
auto* ri = raster_context_provider->RasterInterface();
DCHECK(ri);
ri->WaitSyncTokenCHROMIUM(dest_mailbox_holder.sync_token.GetConstData());
- YUVMailboxes mailboxes = GetYUVMailboxes(video_frame, ri);
SkYUVColorSpace color_space =
ColorSpaceToSkYUVColorSpace(video_frame->ColorSpace());
- if (video_frame->format() == PIXEL_FORMAT_I420) {
+
+ VideoFrameYUVMailboxesHolder yuv_mailboxes(video_frame,
+ raster_context_provider, false);
+
+ if (yuv_mailboxes.is_nv12()) {
+ ri->ConvertNV12MailboxesToRGB(dest_mailbox_holder.mailbox, color_space,
+ yuv_mailboxes.mailbox(kYIndex),
+ yuv_mailboxes.mailbox(kUIndex));
+ } else {
DCHECK_EQ(video_frame->NumTextures(), kNumYUVPlanes);
ri->ConvertYUVMailboxesToRGB(dest_mailbox_holder.mailbox, color_space,
- mailboxes[0].mailbox, mailboxes[1].mailbox,
- mailboxes[2].mailbox);
- } else {
- DCHECK_EQ(video_frame->format(), PIXEL_FORMAT_NV12);
- DCHECK_EQ(video_frame->NumTextures(), kNumNV12Planes);
- ri->ConvertNV12MailboxesToRGB(dest_mailbox_holder.mailbox, color_space,
- mailboxes[0].mailbox, mailboxes[1].mailbox);
+ yuv_mailboxes.mailbox(kYIndex),
+ yuv_mailboxes.mailbox(kUIndex),
+ yuv_mailboxes.mailbox(kVIndex));
}
}
-sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
+sk_sp<SkImage> NewSkImageFromVideoFrameYUV(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
unsigned int texture_target,
unsigned int texture_id) {
- DCHECK(video_frame->HasTextures());
+ DCHECK(video_frame->HasTextures() ||
+ (video_frame->IsMappable() &&
+ video_frame->format() == PIXEL_FORMAT_I420));
GrContext* gr_context = raster_context_provider->GrContext();
DCHECK(gr_context);
// TODO: We should compare the DCHECK vs when UpdateLastImage calls this
@@ -167,16 +266,16 @@ sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
GrGLTextureInfo backend_texture{};
- YUVTexturesInfo yuv_textures_info =
- GetYUVTexturesInfo(video_frame, raster_context_provider);
+ VideoFrameYUVMailboxesHolder yuv_textures_info(video_frame,
+ raster_context_provider, true);
GrBackendTexture yuv_textures[3] = {
GrBackendTexture(ya_tex_size.width(), ya_tex_size.height(),
- GrMipMapped::kNo, yuv_textures_info[0].texture),
+ GrMipMapped::kNo, yuv_textures_info.texture(kYIndex)),
GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
- GrMipMapped::kNo, yuv_textures_info[1].texture),
+ GrMipMapped::kNo, yuv_textures_info.texture(kUIndex)),
GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
- GrMipMapped::kNo, yuv_textures_info[2].texture),
+ GrMipMapped::kNo, yuv_textures_info.texture(kVIndex)),
};
backend_texture.fID = texture_id;
backend_texture.fTarget = texture_target;
@@ -188,9 +287,7 @@ sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
sk_sp<SkImage> img = YUVGrBackendTexturesToSkImage(
gr_context, video_frame->ColorSpace(), video_frame->format(),
yuv_textures, result_texture);
- gr_context->flush();
-
- DeleteYUVTextures(video_frame, raster_context_provider, yuv_textures_info);
+ gr_context->flushAndSubmit();
return img;
}
diff --git a/chromium/media/renderers/yuv_util.h b/chromium/media/renderers/yuv_util.h
index e8fe451ab07..cd17d4d07b4 100644
--- a/chromium/media/renderers/yuv_util.h
+++ b/chromium/media/renderers/yuv_util.h
@@ -29,14 +29,15 @@ class VideoFrame;
// Converts a YUV video frame to RGB format and stores the results in the
// provided mailbox. The caller of this function maintains ownership of the
-// mailbox.
+// mailbox. Automatically handles upload of CPU memory backed VideoFrames in
+// I420 format. VideoFrames that wrap external textures can be I420 or NV12
+// format.
MEDIA_EXPORT void ConvertFromVideoFrameYUV(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
const gpu::MailboxHolder& dest_mailbox_holder);
-MEDIA_EXPORT sk_sp<SkImage>
-NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
+MEDIA_EXPORT sk_sp<SkImage> NewSkImageFromVideoFrameYUV(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
unsigned int texture_target,
diff --git a/chromium/media/video/fake_gpu_memory_buffer.cc b/chromium/media/video/fake_gpu_memory_buffer.cc
index 586f1f835a7..6fbdb059a2b 100644
--- a/chromium/media/video/fake_gpu_memory_buffer.cc
+++ b/chromium/media/video/fake_gpu_memory_buffer.cc
@@ -4,7 +4,11 @@
#include "media/video/fake_gpu_memory_buffer.h"
+#include "base/atomic_sequence_num.h"
+#include "base/no_destructor.h"
#include "build/build_config.h"
+#include "media/base/format_utils.h"
+#include "media/base/video_frame.h"
#if defined(OS_LINUX)
#include <fcntl.h>
@@ -55,26 +59,27 @@ base::ScopedFD GetDummyFD() {
FakeGpuMemoryBuffer::FakeGpuMemoryBuffer(const gfx::Size& size,
gfx::BufferFormat format)
: size_(size), format_(format) {
- // We use only NV12 or R8 in unit tests.
- CHECK(format == gfx::BufferFormat::YUV_420_BIPLANAR ||
- format == gfx::BufferFormat::R_8);
+ base::Optional<VideoPixelFormat> video_pixel_format =
+ GfxBufferFormatToVideoPixelFormat(format);
+ CHECK(video_pixel_format);
+ video_pixel_format_ = *video_pixel_format;
- size_t y_plane_size = size_.width() * size_.height();
- size_t uv_plane_size = size_.width() * size_.height() / 2;
- data_ = std::vector<uint8_t>(y_plane_size + uv_plane_size);
+ const size_t allocation_size =
+ VideoFrame::AllocationSize(video_pixel_format_, size_);
+ data_ = std::vector<uint8_t>(allocation_size);
handle_.type = gfx::NATIVE_PIXMAP;
- // Set a dummy id since this is for testing only.
- handle_.id = gfx::GpuMemoryBufferId(0);
+
+ static base::NoDestructor<base::AtomicSequenceNumber> buffer_id_generator;
+ handle_.id = gfx::GpuMemoryBufferId(buffer_id_generator->GetNext());
#if defined(OS_LINUX)
- // Set a dummy fd since this is for testing only.
- handle_.native_pixmap_handle.planes.push_back(
- gfx::NativePixmapPlane(size_.width(), 0, y_plane_size, GetDummyFD()));
- if (format == gfx::BufferFormat::YUV_420_BIPLANAR) {
- handle_.native_pixmap_handle.planes.push_back(gfx::NativePixmapPlane(
- size_.width(), handle_.native_pixmap_handle.planes[0].size,
- uv_plane_size, GetDummyFD()));
+ for (size_t i = 0; i < VideoFrame::NumPlanes(video_pixel_format_); i++) {
+ const gfx::Size plane_size_in_bytes =
+ VideoFrame::PlaneSize(video_pixel_format_, i, size_);
+ handle_.native_pixmap_handle.planes.emplace_back(
+ plane_size_in_bytes.width(), 0, plane_size_in_bytes.GetArea(),
+ GetDummyFD());
}
#endif // defined(OS_LINUX)
}
@@ -86,17 +91,13 @@ bool FakeGpuMemoryBuffer::Map() {
}
void* FakeGpuMemoryBuffer::memory(size_t plane) {
+ DCHECK_LT(plane, VideoFrame::NumPlanes(video_pixel_format_));
auto* data_ptr = data_.data();
- size_t y_plane_size = size_.width() * size_.height();
- switch (plane) {
- case 0:
- return reinterpret_cast<void*>(data_ptr);
- case 1:
- return reinterpret_cast<void*>(data_ptr + y_plane_size);
- default:
- NOTREACHED() << "Unsupported plane: " << plane;
- return nullptr;
+ for (size_t i = 1; i <= plane; i++) {
+ data_ptr +=
+ VideoFrame::PlaneSize(video_pixel_format_, i - 1, size_).GetArea();
}
+ return data_ptr;
}
void FakeGpuMemoryBuffer::Unmap() {}
@@ -110,15 +111,8 @@ gfx::BufferFormat FakeGpuMemoryBuffer::GetFormat() const {
}
int FakeGpuMemoryBuffer::stride(size_t plane) const {
- switch (plane) {
- case 0:
- return size_.width();
- case 1:
- return size_.width();
- default:
- NOTREACHED() << "Unsupported plane: " << plane;
- return 0;
- }
+ DCHECK_LT(plane, VideoFrame::NumPlanes(video_pixel_format_));
+ return VideoFrame::PlaneSize(video_pixel_format_, plane, size_).width();
}
void FakeGpuMemoryBuffer::SetColorSpace(const gfx::ColorSpace& color_space) {}
diff --git a/chromium/media/video/fake_gpu_memory_buffer.h b/chromium/media/video/fake_gpu_memory_buffer.h
index 9ca8cda13d7..9a6499a9609 100644
--- a/chromium/media/video/fake_gpu_memory_buffer.h
+++ b/chromium/media/video/fake_gpu_memory_buffer.h
@@ -8,6 +8,7 @@
#include <memory>
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
+#include "media/base/video_types.h"
#include "ui/gfx/gpu_memory_buffer.h"
namespace media {
@@ -39,6 +40,7 @@ class FakeGpuMemoryBuffer : public gfx::GpuMemoryBuffer {
private:
gfx::Size size_;
gfx::BufferFormat format_;
+ VideoPixelFormat video_pixel_format_ = PIXEL_FORMAT_UNKNOWN;
std::vector<uint8_t> data_;
gfx::GpuMemoryBufferHandle handle_;
DISALLOW_IMPLICIT_CONSTRUCTORS(FakeGpuMemoryBuffer);
diff --git a/chromium/media/video/fake_video_encode_accelerator.cc b/chromium/media/video/fake_video_encode_accelerator.cc
index 783f8f0ba7f..8400e49040f 100644
--- a/chromium/media/video/fake_video_encode_accelerator.cc
+++ b/chromium/media/video/fake_video_encode_accelerator.cc
@@ -17,7 +17,7 @@ FakeVideoEncodeAccelerator::FakeVideoEncodeAccelerator(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner)
: task_runner_(task_runner),
will_initialization_succeed_(true),
- client_(NULL),
+ client_(nullptr),
next_frame_is_first_frame_(true) {}
FakeVideoEncodeAccelerator::~FakeVideoEncodeAccelerator() {
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
index cd549e59c7f..d883b9966ab 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -20,6 +20,7 @@
#include "base/containers/circular_deque.h"
#include "base/containers/stack_container.h"
#include "base/location.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/stringprintf.h"
@@ -933,8 +934,7 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
#else
switch (output_format_) {
case GpuVideoAcceleratorFactories::OutputFormat::I420:
- allow_overlay =
- video_frame->metadata()->IsTrue(VideoFrameMetadata::ALLOW_OVERLAY);
+ allow_overlay = video_frame->metadata()->allow_overlay;
break;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
allow_overlay = true;
@@ -964,10 +964,8 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
}
#endif // OS_WIN
frame->metadata()->MergeMetadataFrom(video_frame->metadata());
- frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY,
- allow_overlay);
- frame->metadata()->SetBoolean(VideoFrameMetadata::READ_LOCK_FENCES_ENABLED,
- true);
+ frame->metadata()->allow_overlay = allow_overlay;
+ frame->metadata()->read_lock_fences_enabled = true;
CompleteCopyRequestAndMaybeStartNextCopy(std::move(frame));
}
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
index b46a17e5a6e..c624fee627e 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
@@ -283,8 +283,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareNV12Frame) {
EXPECT_EQ(PIXEL_FORMAT_NV12, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+ EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareNV12Frame2) {
@@ -301,8 +300,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareNV12Frame2) {
EXPECT_EQ(PIXEL_FORMAT_NV12, frame->format());
EXPECT_EQ(2u, frame->NumTextures());
EXPECT_EQ(2u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+ EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30Frame) {
@@ -319,8 +317,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30Frame) {
EXPECT_EQ(PIXEL_FORMAT_XR30, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+ EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
mock_gpu_factories_->created_memory_buffers()[0]->Map();
@@ -344,8 +341,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30FrameBT709) {
EXPECT_EQ(PIXEL_FORMAT_XR30, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+ EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
mock_gpu_factories_->created_memory_buffers()[0]->Map();
@@ -369,8 +365,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30FrameBT601) {
EXPECT_EQ(PIXEL_FORMAT_XR30, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+ EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
mock_gpu_factories_->created_memory_buffers()[0]->Map();
@@ -393,8 +388,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXB30Frame) {
EXPECT_EQ(PIXEL_FORMAT_XB30, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+ EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareRGBAFrame) {
@@ -411,18 +405,15 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareRGBAFrame) {
EXPECT_EQ(PIXEL_FORMAT_ABGR, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+ EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, PreservesMetadata) {
scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10);
- software_frame->metadata()->SetBoolean(
- media::VideoFrameMetadata::END_OF_STREAM, true);
+ software_frame->metadata()->end_of_stream = true;
base::TimeTicks kTestReferenceTime =
base::TimeDelta::FromMilliseconds(12345) + base::TimeTicks();
- software_frame->metadata()->SetTimeTicks(VideoFrameMetadata::REFERENCE_TIME,
- kTestReferenceTime);
+ software_frame->metadata()->reference_time = kTestReferenceTime;
scoped_refptr<VideoFrame> frame;
gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
software_frame, base::BindOnce(MaybeCreateHardwareFrameCallback, &frame));
@@ -430,14 +421,8 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, PreservesMetadata) {
RunUntilIdle();
EXPECT_NE(software_frame.get(), frame.get());
- bool end_of_stream = false;
- EXPECT_TRUE(frame->metadata()->GetBoolean(
- media::VideoFrameMetadata::END_OF_STREAM, &end_of_stream));
- EXPECT_TRUE(end_of_stream);
- base::TimeTicks render_time;
- EXPECT_TRUE(frame->metadata()->GetTimeTicks(
- VideoFrameMetadata::REFERENCE_TIME, &render_time));
- EXPECT_EQ(kTestReferenceTime, render_time);
+ EXPECT_TRUE(frame->metadata()->end_of_stream);
+ EXPECT_EQ(kTestReferenceTime, *frame->metadata()->reference_time);
}
// CreateGpuMemoryBuffer can return null (e.g: when the GPU process is down).
@@ -638,8 +623,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, VideoFrameChangesPixelFormat) {
EXPECT_EQ(PIXEL_FORMAT_ABGR, frame_1->format());
EXPECT_EQ(1u, frame_1->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame_1->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
+ EXPECT_TRUE(frame_1->metadata()->read_lock_fences_enabled);
scoped_refptr<VideoFrame> software_frame_2 = CreateTestYUVVideoFrame(10);
mock_gpu_factories_->SetVideoFrameOutputFormat(
diff --git a/chromium/media/video/gpu_video_accelerator_factories.h b/chromium/media/video/gpu_video_accelerator_factories.h
index cfdbdbb44e1..a12b5fe12ba 100644
--- a/chromium/media/video/gpu_video_accelerator_factories.h
+++ b/chromium/media/video/gpu_video_accelerator_factories.h
@@ -55,7 +55,7 @@ class MediaLog;
// * The GpuVideoAcceleratorFactories has an associated message loop, which may
// be retrieved as |GetMessageLoop()|.
// * All calls to the Factories after construction must be made on its message
-// loop.
+// loop, unless otherwise documented below.
class MEDIA_EXPORT GpuVideoAcceleratorFactories {
public:
enum class OutputFormat {
@@ -90,15 +90,61 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
// Supported::kFalse if |config| is not supported.
//
// May be called on any thread.
+ //
+ // TODO(sandersd): Switch to bool if/when all clients check
+ // IsDecoderSupportKnown().
virtual Supported IsDecoderConfigSupported(
VideoDecoderImplementation implementation,
const VideoDecoderConfig& config) = 0;
+ // Returns true if IsDecoderConfigSupported() is ready to answer queries.
+ // Once decoder support is known, it remains known for the lifetime of |this|.
+ //
+ // May be called on any thread.
+ virtual bool IsDecoderSupportKnown() = 0;
+
+ // Registers a callback to be notified when IsDecoderConfigSupported() is
+ // ready to answer queries. The callback will be invoked on the caller's
+ // sequence.
+ //
+ // There is no way to unsubscribe a callback, it is recommended to use a
+ // WeakPtr if you need this feature.
+ //
+ // May be called on any thread.
+ virtual void NotifyDecoderSupportKnown(base::OnceClosure callback) = 0;
+
virtual std::unique_ptr<media::VideoDecoder> CreateVideoDecoder(
MediaLog* media_log,
VideoDecoderImplementation implementation,
RequestOverlayInfoCB request_overlay_info_cb) = 0;
+ // Returns the supported codec profiles of video encode accelerator.
+ // Returns nullopt if GpuVideoAcceleratorFactories don't know the VEA
+ // supported profiles.
+ //
+ // May be called on any thread.
+ //
+ // TODO(sandersd): Remove Optional if/when all clients check
+ // IsEncoderSupportKnown().
+ virtual base::Optional<VideoEncodeAccelerator::SupportedProfiles>
+ GetVideoEncodeAcceleratorSupportedProfiles() = 0;
+
+ // Returns true if GetVideoEncodeAcceleratorSupportedProfiles() is populated.
+ // Once encoder support is known, it remains known for the lifetime of |this|.
+ //
+ // May be called on any thread.
+ virtual bool IsEncoderSupportKnown() = 0;
+
+ // Registers a callback to be notified when
+ // GetVideoEncodeAcceleratorSupportedProfiles() has been populated. The
+ // callback will be invoked on the caller's sequence.
+ //
+ // There is no way to unsubscribe a callback, it is recommended to use a
+ // WeakPtr if you need this feature.
+ //
+ // May be called on any thread.
+ virtual void NotifyEncoderSupportKnown(base::OnceClosure callback) = 0;
+
// Caller owns returned pointer, but should call Destroy() on it (instead of
// directly deleting) for proper destruction, as per the
// VideoEncodeAccelerator interface.
@@ -140,12 +186,6 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
// Returns the task runner the video accelerator runs on.
virtual scoped_refptr<base::SingleThreadTaskRunner> GetTaskRunner() = 0;
- // Returns the supported codec profiles of video encode accelerator.
- // Returns nullopt if GpuVideoAcceleratorFactories don't know the VEA
- // supported profiles.
- virtual base::Optional<VideoEncodeAccelerator::SupportedProfiles>
- GetVideoEncodeAcceleratorSupportedProfiles() = 0;
-
virtual viz::RasterContextProvider* GetMediaContextProvider() = 0;
// Sets the current pipeline rendering color space.
diff --git a/chromium/media/video/h264_bit_reader.cc b/chromium/media/video/h264_bit_reader.cc
index 0fe227055c2..45c3d75c00c 100644
--- a/chromium/media/video/h264_bit_reader.cc
+++ b/chromium/media/video/h264_bit_reader.cc
@@ -8,7 +8,7 @@
namespace media {
H264BitReader::H264BitReader()
- : data_(NULL),
+ : data_(nullptr),
bytes_left_(0),
curr_byte_(0),
num_remaining_bits_in_curr_byte_(0),
diff --git a/chromium/media/video/h264_parser.cc b/chromium/media/video/h264_parser.cc
index f00faf3287d..8dbe250864e 100644
--- a/chromium/media/video/h264_parser.cc
+++ b/chromium/media/video/h264_parser.cc
@@ -8,6 +8,7 @@
#include <memory>
#include "base/logging.h"
+#include "base/notreached.h"
#include "base/numerics/safe_math.h"
#include "base/stl_util.h"
#include "media/base/subsample_entry.h"
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.h b/chromium/media/video/mock_gpu_video_accelerator_factories.h
index 63b5877c01a..6d962f4b939 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.h
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.h
@@ -34,11 +34,15 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
MOCK_METHOD2(IsDecoderConfigSupported,
Supported(VideoDecoderImplementation,
const VideoDecoderConfig&));
+ MOCK_METHOD0(IsDecoderSupportKnown, bool());
+ MOCK_METHOD1(NotifyDecoderSupportKnown, void(base::OnceClosure));
MOCK_METHOD3(CreateVideoDecoder,
std::unique_ptr<media::VideoDecoder>(MediaLog*,
VideoDecoderImplementation,
RequestOverlayInfoCB));
+ MOCK_METHOD0(IsEncoderSupportKnown, bool());
+ MOCK_METHOD1(NotifyEncoderSupportKnown, void(base::OnceClosure));
// CreateVideoEncodeAccelerator returns scoped_ptr, which the mocking
// framework does not want. Trampoline it.
MOCK_METHOD0(DoCreateVideoEncodeAccelerator, VideoEncodeAccelerator*());
diff --git a/chromium/media/video/picture.cc b/chromium/media/video/picture.cc
index 3e169be0b80..c32b025e7d9 100644
--- a/chromium/media/video/picture.cc
+++ b/chromium/media/video/picture.cc
@@ -4,6 +4,7 @@
#include "media/video/picture.h"
+#include "base/check_op.h"
#include "base/logging.h"
#include "base/macros.h"
diff --git a/chromium/media/video/video_encode_accelerator.cc b/chromium/media/video/video_encode_accelerator.cc
index 81528abd02a..1f1c14d067a 100644
--- a/chromium/media/video/video_encode_accelerator.cc
+++ b/chromium/media/video/video_encode_accelerator.cc
@@ -15,6 +15,8 @@ Vp8Metadata::Vp8Metadata()
BitstreamBufferMetadata::BitstreamBufferMetadata()
: payload_size_bytes(0), key_frame(false) {}
BitstreamBufferMetadata::BitstreamBufferMetadata(
+ const BitstreamBufferMetadata& other) = default;
+BitstreamBufferMetadata::BitstreamBufferMetadata(
BitstreamBufferMetadata&& other) = default;
BitstreamBufferMetadata::BitstreamBufferMetadata(size_t payload_size_bytes,
bool key_frame,
diff --git a/chromium/media/video/video_encode_accelerator.h b/chromium/media/video/video_encode_accelerator.h
index 84a31261293..c0d50d9814e 100644
--- a/chromium/media/video/video_encode_accelerator.h
+++ b/chromium/media/video/video_encode_accelerator.h
@@ -51,6 +51,7 @@ struct MEDIA_EXPORT Vp8Metadata final {
// |vp8|, if set, contains metadata specific to VP8. See above.
struct MEDIA_EXPORT BitstreamBufferMetadata final {
BitstreamBufferMetadata();
+ BitstreamBufferMetadata(const BitstreamBufferMetadata& other);
BitstreamBufferMetadata(BitstreamBufferMetadata&& other);
BitstreamBufferMetadata(size_t payload_size_bytes,
bool key_frame,
diff --git a/chromium/media/video/vpx_video_encoder.cc b/chromium/media/video/vpx_video_encoder.cc
index 808d585b919..696afed6ee5 100644
--- a/chromium/media/video/vpx_video_encoder.cc
+++ b/chromium/media/video/vpx_video_encoder.cc
@@ -221,12 +221,11 @@ void VpxVideoEncoder::ChangeOptions(const Options& options, StatusCB done_cb) {
}
uint64_t VpxVideoEncoder::GetFrameDuration(const VideoFrame& frame) {
- base::TimeDelta result;
- if (!frame.metadata()->GetTimeDelta(media::VideoFrameMetadata::FRAME_DURATION,
- &result)) {
- result = base::TimeDelta::FromSecondsD(1.0 / options_.framerate);
- }
- return result.InMicroseconds();
+ base::TimeDelta default_duration =
+ base::TimeDelta::FromSecondsD(1.0 / options_.framerate);
+ return frame.metadata()
+ ->frame_duration.value_or(default_duration)
+ .InMicroseconds();
}
VpxVideoEncoder::~VpxVideoEncoder() {
diff --git a/chromium/media/webcodecs/BUILD.gn b/chromium/media/webcodecs/BUILD.gn
deleted file mode 100644
index 325aa27053d..00000000000
--- a/chromium/media/webcodecs/BUILD.gn
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2020 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import("//testing/test.gni")
-
-source_set("webcodecs") {
- # Do not expand the visibility here without double-checking with OWNERS, this
- # is a roll-up target which is part of the //media component. Most other DEPs
- # should be using //media and not directly DEP this roll-up target.
- visibility = [ "//media" ]
-
- sources = [
- "wc_decoder_selector.cc",
- "wc_decoder_selector.h",
- ]
-
- public_deps = [
- "//base",
- "//media/base",
- "//media/filters",
- ]
-
- deps = []
-
- configs += [ "//media:subcomponent_config" ]
-}
-
-source_set("unit_tests") {
- testonly = true
- sources = [ "wc_decoder_selector_unittest.cc" ]
-
- deps = [
- "//base/test:test_support",
- "//media:test_support",
- "//testing/gmock",
- "//testing/gtest",
- ]
-}
diff --git a/chromium/media/webcodecs/wc_decoder_selector.cc b/chromium/media/webcodecs/wc_decoder_selector.cc
deleted file mode 100644
index 73b927dc4cc..00000000000
--- a/chromium/media/webcodecs/wc_decoder_selector.cc
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/webcodecs/wc_decoder_selector.h"
-
-#include "base/bind.h"
-#include "base/check_op.h"
-#include "base/notreached.h"
-#include "base/single_thread_task_runner.h"
-#include "media/base/channel_layout.h"
-#include "media/base/demuxer_stream.h"
-#include "media/filters/decrypting_demuxer_stream.h"
-
-namespace media {
-
-// Demuxing isn't part of WebCodecs. This shim allows us to reuse decoder
-// selection logic from <video>.
-// TODO(chcunningham): Maybe refactor DecoderSelector to separate dependency on
-// DemuxerStream. DecoderSelection doesn't conceptually require a Demuxer. The
-// tough part is re-working DecryptingDemuxerStream.
-template <DemuxerStream::Type StreamType>
-class ShimDemuxerStream : public DemuxerStream {
- public:
- using DecoderConfigType =
- typename DecoderStreamTraits<StreamType>::DecoderConfigType;
-
- ~ShimDemuxerStream() override = default;
-
- void Read(ReadCB read_cb) override { NOTREACHED(); }
- bool IsReadPending() const override {
- NOTREACHED();
- return false;
- }
-
- void Configure(DecoderConfigType config);
-
- AudioDecoderConfig audio_decoder_config() override {
- DCHECK_EQ(type(), DemuxerStream::AUDIO);
- return audio_decoder_config_;
- }
-
- VideoDecoderConfig video_decoder_config() override {
- DCHECK_EQ(type(), DemuxerStream::VIDEO);
- return video_decoder_config_;
- }
-
- Type type() const override { return stream_type; }
-
- bool SupportsConfigChanges() override {
- NOTREACHED();
- return true;
- }
-
- private:
- static const DemuxerStream::Type stream_type = StreamType;
-
- AudioDecoderConfig audio_decoder_config_;
- VideoDecoderConfig video_decoder_config_;
-};
-
-template <>
-void ShimDemuxerStream<DemuxerStream::AUDIO>::Configure(
- DecoderConfigType config) {
- audio_decoder_config_ = config;
-}
-
-template <>
-void ShimDemuxerStream<DemuxerStream::VIDEO>::Configure(
- DecoderConfigType config) {
- video_decoder_config_ = config;
-}
-
-template <DemuxerStream::Type StreamType>
-WebCodecsDecoderSelector<StreamType>::WebCodecsDecoderSelector(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
- CreateDecodersCB create_decoders_cb,
- typename Decoder::OutputCB output_cb)
- : impl_(std::move(task_runner),
- std::move(create_decoders_cb),
- &null_media_log_),
- demuxer_stream_(new ShimDemuxerStream<StreamType>()),
- stream_traits_(CreateStreamTraits()),
- output_cb_(output_cb) {
- impl_.Initialize(stream_traits_.get(), demuxer_stream_.get(),
- nullptr /*CdmContext*/, WaitingCB());
-}
-
-template <DemuxerStream::Type StreamType>
-WebCodecsDecoderSelector<StreamType>::~WebCodecsDecoderSelector() {}
-
-template <DemuxerStream::Type StreamType>
-void WebCodecsDecoderSelector<StreamType>::SelectDecoder(
- const DecoderConfig& config,
- SelectDecoderCB select_decoder_cb) {
- // |impl_| will internally use this the |config| from our ShimDemuxerStream.
- demuxer_stream_->Configure(config);
-
- // |impl_| uses a WeakFactory for its SelectDecoderCB, so we're safe to use
- // Unretained here.
- impl_.SelectDecoder(
- base::BindOnce(&WebCodecsDecoderSelector<StreamType>::OnDecoderSelected,
- base::Unretained(this), std::move(select_decoder_cb)),
- output_cb_);
-}
-
-template <>
-std::unique_ptr<WebCodecsAudioDecoderSelector::StreamTraits>
-WebCodecsDecoderSelector<DemuxerStream::AUDIO>::CreateStreamTraits() {
- // TODO(chcunningham): Consider plumbing real hw channel layout.
- return std::make_unique<WebCodecsDecoderSelector::StreamTraits>(
- &null_media_log_, CHANNEL_LAYOUT_NONE);
-}
-
-template <>
-std::unique_ptr<WebCodecsVideoDecoderSelector::StreamTraits>
-WebCodecsDecoderSelector<DemuxerStream::VIDEO>::CreateStreamTraits() {
- return std::make_unique<WebCodecsDecoderSelector::StreamTraits>(
- &null_media_log_);
-}
-
-template <DemuxerStream::Type StreamType>
-void WebCodecsDecoderSelector<StreamType>::OnDecoderSelected(
- SelectDecoderCB select_decoder_cb,
- std::unique_ptr<Decoder> decoder,
- std::unique_ptr<DecryptingDemuxerStream> decrypting_demuxer_stream) {
- DCHECK(!decrypting_demuxer_stream);
-
- // We immediately finalize decoder selection. From a spec POV we strongly
- // prefer to avoid replicating our internal design of having to wait for the
- // first frame to arrive before we consider configuration successful.
- // TODO(chcunningham): Measure first frame decode failures and find other ways
- // to solve (or minimize) the problem.
- impl_.FinalizeDecoderSelection();
-
- std::move(select_decoder_cb).Run(std::move(decoder));
-}
-
-template class WebCodecsDecoderSelector<DemuxerStream::VIDEO>;
-template class WebCodecsDecoderSelector<DemuxerStream::AUDIO>;
-
-} // namespace media
diff --git a/chromium/media/webcodecs/wc_decoder_selector.h b/chromium/media/webcodecs/wc_decoder_selector.h
deleted file mode 100644
index 207350f1a94..00000000000
--- a/chromium/media/webcodecs/wc_decoder_selector.h
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_WEBCODECS_WC_DECODER_SELECTOR_H_
-#define MEDIA_WEBCODECS_WC_DECODER_SELECTOR_H_
-
-#include <memory>
-
-#include "media/base/demuxer_stream.h"
-#include "media/base/media_export.h"
-#include "media/base/media_util.h"
-#include "media/filters/decoder_selector.h"
-#include "media/filters/decoder_stream_traits.h"
-
-namespace media {
-
-template <DemuxerStream::Type StreamType>
-class ShimDemuxerStream;
-
-template <DemuxerStream::Type StreamType>
-class MEDIA_EXPORT WebCodecsDecoderSelector {
- public:
- typedef DecoderStreamTraits<StreamType> StreamTraits;
- typedef typename StreamTraits::DecoderType Decoder;
- typedef typename StreamTraits::DecoderConfigType DecoderConfig;
-
- // Callback to create a list of decoders to select from.
- using CreateDecodersCB =
- base::RepeatingCallback<std::vector<std::unique_ptr<Decoder>>()>;
-
- // Emits the result of a single call to SelectDecoder(). Parameter is
- // the initialized Decoder. nullptr if selection failed. The caller owns the
- // Decoder.
- using SelectDecoderCB = base::OnceCallback<void(std::unique_ptr<Decoder>)>;
-
- WebCodecsDecoderSelector(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
- CreateDecodersCB create_decoders_cb,
- typename Decoder::OutputCB output_cb);
-
- // Aborts any pending decoder selection.
- ~WebCodecsDecoderSelector();
-
- // Selects and initializes a decoder using |config|. Decoder will
- // be returned via |select_decoder_cb| posted to |task_runner_|. Subsequent
- // calls will again select from the full list of decoders.
- void SelectDecoder(const DecoderConfig& config,
- SelectDecoderCB select_decoder_cb);
-
- private:
- // Helper to create |stream_traits_|.
- std::unique_ptr<StreamTraits> CreateStreamTraits();
-
- // Proxy SelectDecoderCB from impl_ to our |select_decoder_cb|.
- void OnDecoderSelected(SelectDecoderCB select_decoder_cb,
- std::unique_ptr<Decoder> decoder,
- std::unique_ptr<DecryptingDemuxerStream>);
-
- // Implements heavy lifting for decoder selection.
- DecoderSelector<StreamType> impl_;
-
- // Shim to satisfy dependencies of |impl_|. Provides DecoderConfig to |impl_|.
- std::unique_ptr<ShimDemuxerStream<StreamType>> demuxer_stream_;
-
- // Helper to unify API for configuring audio/video decoders.
- std::unique_ptr<StreamTraits> stream_traits_;
-
- // Repeating callback for decoder outputs.
- typename Decoder::OutputCB output_cb_;
-
- // TODO(chcunningham): Route MEDIA_LOG for WebCodecs.
- NullMediaLog null_media_log_;
-};
-
-typedef WebCodecsDecoderSelector<DemuxerStream::VIDEO>
- WebCodecsVideoDecoderSelector;
-typedef WebCodecsDecoderSelector<DemuxerStream::AUDIO>
- WebCodecsAudioDecoderSelector;
-
-} // namespace media
-
-#endif // MEDIA_WEBCODECS_WC_DECODER_SELECTOR_H_
diff --git a/chromium/media/webcodecs/wc_decoder_selector_unittest.cc b/chromium/media/webcodecs/wc_decoder_selector_unittest.cc
deleted file mode 100644
index a14258468ee..00000000000
--- a/chromium/media/webcodecs/wc_decoder_selector_unittest.cc
+++ /dev/null
@@ -1,240 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <vector>
-
-#include "base/test/task_environment.h"
-#include "media/base/demuxer_stream.h"
-#include "media/base/media_util.h"
-#include "media/base/mock_filters.h"
-#include "media/base/status.h"
-#include "media/base/test_helpers.h"
-#include "media/base/video_decoder.h"
-#include "media/filters/decoder_stream.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-#include "media/webcodecs/wc_decoder_selector.h"
-
-using ::testing::_;
-using ::testing::IsNull;
-using ::testing::StrictMock;
-
-namespace media {
-
-namespace {
-
-enum DecoderCapability {
- kFail,
- kSucceed,
-};
-
-const char kNoDecoder[] = "";
-const char kDecoder1[] = "Decoder1";
-const char kDecoder2[] = "Decoder2";
-
-// Specializations for the AUDIO version of the test.
-class AudioDecoderSelectorTestParam {
- public:
- static constexpr DemuxerStream::Type kStreamType = DemuxerStream::AUDIO;
-
- using DecoderSelector = WebCodecsDecoderSelector<DemuxerStream::AUDIO>;
- using MockDecoder = MockAudioDecoder;
- using Output = AudioBuffer;
-
- static AudioDecoderConfig CreateConfig() { return TestAudioConfig::Normal(); }
-
- // Create a config that won't match the return of CreateConfig().
- static AudioDecoderConfig CreateAlternateConfig() {
- return TestAudioConfig::NormalEncrypted();
- }
-
- // Decoder::Initialize() takes different parameters depending on the type.
- static void ExpectInitialize(MockDecoder* decoder,
- DecoderCapability capability,
- AudioDecoderConfig expected_config) {
- EXPECT_CALL(*decoder, Initialize_(_, _, _, _, _))
- .WillRepeatedly([capability, expected_config](
- const AudioDecoderConfig& config, CdmContext*,
- AudioDecoder::InitCB& init_cb,
- const AudioDecoder::OutputCB&, const WaitingCB&) {
- EXPECT_TRUE(config.Matches(expected_config));
- std::move(init_cb).Run(capability == kSucceed
- ? OkStatus()
- : StatusCode::kCodeOnlyForTesting);
- });
- }
-};
-
-// Specializations for the VIDEO version of the test.
-class VideoDecoderSelectorTestParam {
- public:
- static constexpr DemuxerStream::Type kStreamType = DemuxerStream::VIDEO;
-
- using DecoderSelector = WebCodecsDecoderSelector<DemuxerStream::VIDEO>;
- using MockDecoder = MockVideoDecoder;
- using Output = VideoFrame;
-
- static VideoDecoderConfig CreateConfig() { return TestVideoConfig::Normal(); }
-
- // Create a config that won't match the return of CreateConfig().
- static VideoDecoderConfig CreateAlternateConfig() {
- return TestVideoConfig::LargeEncrypted();
- }
-
- static void ExpectInitialize(MockDecoder* decoder,
- DecoderCapability capability,
- VideoDecoderConfig expected_config) {
- EXPECT_CALL(*decoder, Initialize_(_, _, _, _, _, _))
- .WillRepeatedly([capability, expected_config](
- const VideoDecoderConfig& config, bool low_delay,
- CdmContext*, VideoDecoder::InitCB& init_cb,
- const VideoDecoder::OutputCB&, const WaitingCB&) {
- EXPECT_TRUE(config.Matches(expected_config));
- std::move(init_cb).Run(capability == kSucceed
- ? OkStatus()
- : StatusCode::kCodeOnlyForTesting);
- });
- }
-};
-
-// Allocate storage for the member variables.
-constexpr DemuxerStream::Type AudioDecoderSelectorTestParam::kStreamType;
-constexpr DemuxerStream::Type VideoDecoderSelectorTestParam::kStreamType;
-
-} // namespace
-
-// Note: The parameter is called TypeParam in the test cases regardless of what
-// we call it here. It's been named the same for convenience.
-// Note: The test fixtures inherit from this class. Inside the test cases the
-// test fixture class is called TestFixture.
-template <typename TypeParam>
-class WebCodecsDecoderSelectorTest : public ::testing::Test {
- public:
- // Convenience aliases.
- using Self = WebCodecsDecoderSelectorTest<TypeParam>;
- using Decoder = typename TypeParam::DecoderSelector::Decoder;
- using DecoderConfig = typename TypeParam::DecoderSelector::DecoderConfig;
- using MockDecoder = typename TypeParam::MockDecoder;
- using Output = typename TypeParam::Output;
-
- WebCodecsDecoderSelectorTest() { CreateDecoderSelector(); }
-
- void OnOutput(scoped_refptr<Output> output) { NOTREACHED(); }
-
- MOCK_METHOD1_T(OnDecoderSelected, void(std::string));
-
- void OnDecoderSelectedThunk(std::unique_ptr<Decoder> decoder) {
- // Report only the name of the decoder, since that's what the tests care
- // about. The decoder will be destructed immediately.
- OnDecoderSelected(decoder ? decoder->GetDisplayName() : kNoDecoder);
- }
-
- void AddMockDecoder(const std::string& decoder_name,
- DecoderCapability capability) {
- // Actual decoders are created in CreateDecoders(), which may be called
- // multiple times by the DecoderSelector.
- mock_decoders_to_create_.emplace_back(decoder_name, capability);
- }
-
- std::vector<std::unique_ptr<Decoder>> CreateDecoders() {
- std::vector<std::unique_ptr<Decoder>> decoders;
-
- for (const auto& info : mock_decoders_to_create_) {
- std::unique_ptr<StrictMock<MockDecoder>> decoder =
- std::make_unique<StrictMock<MockDecoder>>(info.first);
- TypeParam::ExpectInitialize(decoder.get(), info.second,
- last_set_decoder_config_);
- decoders.push_back(std::move(decoder));
- }
-
- return decoders;
- }
-
- void CreateDecoderSelector() {
- decoder_selector_ =
- std::make_unique<WebCodecsDecoderSelector<TypeParam::kStreamType>>(
- task_environment_.GetMainThreadTaskRunner(),
- base::BindRepeating(&Self::CreateDecoders, base::Unretained(this)),
- base::BindRepeating(&Self::OnOutput, base::Unretained(this)));
- }
-
- void SelectDecoder(DecoderConfig config = TypeParam::CreateConfig()) {
- last_set_decoder_config_ = config;
- decoder_selector_->SelectDecoder(
- config,
- base::BindOnce(&Self::OnDecoderSelectedThunk, base::Unretained(this)));
- RunUntilIdle();
- }
-
- void RunUntilIdle() { task_environment_.RunUntilIdle(); }
-
- base::test::TaskEnvironment task_environment_;
- NullMediaLog media_log_;
-
- DecoderConfig last_set_decoder_config_;
-
- std::unique_ptr<WebCodecsDecoderSelector<TypeParam::kStreamType>>
- decoder_selector_;
-
- std::vector<std::pair<std::string, DecoderCapability>>
- mock_decoders_to_create_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(WebCodecsDecoderSelectorTest);
-};
-
-using WebCodecsDecoderSelectorTestParams =
- ::testing::Types<AudioDecoderSelectorTestParam,
- VideoDecoderSelectorTestParam>;
-TYPED_TEST_SUITE(WebCodecsDecoderSelectorTest,
- WebCodecsDecoderSelectorTestParams);
-
-TYPED_TEST(WebCodecsDecoderSelectorTest, NoDecoders) {
- EXPECT_CALL(*this, OnDecoderSelected(kNoDecoder));
- this->SelectDecoder();
-}
-
-TYPED_TEST(WebCodecsDecoderSelectorTest, OneDecoder) {
- this->AddMockDecoder(kDecoder1, kSucceed);
-
- EXPECT_CALL(*this, OnDecoderSelected(kDecoder1));
- this->SelectDecoder();
-}
-
-TYPED_TEST(WebCodecsDecoderSelectorTest, TwoDecoders) {
- this->AddMockDecoder(kDecoder1, kFail);
- this->AddMockDecoder(kDecoder2, kSucceed);
-
- EXPECT_CALL(*this, OnDecoderSelected(kDecoder2));
- this->SelectDecoder();
-}
-
-TYPED_TEST(WebCodecsDecoderSelectorTest, TwoDecoders_SelectAgain) {
- this->AddMockDecoder(kDecoder1, kSucceed);
- this->AddMockDecoder(kDecoder2, kSucceed);
-
- EXPECT_CALL(*this, OnDecoderSelected(kDecoder1));
- this->SelectDecoder();
-
- // Selecting again should give (a new instance of) the same decoder.
- EXPECT_CALL(*this, OnDecoderSelected(kDecoder1));
- this->SelectDecoder();
-}
-
-TYPED_TEST(WebCodecsDecoderSelectorTest, TwoDecoders_NewConfigSelectAgain) {
- this->AddMockDecoder(kDecoder1, kSucceed);
- this->AddMockDecoder(kDecoder2, kSucceed);
-
- EXPECT_CALL(*this, OnDecoderSelected(kDecoder1));
- this->SelectDecoder(TypeParam::CreateConfig());
-
- // Selecting again should give (a new instance of) the same decoder.
- EXPECT_CALL(*this, OnDecoderSelected(kDecoder1));
- // Select again with a different config. Expected config verified during
- // CreateDecoders() the SelectDecoder() call.
- this->SelectDecoder(TypeParam::CreateAlternateConfig());
-}
-
-} // namespace media